huangxiaoquan / src-openEuler-gcc (forked from src-openEuler / gcc)
LoongArch-Add-Loongson-ASX-directive-builtin-functio.patch 318.38 KB
ticat_fp committed on 2024-03-26 09:26: LoongArch: update from gcc upstream
From 6871a6a4ef5f10bc75a9dd76fff37302057cf528 Mon Sep 17 00:00:00 2001
From: Lulu Cheng <chenglulu@loongson.cn>
Date: Fri, 25 Nov 2022 11:09:49 +0800
Subject: [PATCH 066/124] LoongArch: Add Loongson ASX directive builtin
function support.

gcc/ChangeLog:

* config.gcc: Export the header file lasxintrin.h.
* config/loongarch/loongarch-builtins.cc (enum loongarch_builtin_type):
Add Loongson ASX builtin functions support.
(AVAIL_ALL): Ditto.
(LASX_BUILTIN): Ditto.
(LASX_NO_TARGET_BUILTIN): Ditto.
(LASX_BUILTIN_TEST_BRANCH): Ditto.
(CODE_FOR_lasx_xvsadd_b): Ditto.
(CODE_FOR_lasx_xvsadd_h): Ditto.
(CODE_FOR_lasx_xvsadd_w): Ditto.
(CODE_FOR_lasx_xvsadd_d): Ditto.
(CODE_FOR_lasx_xvsadd_bu): Ditto.
(CODE_FOR_lasx_xvsadd_hu): Ditto.
(CODE_FOR_lasx_xvsadd_wu): Ditto.
(CODE_FOR_lasx_xvsadd_du): Ditto.
(CODE_FOR_lasx_xvadd_b): Ditto.
(CODE_FOR_lasx_xvadd_h): Ditto.
(CODE_FOR_lasx_xvadd_w): Ditto.
(CODE_FOR_lasx_xvadd_d): Ditto.
(CODE_FOR_lasx_xvaddi_bu): Ditto.
(CODE_FOR_lasx_xvaddi_hu): Ditto.
(CODE_FOR_lasx_xvaddi_wu): Ditto.
(CODE_FOR_lasx_xvaddi_du): Ditto.
(CODE_FOR_lasx_xvand_v): Ditto.
(CODE_FOR_lasx_xvandi_b): Ditto.
(CODE_FOR_lasx_xvbitsel_v): Ditto.
(CODE_FOR_lasx_xvseqi_b): Ditto.
(CODE_FOR_lasx_xvseqi_h): Ditto.
(CODE_FOR_lasx_xvseqi_w): Ditto.
(CODE_FOR_lasx_xvseqi_d): Ditto.
(CODE_FOR_lasx_xvslti_b): Ditto.
(CODE_FOR_lasx_xvslti_h): Ditto.
(CODE_FOR_lasx_xvslti_w): Ditto.
(CODE_FOR_lasx_xvslti_d): Ditto.
(CODE_FOR_lasx_xvslti_bu): Ditto.
(CODE_FOR_lasx_xvslti_hu): Ditto.
(CODE_FOR_lasx_xvslti_wu): Ditto.
(CODE_FOR_lasx_xvslti_du): Ditto.
(CODE_FOR_lasx_xvslei_b): Ditto.
(CODE_FOR_lasx_xvslei_h): Ditto.
(CODE_FOR_lasx_xvslei_w): Ditto.
(CODE_FOR_lasx_xvslei_d): Ditto.
(CODE_FOR_lasx_xvslei_bu): Ditto.
(CODE_FOR_lasx_xvslei_hu): Ditto.
(CODE_FOR_lasx_xvslei_wu): Ditto.
(CODE_FOR_lasx_xvslei_du): Ditto.
(CODE_FOR_lasx_xvdiv_b): Ditto.
(CODE_FOR_lasx_xvdiv_h): Ditto.
(CODE_FOR_lasx_xvdiv_w): Ditto.
(CODE_FOR_lasx_xvdiv_d): Ditto.
(CODE_FOR_lasx_xvdiv_bu): Ditto.
(CODE_FOR_lasx_xvdiv_hu): Ditto.
(CODE_FOR_lasx_xvdiv_wu): Ditto.
(CODE_FOR_lasx_xvdiv_du): Ditto.
(CODE_FOR_lasx_xvfadd_s): Ditto.
(CODE_FOR_lasx_xvfadd_d): Ditto.
(CODE_FOR_lasx_xvftintrz_w_s): Ditto.
(CODE_FOR_lasx_xvftintrz_l_d): Ditto.
(CODE_FOR_lasx_xvftintrz_wu_s): Ditto.
(CODE_FOR_lasx_xvftintrz_lu_d): Ditto.
(CODE_FOR_lasx_xvffint_s_w): Ditto.
(CODE_FOR_lasx_xvffint_d_l): Ditto.
(CODE_FOR_lasx_xvffint_s_wu): Ditto.
(CODE_FOR_lasx_xvffint_d_lu): Ditto.
(CODE_FOR_lasx_xvfsub_s): Ditto.
(CODE_FOR_lasx_xvfsub_d): Ditto.
(CODE_FOR_lasx_xvfmul_s): Ditto.
(CODE_FOR_lasx_xvfmul_d): Ditto.
(CODE_FOR_lasx_xvfdiv_s): Ditto.
(CODE_FOR_lasx_xvfdiv_d): Ditto.
(CODE_FOR_lasx_xvfmax_s): Ditto.
(CODE_FOR_lasx_xvfmax_d): Ditto.
(CODE_FOR_lasx_xvfmin_s): Ditto.
(CODE_FOR_lasx_xvfmin_d): Ditto.
(CODE_FOR_lasx_xvfsqrt_s): Ditto.
(CODE_FOR_lasx_xvfsqrt_d): Ditto.
(CODE_FOR_lasx_xvflogb_s): Ditto.
(CODE_FOR_lasx_xvflogb_d): Ditto.
(CODE_FOR_lasx_xvmax_b): Ditto.
(CODE_FOR_lasx_xvmax_h): Ditto.
(CODE_FOR_lasx_xvmax_w): Ditto.
(CODE_FOR_lasx_xvmax_d): Ditto.
(CODE_FOR_lasx_xvmaxi_b): Ditto.
(CODE_FOR_lasx_xvmaxi_h): Ditto.
(CODE_FOR_lasx_xvmaxi_w): Ditto.
(CODE_FOR_lasx_xvmaxi_d): Ditto.
(CODE_FOR_lasx_xvmax_bu): Ditto.
(CODE_FOR_lasx_xvmax_hu): Ditto.
(CODE_FOR_lasx_xvmax_wu): Ditto.
(CODE_FOR_lasx_xvmax_du): Ditto.
(CODE_FOR_lasx_xvmaxi_bu): Ditto.
(CODE_FOR_lasx_xvmaxi_hu): Ditto.
(CODE_FOR_lasx_xvmaxi_wu): Ditto.
(CODE_FOR_lasx_xvmaxi_du): Ditto.
(CODE_FOR_lasx_xvmin_b): Ditto.
(CODE_FOR_lasx_xvmin_h): Ditto.
(CODE_FOR_lasx_xvmin_w): Ditto.
(CODE_FOR_lasx_xvmin_d): Ditto.
(CODE_FOR_lasx_xvmini_b): Ditto.
(CODE_FOR_lasx_xvmini_h): Ditto.
(CODE_FOR_lasx_xvmini_w): Ditto.
(CODE_FOR_lasx_xvmini_d): Ditto.
(CODE_FOR_lasx_xvmin_bu): Ditto.
(CODE_FOR_lasx_xvmin_hu): Ditto.
(CODE_FOR_lasx_xvmin_wu): Ditto.
(CODE_FOR_lasx_xvmin_du): Ditto.
(CODE_FOR_lasx_xvmini_bu): Ditto.
(CODE_FOR_lasx_xvmini_hu): Ditto.
(CODE_FOR_lasx_xvmini_wu): Ditto.
(CODE_FOR_lasx_xvmini_du): Ditto.
(CODE_FOR_lasx_xvmod_b): Ditto.
(CODE_FOR_lasx_xvmod_h): Ditto.
(CODE_FOR_lasx_xvmod_w): Ditto.
(CODE_FOR_lasx_xvmod_d): Ditto.
(CODE_FOR_lasx_xvmod_bu): Ditto.
(CODE_FOR_lasx_xvmod_hu): Ditto.
(CODE_FOR_lasx_xvmod_wu): Ditto.
(CODE_FOR_lasx_xvmod_du): Ditto.
(CODE_FOR_lasx_xvmul_b): Ditto.
(CODE_FOR_lasx_xvmul_h): Ditto.
(CODE_FOR_lasx_xvmul_w): Ditto.
(CODE_FOR_lasx_xvmul_d): Ditto.
(CODE_FOR_lasx_xvclz_b): Ditto.
(CODE_FOR_lasx_xvclz_h): Ditto.
(CODE_FOR_lasx_xvclz_w): Ditto.
(CODE_FOR_lasx_xvclz_d): Ditto.
(CODE_FOR_lasx_xvnor_v): Ditto.
(CODE_FOR_lasx_xvor_v): Ditto.
(CODE_FOR_lasx_xvori_b): Ditto.
(CODE_FOR_lasx_xvnori_b): Ditto.
(CODE_FOR_lasx_xvpcnt_b): Ditto.
(CODE_FOR_lasx_xvpcnt_h): Ditto.
(CODE_FOR_lasx_xvpcnt_w): Ditto.
(CODE_FOR_lasx_xvpcnt_d): Ditto.
(CODE_FOR_lasx_xvxor_v): Ditto.
(CODE_FOR_lasx_xvxori_b): Ditto.
(CODE_FOR_lasx_xvsll_b): Ditto.
(CODE_FOR_lasx_xvsll_h): Ditto.
(CODE_FOR_lasx_xvsll_w): Ditto.
(CODE_FOR_lasx_xvsll_d): Ditto.
(CODE_FOR_lasx_xvslli_b): Ditto.
(CODE_FOR_lasx_xvslli_h): Ditto.
(CODE_FOR_lasx_xvslli_w): Ditto.
(CODE_FOR_lasx_xvslli_d): Ditto.
(CODE_FOR_lasx_xvsra_b): Ditto.
(CODE_FOR_lasx_xvsra_h): Ditto.
(CODE_FOR_lasx_xvsra_w): Ditto.
(CODE_FOR_lasx_xvsra_d): Ditto.
(CODE_FOR_lasx_xvsrai_b): Ditto.
(CODE_FOR_lasx_xvsrai_h): Ditto.
(CODE_FOR_lasx_xvsrai_w): Ditto.
(CODE_FOR_lasx_xvsrai_d): Ditto.
(CODE_FOR_lasx_xvsrl_b): Ditto.
(CODE_FOR_lasx_xvsrl_h): Ditto.
(CODE_FOR_lasx_xvsrl_w): Ditto.
(CODE_FOR_lasx_xvsrl_d): Ditto.
(CODE_FOR_lasx_xvsrli_b): Ditto.
(CODE_FOR_lasx_xvsrli_h): Ditto.
(CODE_FOR_lasx_xvsrli_w): Ditto.
(CODE_FOR_lasx_xvsrli_d): Ditto.
(CODE_FOR_lasx_xvsub_b): Ditto.
(CODE_FOR_lasx_xvsub_h): Ditto.
(CODE_FOR_lasx_xvsub_w): Ditto.
(CODE_FOR_lasx_xvsub_d): Ditto.
(CODE_FOR_lasx_xvsubi_bu): Ditto.
(CODE_FOR_lasx_xvsubi_hu): Ditto.
(CODE_FOR_lasx_xvsubi_wu): Ditto.
(CODE_FOR_lasx_xvsubi_du): Ditto.
(CODE_FOR_lasx_xvpackod_d): Ditto.
(CODE_FOR_lasx_xvpackev_d): Ditto.
(CODE_FOR_lasx_xvpickod_d): Ditto.
(CODE_FOR_lasx_xvpickev_d): Ditto.
(CODE_FOR_lasx_xvrepli_b): Ditto.
(CODE_FOR_lasx_xvrepli_h): Ditto.
(CODE_FOR_lasx_xvrepli_w): Ditto.
(CODE_FOR_lasx_xvrepli_d): Ditto.
(CODE_FOR_lasx_xvandn_v): Ditto.
(CODE_FOR_lasx_xvorn_v): Ditto.
(CODE_FOR_lasx_xvneg_b): Ditto.
(CODE_FOR_lasx_xvneg_h): Ditto.
(CODE_FOR_lasx_xvneg_w): Ditto.
(CODE_FOR_lasx_xvneg_d): Ditto.
(CODE_FOR_lasx_xvbsrl_v): Ditto.
(CODE_FOR_lasx_xvbsll_v): Ditto.
(CODE_FOR_lasx_xvfmadd_s): Ditto.
(CODE_FOR_lasx_xvfmadd_d): Ditto.
(CODE_FOR_lasx_xvfmsub_s): Ditto.
(CODE_FOR_lasx_xvfmsub_d): Ditto.
(CODE_FOR_lasx_xvfnmadd_s): Ditto.
(CODE_FOR_lasx_xvfnmadd_d): Ditto.
(CODE_FOR_lasx_xvfnmsub_s): Ditto.
(CODE_FOR_lasx_xvfnmsub_d): Ditto.
(CODE_FOR_lasx_xvpermi_q): Ditto.
(CODE_FOR_lasx_xvpermi_d): Ditto.
(CODE_FOR_lasx_xbnz_v): Ditto.
(CODE_FOR_lasx_xbz_v): Ditto.
(CODE_FOR_lasx_xvssub_b): Ditto.
(CODE_FOR_lasx_xvssub_h): Ditto.
(CODE_FOR_lasx_xvssub_w): Ditto.
(CODE_FOR_lasx_xvssub_d): Ditto.
(CODE_FOR_lasx_xvssub_bu): Ditto.
(CODE_FOR_lasx_xvssub_hu): Ditto.
(CODE_FOR_lasx_xvssub_wu): Ditto.
(CODE_FOR_lasx_xvssub_du): Ditto.
(CODE_FOR_lasx_xvabsd_b): Ditto.
(CODE_FOR_lasx_xvabsd_h): Ditto.
(CODE_FOR_lasx_xvabsd_w): Ditto.
(CODE_FOR_lasx_xvabsd_d): Ditto.
(CODE_FOR_lasx_xvabsd_bu): Ditto.
(CODE_FOR_lasx_xvabsd_hu): Ditto.
(CODE_FOR_lasx_xvabsd_wu): Ditto.
(CODE_FOR_lasx_xvabsd_du): Ditto.
(CODE_FOR_lasx_xvavg_b): Ditto.
(CODE_FOR_lasx_xvavg_h): Ditto.
(CODE_FOR_lasx_xvavg_w): Ditto.
(CODE_FOR_lasx_xvavg_d): Ditto.
(CODE_FOR_lasx_xvavg_bu): Ditto.
(CODE_FOR_lasx_xvavg_hu): Ditto.
(CODE_FOR_lasx_xvavg_wu): Ditto.
(CODE_FOR_lasx_xvavg_du): Ditto.
(CODE_FOR_lasx_xvavgr_b): Ditto.
(CODE_FOR_lasx_xvavgr_h): Ditto.
(CODE_FOR_lasx_xvavgr_w): Ditto.
(CODE_FOR_lasx_xvavgr_d): Ditto.
(CODE_FOR_lasx_xvavgr_bu): Ditto.
(CODE_FOR_lasx_xvavgr_hu): Ditto.
(CODE_FOR_lasx_xvavgr_wu): Ditto.
(CODE_FOR_lasx_xvavgr_du): Ditto.
(CODE_FOR_lasx_xvmuh_b): Ditto.
(CODE_FOR_lasx_xvmuh_h): Ditto.
(CODE_FOR_lasx_xvmuh_w): Ditto.
(CODE_FOR_lasx_xvmuh_d): Ditto.
(CODE_FOR_lasx_xvmuh_bu): Ditto.
(CODE_FOR_lasx_xvmuh_hu): Ditto.
(CODE_FOR_lasx_xvmuh_wu): Ditto.
(CODE_FOR_lasx_xvmuh_du): Ditto.
(CODE_FOR_lasx_xvssran_b_h): Ditto.
(CODE_FOR_lasx_xvssran_h_w): Ditto.
(CODE_FOR_lasx_xvssran_w_d): Ditto.
(CODE_FOR_lasx_xvssran_bu_h): Ditto.
(CODE_FOR_lasx_xvssran_hu_w): Ditto.
(CODE_FOR_lasx_xvssran_wu_d): Ditto.
(CODE_FOR_lasx_xvssrarn_b_h): Ditto.
(CODE_FOR_lasx_xvssrarn_h_w): Ditto.
(CODE_FOR_lasx_xvssrarn_w_d): Ditto.
(CODE_FOR_lasx_xvssrarn_bu_h): Ditto.
(CODE_FOR_lasx_xvssrarn_hu_w): Ditto.
(CODE_FOR_lasx_xvssrarn_wu_d): Ditto.
(CODE_FOR_lasx_xvssrln_bu_h): Ditto.
(CODE_FOR_lasx_xvssrln_hu_w): Ditto.
(CODE_FOR_lasx_xvssrln_wu_d): Ditto.
(CODE_FOR_lasx_xvssrlrn_bu_h): Ditto.
(CODE_FOR_lasx_xvssrlrn_hu_w): Ditto.
(CODE_FOR_lasx_xvssrlrn_wu_d): Ditto.
(CODE_FOR_lasx_xvftint_w_s): Ditto.
(CODE_FOR_lasx_xvftint_l_d): Ditto.
(CODE_FOR_lasx_xvftint_wu_s): Ditto.
(CODE_FOR_lasx_xvftint_lu_d): Ditto.
(CODE_FOR_lasx_xvsllwil_h_b): Ditto.
(CODE_FOR_lasx_xvsllwil_w_h): Ditto.
(CODE_FOR_lasx_xvsllwil_d_w): Ditto.
(CODE_FOR_lasx_xvsllwil_hu_bu): Ditto.
(CODE_FOR_lasx_xvsllwil_wu_hu): Ditto.
(CODE_FOR_lasx_xvsllwil_du_wu): Ditto.
(CODE_FOR_lasx_xvsat_b): Ditto.
(CODE_FOR_lasx_xvsat_h): Ditto.
(CODE_FOR_lasx_xvsat_w): Ditto.
(CODE_FOR_lasx_xvsat_d): Ditto.
(CODE_FOR_lasx_xvsat_bu): Ditto.
(CODE_FOR_lasx_xvsat_hu): Ditto.
(CODE_FOR_lasx_xvsat_wu): Ditto.
(CODE_FOR_lasx_xvsat_du): Ditto.
(loongarch_builtin_vectorized_function): Ditto.
(loongarch_expand_builtin_insn): Ditto.
(loongarch_expand_builtin): Ditto.
* config/loongarch/loongarch-ftypes.def (1): Ditto.
(2): Ditto.
(3): Ditto.
(4): Ditto.
* config/loongarch/lasxintrin.h: New file.

Signed-off-by: Peng Fan <fanpeng@loongson.cn>
Signed-off-by: ticat_fp <fanpeng@loongson.cn>
---
gcc/config.gcc | 2 +-
gcc/config/loongarch/lasxintrin.h | 5338 ++++++++++++++++++++
gcc/config/loongarch/loongarch-builtins.cc | 1180 ++++-
gcc/config/loongarch/loongarch-ftypes.def | 271 +-
4 files changed, 6788 insertions(+), 3 deletions(-)
create mode 100644 gcc/config/loongarch/lasxintrin.h
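
For context, here is a minimal usage sketch (an editorial example, not part of the patch): with the patch applied, a translation unit built with LASX enabled (e.g. via -mlasx, which defines the __loongarch_asx macro the header tests) can include lasxintrin.h and call the wrappers directly. The intrinsic names and signatures come from the hunks that follow; the helper function names are illustrative only.

/* Editorial usage sketch, assuming an LASX-enabled LoongArch compiler.  */
#include <lasxintrin.h>

/* Element-wise logical left shift of 32 bytes (wraps
   __builtin_lasx_xvsll_b): each byte of V is shifted left by the count
   taken from the low bits of the corresponding byte of AMOUNTS.  */
__m256i
shift_bytes (__m256i v, __m256i amounts)
{
  return __lasx_xvsll_b (v, amounts);
}

/* The immediate-shift forms are macros rather than inline functions so
   that the ui3 operand stays a compile-time constant, as the instruction
   encoding requires.  */
__m256i
shift_bytes_by_2 (__m256i v)
{
  return __lasx_xvslli_b (v, 2);
}
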
diff --git a/gcc/config.gcc b/gcc/config.gcc
index 4e149e0ef..19f584344 100644
--- a/gcc/config.gcc
+++ b/gcc/config.gcc
@@ -456,7 +456,7 @@ mips*-*-*)
;;
loongarch*-*-*)
cpu_type=loongarch
- extra_headers="larchintrin.h lsxintrin.h"
+ extra_headers="larchintrin.h lsxintrin.h lasxintrin.h"
extra_objs="loongarch-c.o loongarch-builtins.o loongarch-cpu.o loongarch-opts.o loongarch-def.o"
extra_gcc_objs="loongarch-driver.o loongarch-cpu.o loongarch-opts.o loongarch-def.o"
extra_options="${extra_options} g.opt fused-madd.opt"
diff --git a/gcc/config/loongarch/lasxintrin.h b/gcc/config/loongarch/lasxintrin.h
new file mode 100644
index 000000000..d39379927
--- /dev/null
+++ b/gcc/config/loongarch/lasxintrin.h
@@ -0,0 +1,5338 @@
+/* LARCH Loongson ASX intrinsics include file.
+
+ Copyright (C) 2018 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published
+ by the Free Software Foundation; either version 3, or (at your
+ option) any later version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ Under Section 7 of GPL version 3, you are granted additional
+ permissions described in the GCC Runtime Library Exception, version
+ 3.1, as published by the Free Software Foundation.
+
+ You should have received a copy of the GNU General Public License and
+ a copy of the GCC Runtime Library Exception along with this program;
+ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+ <http://www.gnu.org/licenses/>. */
+
+#ifndef _GCC_LOONGSON_ASXINTRIN_H
+#define _GCC_LOONGSON_ASXINTRIN_H 1
+
+#if defined(__loongarch_asx)
+
+typedef signed char v32i8 __attribute__ ((vector_size(32), aligned(32)));
+typedef signed char v32i8_b __attribute__ ((vector_size(32), aligned(1)));
+typedef unsigned char v32u8 __attribute__ ((vector_size(32), aligned(32)));
+typedef unsigned char v32u8_b __attribute__ ((vector_size(32), aligned(1)));
+typedef short v16i16 __attribute__ ((vector_size(32), aligned(32)));
+typedef short v16i16_h __attribute__ ((vector_size(32), aligned(2)));
+typedef unsigned short v16u16 __attribute__ ((vector_size(32), aligned(32)));
+typedef unsigned short v16u16_h __attribute__ ((vector_size(32), aligned(2)));
+typedef int v8i32 __attribute__ ((vector_size(32), aligned(32)));
+typedef int v8i32_w __attribute__ ((vector_size(32), aligned(4)));
+typedef unsigned int v8u32 __attribute__ ((vector_size(32), aligned(32)));
+typedef unsigned int v8u32_w __attribute__ ((vector_size(32), aligned(4)));
+typedef long long v4i64 __attribute__ ((vector_size(32), aligned(32)));
+typedef long long v4i64_d __attribute__ ((vector_size(32), aligned(8)));
+typedef unsigned long long v4u64 __attribute__ ((vector_size(32), aligned(32)));
+typedef unsigned long long v4u64_d __attribute__ ((vector_size(32), aligned(8)));
+typedef float v8f32 __attribute__ ((vector_size(32), aligned(32)));
+typedef float v8f32_w __attribute__ ((vector_size(32), aligned(4)));
+typedef double v4f64 __attribute__ ((vector_size(32), aligned(32)));
+typedef double v4f64_d __attribute__ ((vector_size(32), aligned(8)));
+typedef float __m256 __attribute__ ((__vector_size__ (32),
+ __may_alias__));
+typedef long long __m256i __attribute__ ((__vector_size__ (32),
+ __may_alias__));
+typedef double __m256d __attribute__ ((__vector_size__ (32),
+ __may_alias__));
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsll_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsll_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsll_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsll_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsll_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsll_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsll_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsll_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvslli_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvslli_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvslli_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvslli_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvslli_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslli_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvslli_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvslli_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsra_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsra_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsra_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsra_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsra_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsra_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsra_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsra_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvsrai_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrai_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvsrai_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrai_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvsrai_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrai_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvsrai_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrai_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrar_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrar_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrar_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrar_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrar_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrar_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrar_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrar_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvsrari_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrari_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvsrari_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrari_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvsrari_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrari_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvsrari_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrari_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrl_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrl_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrl_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrl_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrl_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrl_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrl_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrl_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvsrli_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrli_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvsrli_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrli_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvsrli_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrli_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvsrli_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrli_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlr_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlr_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlr_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlr_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlr_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlr_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlr_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlr_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvsrlri_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrlri_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvsrlri_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrlri_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvsrlri_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrlri_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvsrlri_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvsrlri_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitclr_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitclr_b ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitclr_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitclr_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitclr_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitclr_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitclr_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitclr_d ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvbitclri_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitclri_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV16HI, UV16HI, UQI. */
+#define __lasx_xvbitclri_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitclri_h ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV8SI, UV8SI, UQI. */
+#define __lasx_xvbitclri_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitclri_w ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV4DI, UV4DI, UQI. */
+#define __lasx_xvbitclri_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitclri_d ((v4u64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitset_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitset_b ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitset_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitset_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitset_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitset_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitset_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitset_d ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvbitseti_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitseti_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV16HI, UV16HI, UQI. */
+#define __lasx_xvbitseti_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitseti_h ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV8SI, UV8SI, UQI. */
+#define __lasx_xvbitseti_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitseti_w ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV4DI, UV4DI, UQI. */
+#define __lasx_xvbitseti_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitseti_d ((v4u64)(_1), (_2)))
+
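+/* Usage sketch (illustrative): set bit 7 of every byte, e.g. to tag
+   all 8-bit elements with a flag bit.  Name is hypothetical.  */
+static inline __m256i
+example_set_msb_u8 (__m256i v)
+{
+  return __lasx_xvbitseti_b (v, 7);
+}
+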
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitrev_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitrev_b ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitrev_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitrev_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitrev_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitrev_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitrev_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvbitrev_d ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvbitrevi_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitrevi_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV16HI, UV16HI, UQI. */
+#define __lasx_xvbitrevi_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitrevi_h ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV8SI, UV8SI, UQI. */
+#define __lasx_xvbitrevi_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitrevi_w ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV4DI, UV4DI, UQI. */
+#define __lasx_xvbitrevi_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvbitrevi_d ((v4u64)(_1), (_2)))
+
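+/* Usage sketch (illustrative): toggling bit 63 of each 64-bit element
+   flips the IEEE-754 sign bit, so reinterpreting the result as
+   doubles negates them.  Helper name is hypothetical.  */
+static inline __m256i
+example_flip_sign_f64bits (__m256i v)
+{
+  return __lasx_xvbitrevi_d (v, 63);
+}
+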
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadd_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadd_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadd_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadd_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadd_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadd_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadd_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadd_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvaddi_bu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvaddi_bu ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvaddi_hu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvaddi_hu ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvaddi_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvaddi_wu ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvaddi_du(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvaddi_du ((v4i64)(_1), (_2)))
+
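+/* Usage sketch (illustrative): the immediate form saves materializing
+   a constant vector; this hypothetical helper adds 1 to each 32-bit
+   element (ui5 immediates are limited to 0..31).  */
+static inline __m256i
+example_increment_i32 (__m256i v)
+{
+  return __lasx_xvaddi_wu (v, 1);
+}
+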
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsub_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsub_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsub_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsub_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsub_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsub_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsub_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsub_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvsubi_bu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsubi_bu ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvsubi_hu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsubi_hu ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvsubi_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsubi_wu ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvsubi_du(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsubi_du ((v4i64)(_1), (_2)))
+
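+/* Usage sketch (illustrative): a wrapping element-wise difference of
+   two vectors of 16-bit elements.  Name is hypothetical.  */
+static inline __m256i
+example_diff_i16 (__m256i a, __m256i b)
+{
+  return __lasx_xvsub_h (a, b);
+}
+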
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V32QI, V32QI, QI. */
+#define __lasx_xvmaxi_b(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V16HI, V16HI, QI. */
+#define __lasx_xvmaxi_h(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V8SI, V8SI, QI. */
+#define __lasx_xvmaxi_w(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V4DI, V4DI, QI. */
+#define __lasx_xvmaxi_d(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmax_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmax_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvmaxi_bu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_bu ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV16HI, UV16HI, UQI. */
+#define __lasx_xvmaxi_hu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_hu ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV8SI, UV8SI, UQI. */
+#define __lasx_xvmaxi_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_wu ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV4DI, UV4DI, UQI. */
+#define __lasx_xvmaxi_du(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmaxi_du ((v4u64)(_1), (_2)))
+
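+/* Usage sketch (illustrative): xvmaxi with immediate 0 acts as a
+   ReLU on signed bytes, clamping negative elements to zero.  The
+   helper name is hypothetical.  */
+static inline __m256i
+example_relu_i8 (__m256i v)
+{
+  return __lasx_xvmaxi_b (v, 0);
+}
+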
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V32QI, V32QI, QI. */
+#define __lasx_xvmini_b(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V16HI, V16HI, QI. */
+#define __lasx_xvmini_h(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V8SI, V8SI, QI. */
+#define __lasx_xvmini_w(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V4DI, V4DI, QI. */
+#define __lasx_xvmini_d(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmin_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmin_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvmini_bu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_bu ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV16HI, UV16HI, UQI. */
+#define __lasx_xvmini_hu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_hu ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV8SI, UV8SI, UQI. */
+#define __lasx_xvmini_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_wu ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV4DI, UV4DI, UQI. */
+#define __lasx_xvmini_du(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvmini_du ((v4u64)(_1), (_2)))
+
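+/* Usage sketch (illustrative): composing max and min clamps each
+   signed byte into [lo, hi]; both bounds must fit the si5 immediate
+   range (-16..15).  Name is hypothetical.  */
+static inline __m256i
+example_clamp_i8 (__m256i v)
+{
+  return __lasx_xvmini_b (__lasx_xvmaxi_b (v, -8), 7);
+}
+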
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvseq_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvseq_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvseq_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvseq_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvseq_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvseq_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvseq_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvseq_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V32QI, V32QI, QI. */
+#define __lasx_xvseqi_b(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvseqi_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V16HI, V16HI, QI. */
+#define __lasx_xvseqi_h(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvseqi_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V8SI, V8SI, QI. */
+#define __lasx_xvseqi_w(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvseqi_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V4DI, V4DI, QI. */
+#define __lasx_xvseqi_d(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvseqi_d ((v4i64)(_1), (_2)))
+
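+/* Usage sketch (illustrative): vector compares produce all-ones in
+   matching elements and zero elsewhere, so the result is directly
+   usable as a byte mask.  Name is hypothetical.  */
+static inline __m256i
+example_zero_mask_i8 (__m256i v)
+{
+  return __lasx_xvseqi_b (v, 0);  /* 0xFF where v[i] == 0.  */
+}
+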
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V32QI, V32QI, QI. */
+#define __lasx_xvslti_b(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V16HI, V16HI, QI. */
+#define __lasx_xvslti_h(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V8SI, V8SI, QI. */
+#define __lasx_xvslti_w(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V4DI, V4DI, QI. */
+#define __lasx_xvslti_d(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvslt_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvslt_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, UV32QI, UQI. */
+#define __lasx_xvslti_bu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_bu ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, UV16HI, UQI. */
+#define __lasx_xvslti_hu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_hu ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, UV8SI, UQI. */
+#define __lasx_xvslti_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_wu ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V4DI, UV4DI, UQI. */
+#define __lasx_xvslti_du(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslti_du ((v4u64)(_1), (_2)))
+
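+/* Usage sketch (illustrative): assuming the comparison tests
+   first-operand-less-than-second, this hypothetical helper masks the
+   unsigned 32-bit elements of a that are below those of b.  */
+static inline __m256i
+example_below_mask_u32 (__m256i a, __m256i b)
+{
+  return __lasx_xvslt_wu (a, b);
+}
+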
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V32QI, V32QI, QI. */
+#define __lasx_xvslei_b(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V16HI, V16HI, QI. */
+#define __lasx_xvslei_h(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V8SI, V8SI, QI. */
+#define __lasx_xvslei_w(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, si5. */
+/* Data types in instruction templates: V4DI, V4DI, QI. */
+#define __lasx_xvslei_d(/*__m256i*/ _1, /*si5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsle_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsle_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, UV32QI, UQI. */
+#define __lasx_xvslei_bu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_bu ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, UV16HI, UQI. */
+#define __lasx_xvslei_hu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_hu ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, UV8SI, UQI. */
+#define __lasx_xvslei_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_wu ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V4DI, UV4DI, UQI. */
+#define __lasx_xvslei_du(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvslei_du ((v4u64)(_1), (_2)))
+
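+/* Usage sketch (illustrative): the immediate form compares against a
+   small constant; here, a mask of signed words not exceeding 4.
+   Name is hypothetical.  */
+static inline __m256i
+example_le4_mask_i32 (__m256i v)
+{
+  return __lasx_xvslei_w (v, 4);
+}
+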
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvsat_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvsat_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvsat_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvsat_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvsat_bu(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_bu ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV16HI, UV16HI, UQI. */
+#define __lasx_xvsat_hu(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_hu ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV8SI, UV8SI, UQI. */
+#define __lasx_xvsat_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_wu ((v8u32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV4DI, UV4DI, UQI. */
+#define __lasx_xvsat_du(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvsat_du ((v4u64)(_1), (_2)))
+
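+/* Usage sketch (illustrative): assuming xvsat_h with immediate n
+   saturates each signed halfword to its (n+1)-bit range, immediate 7
+   clamps to [-128, 127], a common step before narrowing to bytes.
+   Name is hypothetical.  */
+static inline __m256i
+example_sat_i16_to_8bit_range (__m256i v)
+{
+  return __lasx_xvsat_h (v, 7);
+}
+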
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadda_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadda_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadda_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadda_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadda_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadda_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadda_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadda_d ((v4i64)_1, (v4i64)_2);
+}
+
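+/* Usage sketch (illustrative): assuming xvadda adds the absolute
+   values of its operands (|a| + |b|), it can seed sum-of-magnitude
+   accumulations.  Name is hypothetical.  */
+static inline __m256i
+example_abs_sum_i32 (__m256i a, __m256i b)
+{
+  return __lasx_xvadda_w (a, b);
+}
+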
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsadd_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsadd_du ((v4u64)_1, (v4u64)_2);
+}
+
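+/* Usage sketch (illustrative): saturating adds clamp instead of
+   wrapping; useful for 8-bit pixel accumulation, where 200 + 100
+   should yield 255 rather than the wrapped 44.  Name is
+   hypothetical.  */
+static inline __m256i
+example_pixel_add_u8 (__m256i a, __m256i b)
+{
+  return __lasx_xvsadd_bu (a, b);
+}
+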
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavg_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavg_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvavgr_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvavgr_du ((v4u64)_1, (v4u64)_2);
+}
+
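+/* Usage sketch (illustrative): assuming xvavg truncates (a + b) >> 1
+   while xvavgr rounds ((a + b + 1) >> 1), the rounding form is the
+   usual choice for image downsampling.  Name is hypothetical.  */
+static inline __m256i
+example_avg_round_u8 (__m256i a, __m256i b)
+{
+  return __lasx_xvavgr_bu (a, b);
+}
+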
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssub_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssub_du ((v4u64)_1, (v4u64)_2);
+}
+
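+/* Usage sketch (illustrative): unsigned saturating subtraction
+   clamps at zero, so max(a - b, 0) needs no separate compare and
+   select.  Name is hypothetical.  */
+static inline __m256i
+example_sub_clamp0_u8 (__m256i a, __m256i b)
+{
+  return __lasx_xvssub_bu (a, b);
+}
+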
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvabsd_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvabsd_du ((v4u64)_1, (v4u64)_2);
+}
+
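+/* Usage sketch (illustrative): absolute differences |a - b| are the
+   building block of SAD-style metrics in video code.  Name is
+   hypothetical.  */
+static inline __m256i
+example_absdiff_u8 (__m256i a, __m256i b)
+{
+  return __lasx_xvabsd_bu (a, b);
+}
+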
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmul_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmul_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmul_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmul_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmul_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmul_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmul_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmul_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmadd_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmadd_b ((v32i8)_1, (v32i8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmadd_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmadd_h ((v16i16)_1, (v16i16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmadd_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmadd_w ((v8i32)_1, (v8i32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmadd_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmadd_d ((v4i64)_1, (v4i64)_2, (v4i64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmsub_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmsub_b ((v32i8)_1, (v32i8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmsub_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmsub_h ((v16i16)_1, (v16i16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmsub_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmsub_w ((v8i32)_1, (v8i32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmsub_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmsub_d ((v4i64)_1, (v4i64)_2, (v4i64)_3);
+}
+
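+/* Usage sketch (illustrative): assuming xvmadd computes
+   acc + a * b element-wise (low halves of the products), a
+   dot-product style loop body reduces to one call.  Name is
+   hypothetical.  */
+static inline __m256i
+example_mul_acc_step_i32 (__m256i acc, __m256i a, __m256i b)
+{
+  return __lasx_xvmadd_w (acc, a, b);
+}
+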
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvdiv_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvdiv_du ((v4u64)_1, (v4u64)_2);
+}
+
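+/* Usage sketch (illustrative): element-wise signed division; as with
+   scalar division, zero divisor elements are not guarded against
+   here.  Name is hypothetical.  */
+static inline __m256i
+example_div_i32 (__m256i a, __m256i b)
+{
+  return __lasx_xvdiv_w (a, b);
+}
+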
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_hu_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_hu_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_wu_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_wu_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_du_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_du_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_hu_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_hu_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_wu_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_wu_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_du_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_du_wu ((v8u32)_1, (v8u32)_2);
+}
+
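+/* Usage sketch (illustrative): assuming the horizontal widening add
+   sums adjacent narrow elements into double-width results, passing
+   the same vector twice pairwise-widens it, a first step of a
+   horizontal reduction.  Name is hypothetical.  */
+static inline __m256i
+example_pairwise_widen_u8 (__m256i v)
+{
+  return __lasx_xvhaddw_hu_bu (v, v);
+}
+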
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmod_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmod_du ((v4u64)_1, (v4u64)_2);
+}
+
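+/* Usage sketch (illustrative): element-wise remainder; handy for
+   wrapping unsigned indices into a table whose size is not a power
+   of two.  Name is hypothetical.  */
+static inline __m256i
+example_wrap_index_u32 (__m256i idx, __m256i size)
+{
+  return __lasx_xvmod_wu (idx, size);
+}
+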
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvrepl128vei_b(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvrepl128vei_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvrepl128vei_h(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvrepl128vei_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui2. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvrepl128vei_w(/*__m256i*/ _1, /*ui2*/ _2) \
+ ((__m256i)__builtin_lasx_xvrepl128vei_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui1. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvrepl128vei_d(/*__m256i*/ _1, /*ui1*/ _2) \
+ ((__m256i)__builtin_lasx_xvrepl128vei_d ((v4i64)(_1), (_2)))
+
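+/* Example (illustrative): xvrepl128vei_* replicates the element selected
+   by the immediate across each 128-bit half independently:
+
+     __m256i v = __lasx_xvrepl128vei_w (a, 2);
+*/
+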
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickev_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickev_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickev_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickev_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickev_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickev_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickev_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickev_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickod_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickod_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickod_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickod_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickod_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickod_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpickod_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpickod_d ((v4i64)_1, (v4i64)_2);
+}
+
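+/* Example (illustrative): xvpickev_* collects the even-indexed elements
+   of both sources and xvpickod_* the odd-indexed ones, each 128-bit lane
+   handled separately:
+
+     __m256i even = __lasx_xvpickev_h (a, b);
+     __m256i odd  = __lasx_xvpickod_h (a, b);
+*/
+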
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvh_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvh_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvh_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvh_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvh_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvh_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvh_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvh_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvl_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvl_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvl_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvl_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvl_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvl_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvilvl_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvilvl_d ((v4i64)_1, (v4i64)_2);
+}
+
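+/* Example (illustrative): xvilvl_* and xvilvh_* interleave the low or
+   high halves of the two sources within each 128-bit lane, giving a
+   per-lane pattern like b[0], a[0], b[1], a[1], ...:
+
+     __m256i lo = __lasx_xvilvl_b (a, b);
+*/
+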
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackev_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackev_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackev_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackev_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackev_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackev_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackev_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackev_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackod_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackod_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackod_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackod_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackod_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackod_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpackod_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvpackod_d ((v4i64)_1, (v4i64)_2);
+}
+
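+/* Example (illustrative): xvpackev_* and xvpackod_* interleave the
+   even-indexed (or odd-indexed) elements of the two sources:
+
+     __m256i p = __lasx_xvpackev_w (a, b);
+*/
+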
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvshuf_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvshuf_b ((v32i8)_1, (v32i8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvshuf_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvshuf_h ((v16i16)_1, (v16i16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvshuf_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvshuf_w ((v8i32)_1, (v8i32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvshuf_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvshuf_d ((v4i64)_1, (v4i64)_2, (v4i64)_3);
+}
+
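+/* Example (illustrative): xvshuf_b picks bytes from the two data sources
+   using the per-byte indices in the third operand; for the _h/_w/_d
+   variants the index vector is the first argument:
+
+     __m256i s = __lasx_xvshuf_b (a, b, idx);
+*/
+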
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvand_v (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvand_v ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvandi_b(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvandi_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvor_v (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvor_v ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvori_b(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvori_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvnor_v (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvnor_v ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvnori_b(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvnori_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvxor_v (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvxor_v ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: UV32QI, UV32QI, UQI. */
+#define __lasx_xvxori_b(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvxori_b ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvbitsel_v (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvbitsel_v ((v32u8)_1, (v32u8)_2, (v32u8)_3);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI, USI. */
+#define __lasx_xvbitseli_b(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvbitseli_b ((v32u8)(_1), (v32u8)(_2), (_3)))
+
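+/* Example (illustrative): xvbitsel_v merges two vectors bit by bit under
+   a mask, taking the second source where the mask bit is set:
+
+     __m256i r = __lasx_xvbitsel_v (a, b, m);   // (a & ~m) | (b & m)
+*/
+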
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V32QI, V32QI, USI. */
+#define __lasx_xvshuf4i_b(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvshuf4i_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V16HI, V16HI, USI. */
+#define __lasx_xvshuf4i_h(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvshuf4i_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V8SI, V8SI, USI. */
+#define __lasx_xvshuf4i_w(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvshuf4i_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, rj. */
+/* Data types in instruction templates: V32QI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplgr2vr_b (int _1)
+{
+ return (__m256i)__builtin_lasx_xvreplgr2vr_b ((int)_1);
+}
+
+/* Assembly instruction format: xd, rj. */
+/* Data types in instruction templates: V16HI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplgr2vr_h (int _1)
+{
+ return (__m256i)__builtin_lasx_xvreplgr2vr_h ((int)_1);
+}
+
+/* Assembly instruction format: xd, rj. */
+/* Data types in instruction templates: V8SI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplgr2vr_w (int _1)
+{
+ return (__m256i)__builtin_lasx_xvreplgr2vr_w ((int)_1);
+}
+
+/* Assembly instruction format: xd, rj. */
+/* Data types in instruction templates: V4DI, DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplgr2vr_d (long int _1)
+{
+ return (__m256i)__builtin_lasx_xvreplgr2vr_d ((long int)_1);
+}
+
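+/* Example (illustrative): xvreplgr2vr_* broadcasts a general-purpose
+   register to every element, the usual way to materialize a constant:
+
+     __m256i ones = __lasx_xvreplgr2vr_b (1);   // thirty-two bytes of 1
+*/
+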
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpcnt_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvpcnt_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpcnt_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvpcnt_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpcnt_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvpcnt_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvpcnt_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvpcnt_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclo_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclo_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclo_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclo_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclo_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclo_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclo_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclo_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclz_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclz_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclz_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclz_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclz_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclz_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvclz_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvclz_d ((v4i64)_1);
+}
+
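+/* Example (illustrative): xvpcnt_* counts the set bits in each element;
+   xvclo_* and xvclz_* count leading ones and leading zeros:
+
+     __m256i lz = __lasx_xvclz_w (a);   // leading zeros per 32-bit lane
+*/
+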
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfadd_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfadd_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfadd_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfadd_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfsub_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfsub_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfsub_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfsub_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmul_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfmul_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmul_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfmul_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfdiv_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfdiv_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfdiv_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfdiv_d ((v4f64)_1, (v4f64)_2);
+}
+
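+/* Example (illustrative): the xvfadd/xvfsub/xvfmul/xvfdiv pairs are
+   element-wise IEEE arithmetic on eight floats (_s) or four doubles (_d):
+
+     __m256 y = __lasx_xvfdiv_s (__lasx_xvfmul_s (a, b), c);
+*/
+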
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcvt_h_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcvt_h_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfcvt_s_d (__m256d _1, __m256d _2)
+{
+ return (__m256)__builtin_lasx_xvfcvt_s_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmin_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfmin_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmin_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfmin_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmina_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfmina_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmina_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfmina_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmax_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfmax_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmax_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfmax_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmaxa_s (__m256 _1, __m256 _2)
+{
+ return (__m256)__builtin_lasx_xvfmaxa_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmaxa_d (__m256d _1, __m256d _2)
+{
+ return (__m256d)__builtin_lasx_xvfmaxa_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfclass_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvfclass_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfclass_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvfclass_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfsqrt_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfsqrt_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfsqrt_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfsqrt_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrecip_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrecip_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrecip_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrecip_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrint_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrint_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrint_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrint_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrsqrt_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrsqrt_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrsqrt_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrsqrt_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvflogb_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvflogb_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvflogb_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvflogb_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfcvth_s_h (__m256i _1)
+{
+ return (__m256)__builtin_lasx_xvfcvth_s_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfcvth_d_s (__m256 _1)
+{
+ return (__m256d)__builtin_lasx_xvfcvth_d_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfcvtl_s_h (__m256i _1)
+{
+ return (__m256)__builtin_lasx_xvfcvtl_s_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfcvtl_d_s (__m256 _1)
+{
+ return (__m256d)__builtin_lasx_xvfcvtl_d_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftint_w_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftint_w_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftint_l_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftint_l_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftint_wu_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftint_wu_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftint_lu_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftint_lu_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrz_w_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrz_w_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrz_l_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrz_l_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrz_wu_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrz_wu_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrz_lu_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrz_lu_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvffint_s_w (__m256i _1)
+{
+ return (__m256)__builtin_lasx_xvffint_s_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvffint_d_l (__m256i _1)
+{
+ return (__m256d)__builtin_lasx_xvffint_d_l ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SF, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvffint_s_wu (__m256i _1)
+{
+ return (__m256)__builtin_lasx_xvffint_s_wu ((v8u32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvffint_d_lu (__m256i _1)
+{
+ return (__m256d)__builtin_lasx_xvffint_d_lu ((v4u64)_1);
+}
+
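+/* Example (illustrative): xvftint* converts float to integer (the rz
+   variants truncate toward zero) and xvffint* converts integer to float:
+
+     __m256i i = __lasx_xvftintrz_w_s (x);
+     __m256  f = __lasx_xvffint_s_w (i);
+*/
+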
+/* Assembly instruction format: xd, xj, rk. */
+/* Data types in instruction templates: V32QI, V32QI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve_b (__m256i _1, int _2)
+{
+ return (__m256i)__builtin_lasx_xvreplve_b ((v32i8)_1, (int)_2);
+}
+
+/* Assembly instruction format: xd, xj, rk. */
+/* Data types in instruction templates: V16HI, V16HI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve_h (__m256i _1, int _2)
+{
+ return (__m256i)__builtin_lasx_xvreplve_h ((v16i16)_1, (int)_2);
+}
+
+/* Assembly instruction format: xd, xj, rk. */
+/* Data types in instruction templates: V8SI, V8SI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve_w (__m256i _1, int _2)
+{
+ return (__m256i)__builtin_lasx_xvreplve_w ((v8i32)_1, (int)_2);
+}
+
+/* Assembly instruction format: xd, xj, rk. */
+/* Data types in instruction templates: V4DI, V4DI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve_d (__m256i _1, int _2)
+{
+ return (__m256i)__builtin_lasx_xvreplve_d ((v4i64)_1, (int)_2);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvpermi_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvpermi_w ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvandn_v (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvandn_v ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvneg_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvneg_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvneg_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvneg_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvneg_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvneg_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvneg_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvneg_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmuh_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmuh_du ((v4u64)_1, (v4u64)_2);
+}
+
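+/* Example (illustrative): xvmuh_* keeps only the high half of the
+   widened element-wise product, handy for fixed-point scaling:
+
+     __m256i hi = __lasx_xvmuh_w (a, b);   // ((int64_t) a[i] * b[i]) >> 32
+*/
+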
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V16HI, V32QI, UQI. */
+#define __lasx_xvsllwil_h_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsllwil_h_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V8SI, V16HI, UQI. */
+#define __lasx_xvsllwil_w_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsllwil_w_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V4DI, V8SI, UQI. */
+#define __lasx_xvsllwil_d_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsllwil_d_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: UV16HI, UV32QI, UQI. */
+#define __lasx_xvsllwil_hu_bu(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvsllwil_hu_bu ((v32u8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV8SI, UV16HI, UQI. */
+#define __lasx_xvsllwil_wu_hu(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvsllwil_wu_hu ((v16u16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV4DI, UV8SI, UQI. */
+#define __lasx_xvsllwil_du_wu(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvsllwil_du_wu ((v8u32)(_1), (_2)))
+
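+/* Example (illustrative): xvsllwil_* widens the lower elements of each
+   128-bit lane and shifts them left by the immediate in one step:
+
+     __m256i w = __lasx_xvsllwil_w_h (a, 4);
+*/
+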
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsran_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsran_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsran_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsran_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsran_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsran_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssran_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssran_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssran_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssran_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssran_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssran_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssran_bu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssran_bu_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssran_hu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssran_hu_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssran_wu_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssran_wu_d ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrarn_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrarn_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrarn_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrarn_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrarn_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrarn_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrarn_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrarn_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrarn_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrarn_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrarn_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrarn_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrarn_bu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrarn_bu_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrarn_hu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrarn_hu_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrarn_wu_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrarn_wu_d ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrln_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrln_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrln_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrln_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrln_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrln_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrln_bu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrln_bu_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrln_hu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrln_hu_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrln_wu_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrln_wu_d ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlrn_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlrn_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlrn_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlrn_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsrlrn_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsrlrn_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV32QI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrlrn_bu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrlrn_bu_h ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrlrn_hu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrlrn_hu_w ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrlrn_wu_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrlrn_wu_d ((v4u64)_1, (v4u64)_2);
+}
+
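+/* Example (illustrative): the shift-right-and-narrow family halves the
+   element width after shifting by per-element counts from the second
+   operand; the ss forms saturate and the r forms round:
+
+     __m256i n = __lasx_xvssrarn_b_h (a, b);
+*/
+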
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, UQI. */
+#define __lasx_xvfrstpi_b(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvfrstpi_b ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, UQI. */
+#define __lasx_xvfrstpi_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvfrstpi_h ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfrstp_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvfrstp_b ((v32i8)_1, (v32i8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfrstp_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvfrstp_h ((v16i16)_1, (v16i16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvshuf4i_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvshuf4i_d ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvbsrl_v(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvbsrl_v ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvbsll_v(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvbsll_v ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvextrins_b(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvextrins_b ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvextrins_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvextrins_h ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvextrins_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvextrins_w ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvextrins_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvextrins_d ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmskltz_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvmskltz_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmskltz_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvmskltz_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmskltz_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvmskltz_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmskltz_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvmskltz_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsigncov_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsigncov_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsigncov_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsigncov_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsigncov_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsigncov_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsigncov_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsigncov_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmadd_s (__m256 _1, __m256 _2, __m256 _3)
+{
+ return (__m256)__builtin_lasx_xvfmadd_s ((v8f32)_1, (v8f32)_2, (v8f32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmadd_d (__m256d _1, __m256d _2, __m256d _3)
+{
+ return (__m256d)__builtin_lasx_xvfmadd_d ((v4f64)_1, (v4f64)_2, (v4f64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfmsub_s (__m256 _1, __m256 _2, __m256 _3)
+{
+ return (__m256)__builtin_lasx_xvfmsub_s ((v8f32)_1, (v8f32)_2, (v8f32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfmsub_d (__m256d _1, __m256d _2, __m256d _3)
+{
+ return (__m256d)__builtin_lasx_xvfmsub_d ((v4f64)_1, (v4f64)_2, (v4f64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfnmadd_s (__m256 _1, __m256 _2, __m256 _3)
+{
+ return (__m256)__builtin_lasx_xvfnmadd_s ((v8f32)_1, (v8f32)_2, (v8f32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfnmadd_d (__m256d _1, __m256d _2, __m256d _3)
+{
+ return (__m256d)__builtin_lasx_xvfnmadd_d ((v4f64)_1, (v4f64)_2, (v4f64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V8SF, V8SF, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfnmsub_s (__m256 _1, __m256 _2, __m256 _3)
+{
+ return (__m256)__builtin_lasx_xvfnmsub_s ((v8f32)_1, (v8f32)_2, (v8f32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk, xa. */
+/* Data types in instruction templates: V4DF, V4DF, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfnmsub_d (__m256d _1, __m256d _2, __m256d _3)
+{
+ return (__m256d)__builtin_lasx_xvfnmsub_d ((v4f64)_1, (v4f64)_2, (v4f64)_3);
+}
+
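+/* Example (illustrative): the fused multiply-add group rounds once, so
+   xvfmadd computes a * b + c exactly before the final rounding:
+
+     __m256 r = __lasx_xvfmadd_s (a, b, c);
+*/
+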
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrne_w_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrne_w_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrne_l_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrne_l_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrp_w_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrp_w_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrp_l_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrp_l_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrm_w_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrm_w_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrm_l_d (__m256d _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrm_l_d ((v4f64)_1);
+}
+
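+/* Illustrative usage sketch (not from the upstream header). The infix
+   selects the rounding mode: rne = to nearest (ties to even), rp = toward
+   +infinity, rm = toward -infinity.
+
+     __m256i flo = __lasx_xvftintrm_w_s (f);   // per-lane (int32)floor(f)
+*/
+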
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftint_w_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvftint_w_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SF, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvffint_s_l (__m256i _1, __m256i _2)
+{
+ return (__m256)__builtin_lasx_xvffint_s_l ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrz_w_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvftintrz_w_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrp_w_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvftintrp_w_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrm_w_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvftintrm_w_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrne_w_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvftintrne_w_d ((v4f64)_1, (v4f64)_2);
+}
+
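+/* Illustrative usage sketch (not from the upstream header). These
+   two-source forms convert eight doubles from (_1, _2) into one vector of
+   eight int32 results (element placement per the ASX manual);
+   __lasx_xvffint_s_l goes the other way, packing two int64 vectors into
+   one float vector.
+
+     __m256i w = __lasx_xvftintrz_w_d (d0, d1);   // truncating convert
+*/
+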
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftinth_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftinth_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintl_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintl_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvffinth_d_w (__m256i _1)
+{
+ return (__m256d)__builtin_lasx_xvffinth_d_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DF, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvffintl_d_w (__m256i _1)
+{
+ return (__m256d)__builtin_lasx_xvffintl_d_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrzh_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrzh_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrzl_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrzl_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrph_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrph_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrpl_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrpl_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrmh_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrmh_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrml_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrml_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrneh_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrneh_l_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvftintrnel_l_s (__m256 _1)
+{
+ return (__m256i)__builtin_lasx_xvftintrnel_l_s ((v8f32)_1);
+}
+
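+/* Illustrative usage sketch (not from the upstream header). The h/l
+   suffixed forms widen while converting: only the high or low four floats
+   become four int64 lanes, with the rounding mode given by the r* infix
+   (and likewise xvffinth/xvffintl_d_w for int32 -> double).
+
+     __m256i lo = __lasx_xvftintrzl_l_s (f);   // low 4 floats, truncated
+*/
+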
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrintrne_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrintrne_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrintrne_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrintrne_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrintrz_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrintrz_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrintrz_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrintrz_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrintrp_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrintrp_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrintrp_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrintrp_d ((v4f64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256 __lasx_xvfrintrm_s (__m256 _1)
+{
+ return (__m256)__builtin_lasx_xvfrintrm_s ((v8f32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256d __lasx_xvfrintrm_d (__m256d _1)
+{
+ return (__m256d)__builtin_lasx_xvfrintrm_d ((v4f64)_1);
+}
+
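+/* Illustrative usage sketch (not from the upstream header). Unlike the
+   xvftint* conversions, xvfrint* keep the floating-point format and only
+   round each lane to an integral value, much like rint/trunc/ceil/floor.
+
+     __m256d d2 = __lasx_xvfrintrm_d (d);   // per-lane floor, still double
+*/
+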
+/* Assembly instruction format: xd, rj, si12. */
+/* Data types in instruction templates: V32QI, CVPOINTER, SI. */
+#define __lasx_xvld(/*void **/ _1, /*si12*/ _2) \
+ ((__m256i)__builtin_lasx_xvld ((void *)(_1), (_2)))
+
+/* Assembly instruction format: xd, rj, si12. */
+/* Data types in instruction templates: VOID, V32QI, CVPOINTER, SI. */
+#define __lasx_xvst(/*__m256i*/ _1, /*void **/ _2, /*si12*/ _3) \
+ ((void)__builtin_lasx_xvst ((v32i8)(_1), (void *)(_2), (_3)))
+
+/* Assembly instruction format: xd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V32QI, CVPOINTER, SI, UQI. */
+#define __lasx_xvstelm_b(/*__m256i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lasx_xvstelm_b ((v32i8)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format: xd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V16HI, CVPOINTER, SI, UQI. */
+#define __lasx_xvstelm_h(/*__m256i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lasx_xvstelm_h ((v16i16)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format: xd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V8SI, CVPOINTER, SI, UQI. */
+#define __lasx_xvstelm_w(/*__m256i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lasx_xvstelm_w ((v8i32)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format: xd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V4DI, CVPOINTER, SI, UQI. */
+#define __lasx_xvstelm_d(/*__m256i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lasx_xvstelm_d ((v4i64)(_1), (void *)(_2), (_3), (_4)))
+
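+/* Illustrative usage sketch (not from the upstream header). xvld/xvst move
+   a whole 256-bit vector at base plus a signed 12-bit immediate, while
+   xvstelm_* store the single element selected by idx; the offsets must be
+   compile-time constants, which is why these are macros.
+
+     __m256i v = __lasx_xvld (buf, 0);     // load 32 bytes from buf
+     __lasx_xvstelm_w (v, out, 0, 3);      // store 32-bit element 3
+*/
+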
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, UQI. */
+#define __lasx_xvinsve0_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui3*/ _3) \
+ ((__m256i)__builtin_lasx_xvinsve0_w ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui2. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, UQI. */
+#define __lasx_xvinsve0_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui2*/ _3) \
+ ((__m256i)__builtin_lasx_xvinsve0_d ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvpickve_w(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvpickve_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui2. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvpickve_d(/*__m256i*/ _1, /*ui2*/ _2) \
+ ((__m256i)__builtin_lasx_xvpickve_d ((v4i64)(_1), (_2)))
+
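+/* Illustrative usage sketch (not from the upstream header). xvinsve0_*
+   insert element 0 of _2 into lane _3 of _1; xvpickve_* extract lane _2
+   of _1 into element 0 of the result.
+
+     __m256i r = __lasx_xvinsve0_w (dst, src, 5);   // r[5] = src[0]
+*/
+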
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrlrn_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrlrn_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrlrn_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrlrn_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrlrn_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrlrn_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrln_b_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrln_b_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrln_h_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrln_h_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvssrln_w_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvssrln_w_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvorn_v (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvorn_v ((v32i8)_1, (v32i8)_2);
+}
+
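+/* Illustrative usage sketch (not from the upstream header). The
+   xvssrl(r)n_* ops shift each element of _1 right logically by the count
+   in the matching element of _2, then saturate and narrow to the next
+   smaller width; the extra 'r' variant rounds before truncating.
+   __lasx_xvorn_v is simply bitwise _1 | ~_2.
+
+     __m256i packed = __lasx_xvssrln_h_w (wide32, counts);
+*/
+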
+/* Assembly instruction format: xd, i13. */
+/* Data types in instruction templates: V4DI, HI. */
+#define __lasx_xvldi(/*i13*/ _1) \
+ ((__m256i)__builtin_lasx_xvldi ((_1)))
+
+/* Assembly instruction format: xd, rj, rk. */
+/* Data types in instruction templates: V32QI, CVPOINTER, DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvldx (void * _1, long int _2)
+{
+ return (__m256i)__builtin_lasx_xvldx ((void *)_1, (long int)_2);
+}
+
+/* Assembly instruction format: xd, rj, rk. */
+/* Data types in instruction templates: VOID, V32QI, CVPOINTER, DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+void __lasx_xvstx (__m256i _1, void * _2, long int _3)
+{
+ return (void)__builtin_lasx_xvstx ((v32i8)_1, (void *)_2, (long int)_3);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvextl_qu_du (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvextl_qu_du ((v4u64)_1);
+}
+
+/* Assembly instruction format: xd, rj, ui3. */
+/* Data types in instruction templates: V8SI, V8SI, SI, UQI. */
+#define __lasx_xvinsgr2vr_w(/*__m256i*/ _1, /*int*/ _2, /*ui3*/ _3) \
+ ((__m256i)__builtin_lasx_xvinsgr2vr_w ((v8i32)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format: xd, rj, ui2. */
+/* Data types in instruction templates: V4DI, V4DI, DI, UQI. */
+#define __lasx_xvinsgr2vr_d(/*__m256i*/ _1, /*long int*/ _2, /*ui2*/ _3) \
+ ((__m256i)__builtin_lasx_xvinsgr2vr_d ((v4i64)(_1), (long int)(_2), (_3)))
+
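+/* Illustrative usage sketch (not from the upstream header). xvldi builds a
+   constant vector from a 13-bit pattern immediate; xvldx/xvstx are the
+   register-indexed load/store forms; xvinsgr2vr_* insert a GPR value into
+   the constant lane _3.
+
+     __m256i v = __lasx_xvldx (buf, (long) i * 32);   // base + index
+     v = __lasx_xvinsgr2vr_w (v, 42, 0);              // lane 0 = 42
+*/
+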
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve0_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvreplve0_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve0_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvreplve0_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve0_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvreplve0_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve0_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvreplve0_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvreplve0_q (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvreplve0_q ((v32i8)_1);
+}
+
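+/* Illustrative usage sketch (not from the upstream header). The
+   xvreplve0_* family broadcasts element 0 across all lanes of the named
+   width; the _q form replicates the low 128-bit quadword.
+
+     __m256i splat = __lasx_xvreplve0_w (v);   // every 32-bit lane = v[0]
+*/
+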
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_h_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_h_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_w_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_w_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_d_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_d_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_w_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_w_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_d_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_d_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_d_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_d_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_hu_bu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_hu_bu ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_wu_hu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_wu_hu ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_du_wu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_du_wu ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_wu_bu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_wu_bu ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_du_hu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_du_hu ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_vext2xv_du_bu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_vext2xv_du_bu ((v32i8)_1);
+}
+
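+/* Illustrative usage sketch (not from the upstream header). The vext2xv_*
+   forms widen the low-order elements of the full 256-bit source to the
+   destination width, sign-extending for the plain names and
+   zero-extending for the *u_*u names.
+
+     __m256i w = __lasx_vext2xv_w_b (bytes);   // low 8 bytes -> 8 x int32
+*/
+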
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvpermi_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui8*/ _3) \
+ ((__m256i)__builtin_lasx_xvpermi_q ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui8. */
+/* Data types in instruction templates: V4DI, V4DI, USI. */
+#define __lasx_xvpermi_d(/*__m256i*/ _1, /*ui8*/ _2) \
+ ((__m256i)__builtin_lasx_xvpermi_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvperm_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvperm_w ((v8i32)_1, (v8i32)_2);
+}
+
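+/* Illustrative usage sketch (not from the upstream header). xvpermi_q
+   assembles 128-bit halves from the two sources under the ui8 control;
+   xvpermi_d permutes the four 64-bit lanes by a 2-bit-per-lane immediate
+   (compare x86 vpermq); xvperm_w permutes 32-bit lanes using per-lane
+   indices held in _2.
+
+     __m256i sw = __lasx_xvpermi_d (v, 0x4e);   // swap the 128-bit halves
+*/
+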
+/* Assembly instruction format: xd, rj, si12. */
+/* Data types in instruction templates: V32QI, CVPOINTER, SI. */
+#define __lasx_xvldrepl_b(/*void **/ _1, /*si12*/ _2) \
+ ((__m256i)__builtin_lasx_xvldrepl_b ((void *)(_1), (_2)))
+
+/* Assembly instruction format: xd, rj, si11. */
+/* Data types in instruction templates: V16HI, CVPOINTER, SI. */
+#define __lasx_xvldrepl_h(/*void **/ _1, /*si11*/ _2) \
+ ((__m256i)__builtin_lasx_xvldrepl_h ((void *)(_1), (_2)))
+
+/* Assembly instruction format: xd, rj, si10. */
+/* Data types in instruction templates: V8SI, CVPOINTER, SI. */
+#define __lasx_xvldrepl_w(/*void **/ _1, /*si10*/ _2) \
+ ((__m256i)__builtin_lasx_xvldrepl_w ((void *)(_1), (_2)))
+
+/* Assembly instruction format: xd, rj, si9. */
+/* Data types in instruction templates: V4DI, CVPOINTER, SI. */
+#define __lasx_xvldrepl_d(/*void **/ _1, /*si9*/ _2) \
+ ((__m256i)__builtin_lasx_xvldrepl_d ((void *)(_1), (_2)))
+
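+/* Illustrative usage sketch (not from the upstream header). xvldrepl_*
+   load one scalar of the named width from base plus immediate and
+   broadcast it to every lane; note the shrinking immediate ranges (si12
+   down to si9) as the element width grows.
+
+     __m256i bc = __lasx_xvldrepl_w (p, 0);   // *(const int32_t *) p, splat
+*/
+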
+/* Assembly instruction format: rd, xj, ui3. */
+/* Data types in instruction templates: SI, V8SI, UQI. */
+#define __lasx_xvpickve2gr_w(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((int)__builtin_lasx_xvpickve2gr_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: rd, xj, ui3. */
+/* Data types in instruction templates: USI, V8SI, UQI. */
+#define __lasx_xvpickve2gr_wu(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((unsigned int)__builtin_lasx_xvpickve2gr_wu ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: rd, xj, ui2. */
+/* Data types in instruction templates: DI, V4DI, UQI. */
+#define __lasx_xvpickve2gr_d(/*__m256i*/ _1, /*ui2*/ _2) \
+ ((long int)__builtin_lasx_xvpickve2gr_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: rd, xj, ui2. */
+/* Data types in instruction templates: UDI, V4DI, UQI. */
+#define __lasx_xvpickve2gr_du(/*__m256i*/ _1, /*ui2*/ _2) \
+ ((unsigned long int)__builtin_lasx_xvpickve2gr_du ((v4i64)(_1), (_2)))
+
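+/* Illustrative usage sketch (not from the upstream header). xvpickve2gr_*
+   move one vector lane into a general-purpose register, sign-extended for
+   the w/d forms and zero-extended for wu/du; the lane index must be a
+   constant.
+
+     long x = __lasx_xvpickve2gr_d (v, 2);   // lane 2 as signed 64-bit
+*/
+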
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_q_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_q_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_d_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_d_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_w_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_w_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_h_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_h_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_q_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_q_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_d_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_d_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_w_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_w_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwev_h_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwev_h_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_q_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_q_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_d_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_d_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_w_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_w_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_h_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_h_bu ((v32u8)_1, (v32u8)_2);
+}
+
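+/* Illustrative usage sketch (not from the upstream header). The *wev_*
+   ops take the even-indexed elements of both sources, widen them to twice
+   the width (unsigned for the *u forms), and then add, subtract or
+   multiply without overflow.
+
+     __m256i p = __lasx_xvmulwev_w_h (a, b);   // int32 lanes = a[2i]*b[2i]
+*/
+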
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_q_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_q_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_d_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_d_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_w_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_w_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_h_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_h_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_q_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_q_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_d_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_d_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_w_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_w_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsubwod_h_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsubwod_h_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_d_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_d_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_w_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_w_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_h_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_h_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_q_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_q_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_d_wu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_d_wu ((v8u32)_1, (v8u32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_w_hu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_w_hu ((v16u16)_1, (v16u16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_h_bu (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_h_bu ((v32u8)_1, (v32u8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_d_wu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_d_wu_w ((v8u32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_w_hu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_w_hu_h ((v16u16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_h_bu_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_h_bu_b ((v32u8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_d_wu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_d_wu_w ((v8u32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_w_hu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_w_hu_h ((v16u16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_h_bu_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_h_bu_b ((v32u8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_d_wu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_d_wu_w ((v8u32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_w_hu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_w_hu_h ((v16u16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_h_bu_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_h_bu_b ((v32u8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_d_wu_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_d_wu_w ((v8u32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, UV16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_w_hu_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_w_hu_h ((v16u16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, UV32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_h_bu_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_h_bu_b ((v32u8)_1, (v32i8)_2);
+}
+
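+/* Illustrative usage sketch (not from the upstream header). The *wod_*
+   forms mirror *wev_* on the odd-indexed elements, and the mixed
+   _wu_w/_hu_h/_bu_b suffixes widen an unsigned first operand against a
+   signed second one.
+
+     __m256i r = __lasx_xvaddwod_d_wu_w (u, s);   // u32[2i+1] + s32[2i+1]
+*/
+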
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhaddw_qu_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhaddw_qu_du ((v4u64)_1, (v4u64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_q_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_q_d ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvhsubw_qu_du (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvhsubw_qu_du ((v4u64)_1, (v4u64)_2);
+}
+
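+/* Illustrative usage sketch (not from the upstream header); pairing as we
+   read the vhaddw/vhsubw description: each odd element of _1 is widened
+   and added to (or has subtracted from it) the even element of _2 below
+   it, giving 128-bit results here.
+
+     __m256i s = __lasx_xvhaddw_q_d (a, b);
+*/
+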
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_q_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_q_d ((v4i64)_1, (v4i64)_2, (v4i64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_d_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_d_w ((v4i64)_1, (v8i32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_w_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_w_h ((v8i32)_1, (v16i16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_h_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_h_b ((v16i16)_1, (v32i8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_q_du (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_q_du ((v4u64)_1, (v4u64)_2, (v4u64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_d_wu (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_d_wu ((v4u64)_1, (v8u32)_2, (v8u32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_w_hu (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_w_hu ((v8u32)_1, (v16u16)_2, (v16u16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_h_bu (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_h_bu ((v16u16)_1, (v32u8)_2, (v32u8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_q_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_q_d ((v4i64)_1, (v4i64)_2, (v4i64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_d_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_d_w ((v4i64)_1, (v8i32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_w_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_w_h ((v8i32)_1, (v16i16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_h_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_h_b ((v16i16)_1, (v32i8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_q_du (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_q_du ((v4u64)_1, (v4u64)_2, (v4u64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV4DI, UV4DI, UV8SI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_d_wu (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_d_wu ((v4u64)_1, (v8u32)_2, (v8u32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV8SI, UV8SI, UV16HI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_w_hu (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_w_hu ((v8u32)_1, (v16u16)_2, (v16u16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: UV16HI, UV16HI, UV32QI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_h_bu (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_h_bu ((v16u16)_1, (v32u8)_2, (v32u8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, UV4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_q_du_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_q_du_d ((v4i64)_1, (v4u64)_2, (v4i64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, UV8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_d_wu_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_d_wu_w ((v4i64)_1, (v8u32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, UV16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_w_hu_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_w_hu_h ((v8i32)_1, (v16u16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, UV32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwev_h_bu_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwev_h_bu_b ((v16i16)_1, (v32u8)_2, (v32i8)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, UV4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_q_du_d (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_q_du_d ((v4i64)_1, (v4u64)_2, (v4i64)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, UV8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_d_wu_w (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_d_wu_w ((v4i64)_1, (v8u32)_2, (v8i32)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, UV16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_w_hu_h (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_w_hu_h ((v8i32)_1, (v16u16)_2, (v16i16)_3);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, UV32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmaddwod_h_bu_b (__m256i _1, __m256i _2, __m256i _3)
+{
+ return (__m256i)__builtin_lasx_xvmaddwod_h_bu_b ((v16i16)_1, (v32u8)_2, (v32i8)_3);
+}
+
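+/* Illustrative usage sketch (not from the upstream header). The
+   xvmaddw{ev,od}_* ops fuse the widening multiplies with an accumulate:
+   _1 is the double-width accumulator, _2 and _3 supply the narrow
+   even/odd factors.
+
+     acc = __lasx_xvmaddwev_w_h (acc, a, b);   // acc[i] += a[2i] * b[2i]
+*/
+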
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvrotr_b (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvrotr_b ((v32i8)_1, (v32i8)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvrotr_h (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvrotr_h ((v16i16)_1, (v16i16)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvrotr_w (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvrotr_w ((v8i32)_1, (v8i32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvrotr_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvrotr_d ((v4i64)_1, (v4i64)_2);
+}
+
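+/* Illustrative usage sketch (not from the upstream header). xvrotr_*
+   rotate each element of _1 right by the count in the matching element of
+   _2, counts taken modulo the element width.
+
+     __m256i r = __lasx_xvrotr_w (v, counts);
+*/
+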
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvadd_q (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvadd_q ((v4i64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvsub_q (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvsub_q ((v4i64)_1, (v4i64)_2);
+}
+
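+/* Illustrative usage sketch (not from the upstream header). The _q forms
+   perform full 128-bit integer addition and subtraction, one per 128-bit
+   half of the register, so carries propagate across the 64-bit lanes.
+
+     __m256i sum = __lasx_xvadd_q (a, b);   // two independent 128-bit adds
+*/
+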
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwev_q_du_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwev_q_du_d ((v4u64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvaddwod_q_du_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvaddwod_q_du_d ((v4u64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwev_q_du_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwev_q_du_d ((v4u64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, UV4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmulwod_q_du_d (__m256i _1, __m256i _2)
+{
+ return (__m256i)__builtin_lasx_xvmulwod_q_du_d ((v4u64)_1, (v4i64)_2);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmskgez_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvmskgez_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V32QI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvmsknz_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvmsknz_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V16HI, V32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_h_b (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_h_b ((v32i8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V8SI, V16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_w_h (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_w_h ((v16i16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_d_w (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_d_w ((v8i32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_q_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_q_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV16HI, UV32QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_hu_bu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_hu_bu ((v32u8)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV8SI, UV16HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_wu_hu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_wu_hu ((v16u16)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV4DI, UV8SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_du_wu (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_du_wu ((v8u32)_1);
+}
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: UV4DI, UV4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvexth_qu_du (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvexth_qu_du ((v4u64)_1);
+}
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V32QI, V32QI, UQI. */
+#define __lasx_xvrotri_b(/*__m256i*/ _1, /*ui3*/ _2) \
+ ((__m256i)__builtin_lasx_xvrotri_b ((v32i8)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V16HI, V16HI, UQI. */
+#define __lasx_xvrotri_h(/*__m256i*/ _1, /*ui4*/ _2) \
+ ((__m256i)__builtin_lasx_xvrotri_h ((v16i16)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V8SI, V8SI, UQI. */
+#define __lasx_xvrotri_w(/*__m256i*/ _1, /*ui5*/ _2) \
+ ((__m256i)__builtin_lasx_xvrotri_w ((v8i32)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V4DI, V4DI, UQI. */
+#define __lasx_xvrotri_d(/*__m256i*/ _1, /*ui6*/ _2) \
+ ((__m256i)__builtin_lasx_xvrotri_d ((v4i64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj. */
+/* Data types in instruction templates: V4DI, V4DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvextl_q_d (__m256i _1)
+{
+ return (__m256i)__builtin_lasx_xvextl_q_d ((v4i64)_1);
+}
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvsrlni_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlni_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvsrlni_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlni_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvsrlni_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlni_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvsrlni_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlni_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvsrlrni_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlrni_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvsrlrni_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlrni_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvsrlrni_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlrni_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvsrlrni_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrlrni_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvssrlni_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvssrlni_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvssrlni_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvssrlni_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV32QI, UV32QI, V32QI, USI. */
+#define __lasx_xvssrlni_bu_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_bu_h ((v32u8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV16HI, UV16HI, V16HI, USI. */
+#define __lasx_xvssrlni_hu_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_hu_w ((v16u16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV8SI, UV8SI, V8SI, USI. */
+#define __lasx_xvssrlni_wu_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_wu_d ((v8u32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: UV4DI, UV4DI, V4DI, USI. */
+#define __lasx_xvssrlni_du_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlni_du_q ((v4u64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvssrlrni_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvssrlrni_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvssrlrni_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvssrlrni_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV32QI, UV32QI, V32QI, USI. */
+#define __lasx_xvssrlrni_bu_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_bu_h ((v32u8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV16HI, UV16HI, V16HI, USI. */
+#define __lasx_xvssrlrni_hu_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_hu_w ((v16u16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV8SI, UV8SI, V8SI, USI. */
+#define __lasx_xvssrlrni_wu_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_wu_d ((v8u32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: UV4DI, UV4DI, V4DI, USI. */
+#define __lasx_xvssrlrni_du_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrlrni_du_q ((v4u64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvsrani_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrani_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvsrani_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrani_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvsrani_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrani_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvsrani_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrani_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvsrarni_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrarni_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvsrarni_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrarni_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvsrarni_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrarni_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvsrarni_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvsrarni_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvssrani_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvssrani_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvssrani_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvssrani_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV32QI, UV32QI, V32QI, USI. */
+#define __lasx_xvssrani_bu_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_bu_h ((v32u8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV16HI, UV16HI, V16HI, USI. */
+#define __lasx_xvssrani_hu_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_hu_w ((v16u16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV8SI, UV8SI, V8SI, USI. */
+#define __lasx_xvssrani_wu_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_wu_d ((v8u32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: UV4DI, UV4DI, V4DI, USI. */
+#define __lasx_xvssrani_du_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrani_du_q ((v4u64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: V32QI, V32QI, V32QI, USI. */
+#define __lasx_xvssrarni_b_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_b_h ((v32i8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: V16HI, V16HI, V16HI, USI. */
+#define __lasx_xvssrarni_h_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_h_w ((v16i16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: V8SI, V8SI, V8SI, USI. */
+#define __lasx_xvssrarni_w_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_w_d ((v8i32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: V4DI, V4DI, V4DI, USI. */
+#define __lasx_xvssrarni_d_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_d_q ((v4i64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui4. */
+/* Data types in instruction templates: UV32QI, UV32QI, V32QI, USI. */
+#define __lasx_xvssrarni_bu_h(/*__m256i*/ _1, /*__m256i*/ _2, /*ui4*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_bu_h ((v32u8)(_1), (v32i8)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui5. */
+/* Data types in instruction templates: UV16HI, UV16HI, V16HI, USI. */
+#define __lasx_xvssrarni_hu_w(/*__m256i*/ _1, /*__m256i*/ _2, /*ui5*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_hu_w ((v16u16)(_1), (v16i16)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui6. */
+/* Data types in instruction templates: UV8SI, UV8SI, V8SI, USI. */
+#define __lasx_xvssrarni_wu_d(/*__m256i*/ _1, /*__m256i*/ _2, /*ui6*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_wu_d ((v8u32)(_1), (v8i32)(_2), (_3)))
+
+/* Assembly instruction format: xd, xj, ui7. */
+/* Data types in instruction templates: UV4DI, UV4DI, V4DI, USI. */
+#define __lasx_xvssrarni_du_q(/*__m256i*/ _1, /*__m256i*/ _2, /*ui7*/ _3) \
+ ((__m256i)__builtin_lasx_xvssrarni_du_q ((v4u64)(_1), (v4i64)(_2), (_3)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV32QI. */
+#define __lasx_xbnz_b(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbnz_b ((v32u8)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV4DI. */
+#define __lasx_xbnz_d(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbnz_d ((v4u64)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV16HI. */
+#define __lasx_xbnz_h(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbnz_h ((v16u16)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV32QI. */
+#define __lasx_xbnz_v(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbnz_v ((v32u8)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV8SI. */
+#define __lasx_xbnz_w(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbnz_w ((v8u32)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV32QI. */
+#define __lasx_xbz_b(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbz_b ((v32u8)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV4DI. */
+#define __lasx_xbz_d(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbz_d ((v4u64)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV16HI. */
+#define __lasx_xbz_h(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbz_h ((v16u16)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV32QI. */
+#define __lasx_xbz_v(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbz_v ((v32u8)(_1)))
+
+/* Assembly instruction format: cd, xj. */
+/* Data types in instruction templates: SI, UV8SI. */
+#define __lasx_xbz_w(/*__m256i*/ _1) \
+ ((int)__builtin_lasx_xbz_w ((v8u32)(_1)))
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_caf_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_caf_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_caf_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_caf_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_ceq_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_ceq_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_ceq_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_ceq_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cle_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cle_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cle_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cle_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_clt_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_clt_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_clt_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_clt_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cne_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cne_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cne_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cne_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cor_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cor_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cor_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cor_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cueq_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cueq_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cueq_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cueq_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cule_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cule_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cule_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cule_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cult_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cult_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cult_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cult_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cun_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cun_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cune_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cune_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cune_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cune_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_cun_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_cun_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_saf_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_saf_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_saf_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_saf_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_seq_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_seq_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_seq_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_seq_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sle_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sle_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sle_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sle_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_slt_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_slt_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_slt_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_slt_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sne_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sne_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sne_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sne_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sor_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sor_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sor_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sor_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sueq_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sueq_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sueq_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sueq_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sule_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sule_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sule_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sule_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sult_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sult_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sult_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sult_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sun_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sun_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V4DI, V4DF, V4DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sune_d (__m256d _1, __m256d _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sune_d ((v4f64)_1, (v4f64)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sune_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sune_s ((v8f32)_1, (v8f32)_2);
+}
+
+/* Assembly instruction format: xd, xj, xk. */
+/* Data types in instruction templates: V8SI, V8SF, V8SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m256i __lasx_xvfcmp_sun_s (__m256 _1, __m256 _2)
+{
+ return (__m256i)__builtin_lasx_xvfcmp_sun_s ((v8f32)_1, (v8f32)_2);
+}
+
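The compare intrinsics above all follow one consumption pattern: each result lane is all-ones when the condition holds for that lane and all-zeros otherwise (my reading of the vfcmp semantics), so the mask can feed a scalar test such as the xbnz/xbz macros earlier in this header. A minimal hedged sketch, with the function name purely illustrative:

#include <lasxintrin.h>

/* Return 1 if any double lane of A compares less-than the matching
   lane of B ("clt" = ordered less-than, quiet on NaN).  */
int
any_less (__m256d a, __m256d b)
{
  __m256i m = __lasx_xvfcmp_clt_d (a, b);   /* per-lane all-ones/zeros mask */
  return __lasx_xbnz_v (m);                 /* any bit set in the mask?  */
}
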
+/* Assembly instruction format: xd, xj, ui2. */
+/* Data types in instruction templates: V4DF, V4DF, UQI. */
+#define __lasx_xvpickve_d_f(/*__m256d*/ _1, /*ui2*/ _2) \
+ ((__m256d)__builtin_lasx_xvpickve_d_f ((v4f64)(_1), (_2)))
+
+/* Assembly instruction format: xd, xj, ui3. */
+/* Data types in instruction templates: V8SF, V8SF, UQI. */
+#define __lasx_xvpickve_w_f(/*__m256*/ _1, /*ui3*/ _2) \
+ ((__m256)__builtin_lasx_xvpickve_w_f ((v8f32)(_1), (_2)))
+
+/* Assembly instruction format: xd, si10. */
+/* Data types in instruction templates: V32QI, HI. */
+#define __lasx_xvrepli_b(/*si10*/ _1) \
+ ((__m256i)__builtin_lasx_xvrepli_b ((_1)))
+
+/* Assembly instruction format: xd, si10. */
+/* Data types in instruction templates: V4DI, HI. */
+#define __lasx_xvrepli_d(/*si10*/ _1) \
+ ((__m256i)__builtin_lasx_xvrepli_d ((_1)))
+
+/* Assembly instruction format: xd, si10. */
+/* Data types in instruction templates: V16HI, HI. */
+#define __lasx_xvrepli_h(/*si10*/ _1) \
+ ((__m256i)__builtin_lasx_xvrepli_h ((_1)))
+
+/* Assembly instruction format: xd, si10. */
+/* Data types in instruction templates: V8SI, HI. */
+#define __lasx_xvrepli_w(/*si10*/ _1) \
+ ((__m256i)__builtin_lasx_xvrepli_w ((_1)))
+
+#endif /* defined(__loongarch_asx). */
+#endif /* _GCC_LOONGSON_ASXINTRIN_H. */
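For orientation, here is a minimal usage sketch of the intrinsics declared above. It assumes this patch is applied, that compilation targets LoongArch with -mlasx, and that the installed header is lasxintrin.h (matching the include guard above); the function name and the per-line semantics comments are my reading of the mnemonics, not normative.

#include <lasxintrin.h>

/* Rotate each 32-bit lane of A right by 5, sign-extend the high
   32-bit lanes of B to 64 bits, add the results in 128-bit lanes,
   and report whether the final vector is all-zero.  */
int
demo (__m256i a, __m256i b)
{
  __m256i r = __lasx_xvrotri_w (a, 5);   /* ui5 immediate form */
  __m256i w = __lasx_xvexth_d_w (b);     /* widen the high words */
  __m256i s = __lasx_xvadd_q (r, w);     /* quadword (128-bit) add */
  return __lasx_xbz_v (s);               /* 1 iff every bit is clear */
}
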
diff --git a/gcc/config/loongarch/loongarch-builtins.cc b/gcc/config/loongarch/loongarch-builtins.cc
index de6428ac6..a4a7dbec9 100644
--- a/gcc/config/loongarch/loongarch-builtins.cc
+++ b/gcc/config/loongarch/loongarch-builtins.cc
@@ -74,6 +74,13 @@ enum loongarch_builtin_type
/* The function corresponds to an LSX conditional branch instruction
combined with a compare instruction. */
LARCH_BUILTIN_LSX_TEST_BRANCH,
+
+ /* The function corresponds to an LASX instruction. */
+ LARCH_BUILTIN_LASX,
+
+ /* The function corresponds to an LASX conditional branch instruction
+ combined with a compare instruction. */
+ LARCH_BUILTIN_LASX_TEST_BRANCH,
};
/* Declare an availability predicate for built-in functions that require
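The TEST_BRANCH classification matters for code generation: per the comment above, an xbz/xbnz builtin is not expanded as a value-producing compare but as a vector set-condition combined with the branch that consumes it. A hedged sketch of the source shape this serves (do_work and the loop are illustrative only):

#include <lasxintrin.h>

extern void do_work (const __m256i *);   /* hypothetical consumer */

/* Skip 256-bit blocks that are entirely zero; the test below expands
   to a set-condition plus conditional branch, not a scalar compare.  */
void
process_nonzero (const __m256i *p, int n)
{
  for (int i = 0; i < n; i++)
    if (__lasx_xbnz_v (p[i]))
      do_work (p + i);
}
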
@@ -112,6 +119,7 @@ struct loongarch_builtin_description
AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
AVAIL_ALL (lsx, ISA_HAS_LSX)
+AVAIL_ALL (lasx, ISA_HAS_LASX)
/* Construct a loongarch_builtin_description from the given arguments.
@@ -173,6 +181,30 @@ AVAIL_ALL (lsx, ISA_HAS_LSX)
"__builtin_lsx_" #INSN, LARCH_BUILTIN_DIRECT_NO_TARGET, \
FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+/* Define an LASX LARCH_BUILTIN_LASX function __builtin_lasx_<INSN>
+ for instruction CODE_FOR_lasx_<INSN>. FUNCTION_TYPE is a builtin_description
+ field. */
+#define LASX_BUILTIN(INSN, FUNCTION_TYPE) \
+ { CODE_FOR_lasx_ ## INSN, \
+ "__builtin_lasx_" #INSN, LARCH_BUILTIN_LASX, \
+ FUNCTION_TYPE, loongarch_builtin_avail_lasx }
+
+/* Define an LASX LARCH_BUILTIN_DIRECT_NO_TARGET function __builtin_lasx_<INSN>
+ for instruction CODE_FOR_lasx_<INSN>. FUNCTION_TYPE is a builtin_description
+ field. */
+#define LASX_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE) \
+ { CODE_FOR_lasx_ ## INSN, \
+ "__builtin_lasx_" #INSN, LARCH_BUILTIN_DIRECT_NO_TARGET, \
+ FUNCTION_TYPE, loongarch_builtin_avail_lasx }
+
+/* Define an LASX LARCH_BUILTIN_LASX_TEST_BRANCH function __builtin_lasx_<INSN>
+ for instruction CODE_FOR_lasx_<INSN>. FUNCTION_TYPE is a builtin_description
+ field. */
+#define LASX_BUILTIN_TEST_BRANCH(INSN, FUNCTION_TYPE) \
+ { CODE_FOR_lasx_ ## INSN, \
+ "__builtin_lasx_" #INSN, LARCH_BUILTIN_LASX_TEST_BRANCH, \
+ FUNCTION_TYPE, loongarch_builtin_avail_lasx }
+
/* LoongArch SX define CODE_FOR_lsx_xxx */
#define CODE_FOR_lsx_vsadd_b CODE_FOR_ssaddv16qi3
#define CODE_FOR_lsx_vsadd_h CODE_FOR_ssaddv8hi3
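To make the builtin table added later in this patch easier to read: each LASX_BUILTIN invocation expands to a single loongarch_builtin_description initializer. Expanding one entry mechanically from the macro above:

/* LASX_BUILTIN (xvadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI) becomes:  */
{ CODE_FOR_lasx_xvadd_b,
  "__builtin_lasx_xvadd_b", LARCH_BUILTIN_LASX,
  LARCH_V32QI_FTYPE_V32QI_V32QI, loongarch_builtin_avail_lasx }

The next hunk then remaps CODE_FOR_lasx_xvadd_b itself, so the entry ultimately refers to the generic CODE_FOR_addv32qi3 pattern.
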
@@ -442,6 +474,276 @@ AVAIL_ALL (lsx, ISA_HAS_LSX)
#define CODE_FOR_lsx_vssrlrn_hu_w CODE_FOR_lsx_vssrlrn_u_hu_w
#define CODE_FOR_lsx_vssrlrn_wu_d CODE_FOR_lsx_vssrlrn_u_wu_d
+/* LoongArch ASX define CODE_FOR_lasx_xxx */
+#define CODE_FOR_lasx_xvsadd_b CODE_FOR_ssaddv32qi3
+#define CODE_FOR_lasx_xvsadd_h CODE_FOR_ssaddv16hi3
+#define CODE_FOR_lasx_xvsadd_w CODE_FOR_ssaddv8si3
+#define CODE_FOR_lasx_xvsadd_d CODE_FOR_ssaddv4di3
+#define CODE_FOR_lasx_xvsadd_bu CODE_FOR_usaddv32qi3
+#define CODE_FOR_lasx_xvsadd_hu CODE_FOR_usaddv16hi3
+#define CODE_FOR_lasx_xvsadd_wu CODE_FOR_usaddv8si3
+#define CODE_FOR_lasx_xvsadd_du CODE_FOR_usaddv4di3
+#define CODE_FOR_lasx_xvadd_b CODE_FOR_addv32qi3
+#define CODE_FOR_lasx_xvadd_h CODE_FOR_addv16hi3
+#define CODE_FOR_lasx_xvadd_w CODE_FOR_addv8si3
+#define CODE_FOR_lasx_xvadd_d CODE_FOR_addv4di3
+#define CODE_FOR_lasx_xvaddi_bu CODE_FOR_addv32qi3
+#define CODE_FOR_lasx_xvaddi_hu CODE_FOR_addv16hi3
+#define CODE_FOR_lasx_xvaddi_wu CODE_FOR_addv8si3
+#define CODE_FOR_lasx_xvaddi_du CODE_FOR_addv4di3
+#define CODE_FOR_lasx_xvand_v CODE_FOR_andv32qi3
+#define CODE_FOR_lasx_xvandi_b CODE_FOR_andv32qi3
+#define CODE_FOR_lasx_xvbitsel_v CODE_FOR_lasx_xvbitsel_b
+#define CODE_FOR_lasx_xvseqi_b CODE_FOR_lasx_xvseq_b
+#define CODE_FOR_lasx_xvseqi_h CODE_FOR_lasx_xvseq_h
+#define CODE_FOR_lasx_xvseqi_w CODE_FOR_lasx_xvseq_w
+#define CODE_FOR_lasx_xvseqi_d CODE_FOR_lasx_xvseq_d
+#define CODE_FOR_lasx_xvslti_b CODE_FOR_lasx_xvslt_b
+#define CODE_FOR_lasx_xvslti_h CODE_FOR_lasx_xvslt_h
+#define CODE_FOR_lasx_xvslti_w CODE_FOR_lasx_xvslt_w
+#define CODE_FOR_lasx_xvslti_d CODE_FOR_lasx_xvslt_d
+#define CODE_FOR_lasx_xvslti_bu CODE_FOR_lasx_xvslt_bu
+#define CODE_FOR_lasx_xvslti_hu CODE_FOR_lasx_xvslt_hu
+#define CODE_FOR_lasx_xvslti_wu CODE_FOR_lasx_xvslt_wu
+#define CODE_FOR_lasx_xvslti_du CODE_FOR_lasx_xvslt_du
+#define CODE_FOR_lasx_xvslei_b CODE_FOR_lasx_xvsle_b
+#define CODE_FOR_lasx_xvslei_h CODE_FOR_lasx_xvsle_h
+#define CODE_FOR_lasx_xvslei_w CODE_FOR_lasx_xvsle_w
+#define CODE_FOR_lasx_xvslei_d CODE_FOR_lasx_xvsle_d
+#define CODE_FOR_lasx_xvslei_bu CODE_FOR_lasx_xvsle_bu
+#define CODE_FOR_lasx_xvslei_hu CODE_FOR_lasx_xvsle_hu
+#define CODE_FOR_lasx_xvslei_wu CODE_FOR_lasx_xvsle_wu
+#define CODE_FOR_lasx_xvslei_du CODE_FOR_lasx_xvsle_du
+#define CODE_FOR_lasx_xvdiv_b CODE_FOR_divv32qi3
+#define CODE_FOR_lasx_xvdiv_h CODE_FOR_divv16hi3
+#define CODE_FOR_lasx_xvdiv_w CODE_FOR_divv8si3
+#define CODE_FOR_lasx_xvdiv_d CODE_FOR_divv4di3
+#define CODE_FOR_lasx_xvdiv_bu CODE_FOR_udivv32qi3
+#define CODE_FOR_lasx_xvdiv_hu CODE_FOR_udivv16hi3
+#define CODE_FOR_lasx_xvdiv_wu CODE_FOR_udivv8si3
+#define CODE_FOR_lasx_xvdiv_du CODE_FOR_udivv4di3
+#define CODE_FOR_lasx_xvfadd_s CODE_FOR_addv8sf3
+#define CODE_FOR_lasx_xvfadd_d CODE_FOR_addv4df3
+#define CODE_FOR_lasx_xvftintrz_w_s CODE_FOR_fix_truncv8sfv8si2
+#define CODE_FOR_lasx_xvftintrz_l_d CODE_FOR_fix_truncv4dfv4di2
+#define CODE_FOR_lasx_xvftintrz_wu_s CODE_FOR_fixuns_truncv8sfv8si2
+#define CODE_FOR_lasx_xvftintrz_lu_d CODE_FOR_fixuns_truncv4dfv4di2
+#define CODE_FOR_lasx_xvffint_s_w CODE_FOR_floatv8siv8sf2
+#define CODE_FOR_lasx_xvffint_d_l CODE_FOR_floatv4div4df2
+#define CODE_FOR_lasx_xvffint_s_wu CODE_FOR_floatunsv8siv8sf2
+#define CODE_FOR_lasx_xvffint_d_lu CODE_FOR_floatunsv4div4df2
+#define CODE_FOR_lasx_xvfsub_s CODE_FOR_subv8sf3
+#define CODE_FOR_lasx_xvfsub_d CODE_FOR_subv4df3
+#define CODE_FOR_lasx_xvfmul_s CODE_FOR_mulv8sf3
+#define CODE_FOR_lasx_xvfmul_d CODE_FOR_mulv4df3
+#define CODE_FOR_lasx_xvfdiv_s CODE_FOR_divv8sf3
+#define CODE_FOR_lasx_xvfdiv_d CODE_FOR_divv4df3
+#define CODE_FOR_lasx_xvfmax_s CODE_FOR_smaxv8sf3
+#define CODE_FOR_lasx_xvfmax_d CODE_FOR_smaxv4df3
+#define CODE_FOR_lasx_xvfmin_s CODE_FOR_sminv8sf3
+#define CODE_FOR_lasx_xvfmin_d CODE_FOR_sminv4df3
+#define CODE_FOR_lasx_xvfsqrt_s CODE_FOR_sqrtv8sf2
+#define CODE_FOR_lasx_xvfsqrt_d CODE_FOR_sqrtv4df2
+#define CODE_FOR_lasx_xvflogb_s CODE_FOR_logbv8sf2
+#define CODE_FOR_lasx_xvflogb_d CODE_FOR_logbv4df2
+#define CODE_FOR_lasx_xvmax_b CODE_FOR_smaxv32qi3
+#define CODE_FOR_lasx_xvmax_h CODE_FOR_smaxv16hi3
+#define CODE_FOR_lasx_xvmax_w CODE_FOR_smaxv8si3
+#define CODE_FOR_lasx_xvmax_d CODE_FOR_smaxv4di3
+#define CODE_FOR_lasx_xvmaxi_b CODE_FOR_smaxv32qi3
+#define CODE_FOR_lasx_xvmaxi_h CODE_FOR_smaxv16hi3
+#define CODE_FOR_lasx_xvmaxi_w CODE_FOR_smaxv8si3
+#define CODE_FOR_lasx_xvmaxi_d CODE_FOR_smaxv4di3
+#define CODE_FOR_lasx_xvmax_bu CODE_FOR_umaxv32qi3
+#define CODE_FOR_lasx_xvmax_hu CODE_FOR_umaxv16hi3
+#define CODE_FOR_lasx_xvmax_wu CODE_FOR_umaxv8si3
+#define CODE_FOR_lasx_xvmax_du CODE_FOR_umaxv4di3
+#define CODE_FOR_lasx_xvmaxi_bu CODE_FOR_umaxv32qi3
+#define CODE_FOR_lasx_xvmaxi_hu CODE_FOR_umaxv16hi3
+#define CODE_FOR_lasx_xvmaxi_wu CODE_FOR_umaxv8si3
+#define CODE_FOR_lasx_xvmaxi_du CODE_FOR_umaxv4di3
+#define CODE_FOR_lasx_xvmin_b CODE_FOR_sminv32qi3
+#define CODE_FOR_lasx_xvmin_h CODE_FOR_sminv16hi3
+#define CODE_FOR_lasx_xvmin_w CODE_FOR_sminv8si3
+#define CODE_FOR_lasx_xvmin_d CODE_FOR_sminv4di3
+#define CODE_FOR_lasx_xvmini_b CODE_FOR_sminv32qi3
+#define CODE_FOR_lasx_xvmini_h CODE_FOR_sminv16hi3
+#define CODE_FOR_lasx_xvmini_w CODE_FOR_sminv8si3
+#define CODE_FOR_lasx_xvmini_d CODE_FOR_sminv4di3
+#define CODE_FOR_lasx_xvmin_bu CODE_FOR_uminv32qi3
+#define CODE_FOR_lasx_xvmin_hu CODE_FOR_uminv16hi3
+#define CODE_FOR_lasx_xvmin_wu CODE_FOR_uminv8si3
+#define CODE_FOR_lasx_xvmin_du CODE_FOR_uminv4di3
+#define CODE_FOR_lasx_xvmini_bu CODE_FOR_uminv32qi3
+#define CODE_FOR_lasx_xvmini_hu CODE_FOR_uminv16hi3
+#define CODE_FOR_lasx_xvmini_wu CODE_FOR_uminv8si3
+#define CODE_FOR_lasx_xvmini_du CODE_FOR_uminv4di3
+#define CODE_FOR_lasx_xvmod_b CODE_FOR_modv32qi3
+#define CODE_FOR_lasx_xvmod_h CODE_FOR_modv16hi3
+#define CODE_FOR_lasx_xvmod_w CODE_FOR_modv8si3
+#define CODE_FOR_lasx_xvmod_d CODE_FOR_modv4di3
+#define CODE_FOR_lasx_xvmod_bu CODE_FOR_umodv32qi3
+#define CODE_FOR_lasx_xvmod_hu CODE_FOR_umodv16hi3
+#define CODE_FOR_lasx_xvmod_wu CODE_FOR_umodv8si3
+#define CODE_FOR_lasx_xvmod_du CODE_FOR_umodv4di3
+#define CODE_FOR_lasx_xvmul_b CODE_FOR_mulv32qi3
+#define CODE_FOR_lasx_xvmul_h CODE_FOR_mulv16hi3
+#define CODE_FOR_lasx_xvmul_w CODE_FOR_mulv8si3
+#define CODE_FOR_lasx_xvmul_d CODE_FOR_mulv4di3
+#define CODE_FOR_lasx_xvclz_b CODE_FOR_clzv32qi2
+#define CODE_FOR_lasx_xvclz_h CODE_FOR_clzv16hi2
+#define CODE_FOR_lasx_xvclz_w CODE_FOR_clzv8si2
+#define CODE_FOR_lasx_xvclz_d CODE_FOR_clzv4di2
+#define CODE_FOR_lasx_xvnor_v CODE_FOR_lasx_xvnor_b
+#define CODE_FOR_lasx_xvor_v CODE_FOR_iorv32qi3
+#define CODE_FOR_lasx_xvori_b CODE_FOR_iorv32qi3
+#define CODE_FOR_lasx_xvnori_b CODE_FOR_lasx_xvnor_b
+#define CODE_FOR_lasx_xvpcnt_b CODE_FOR_popcountv32qi2
+#define CODE_FOR_lasx_xvpcnt_h CODE_FOR_popcountv16hi2
+#define CODE_FOR_lasx_xvpcnt_w CODE_FOR_popcountv8si2
+#define CODE_FOR_lasx_xvpcnt_d CODE_FOR_popcountv4di2
+#define CODE_FOR_lasx_xvxor_v CODE_FOR_xorv32qi3
+#define CODE_FOR_lasx_xvxori_b CODE_FOR_xorv32qi3
+#define CODE_FOR_lasx_xvsll_b CODE_FOR_vashlv32qi3
+#define CODE_FOR_lasx_xvsll_h CODE_FOR_vashlv16hi3
+#define CODE_FOR_lasx_xvsll_w CODE_FOR_vashlv8si3
+#define CODE_FOR_lasx_xvsll_d CODE_FOR_vashlv4di3
+#define CODE_FOR_lasx_xvslli_b CODE_FOR_vashlv32qi3
+#define CODE_FOR_lasx_xvslli_h CODE_FOR_vashlv16hi3
+#define CODE_FOR_lasx_xvslli_w CODE_FOR_vashlv8si3
+#define CODE_FOR_lasx_xvslli_d CODE_FOR_vashlv4di3
+#define CODE_FOR_lasx_xvsra_b CODE_FOR_vashrv32qi3
+#define CODE_FOR_lasx_xvsra_h CODE_FOR_vashrv16hi3
+#define CODE_FOR_lasx_xvsra_w CODE_FOR_vashrv8si3
+#define CODE_FOR_lasx_xvsra_d CODE_FOR_vashrv4di3
+#define CODE_FOR_lasx_xvsrai_b CODE_FOR_vashrv32qi3
+#define CODE_FOR_lasx_xvsrai_h CODE_FOR_vashrv16hi3
+#define CODE_FOR_lasx_xvsrai_w CODE_FOR_vashrv8si3
+#define CODE_FOR_lasx_xvsrai_d CODE_FOR_vashrv4di3
+#define CODE_FOR_lasx_xvsrl_b CODE_FOR_vlshrv32qi3
+#define CODE_FOR_lasx_xvsrl_h CODE_FOR_vlshrv16hi3
+#define CODE_FOR_lasx_xvsrl_w CODE_FOR_vlshrv8si3
+#define CODE_FOR_lasx_xvsrl_d CODE_FOR_vlshrv4di3
+#define CODE_FOR_lasx_xvsrli_b CODE_FOR_vlshrv32qi3
+#define CODE_FOR_lasx_xvsrli_h CODE_FOR_vlshrv16hi3
+#define CODE_FOR_lasx_xvsrli_w CODE_FOR_vlshrv8si3
+#define CODE_FOR_lasx_xvsrli_d CODE_FOR_vlshrv4di3
+#define CODE_FOR_lasx_xvsub_b CODE_FOR_subv32qi3
+#define CODE_FOR_lasx_xvsub_h CODE_FOR_subv16hi3
+#define CODE_FOR_lasx_xvsub_w CODE_FOR_subv8si3
+#define CODE_FOR_lasx_xvsub_d CODE_FOR_subv4di3
+#define CODE_FOR_lasx_xvsubi_bu CODE_FOR_subv32qi3
+#define CODE_FOR_lasx_xvsubi_hu CODE_FOR_subv16hi3
+#define CODE_FOR_lasx_xvsubi_wu CODE_FOR_subv8si3
+#define CODE_FOR_lasx_xvsubi_du CODE_FOR_subv4di3
+#define CODE_FOR_lasx_xvpackod_d CODE_FOR_lasx_xvilvh_d
+#define CODE_FOR_lasx_xvpackev_d CODE_FOR_lasx_xvilvl_d
+#define CODE_FOR_lasx_xvpickod_d CODE_FOR_lasx_xvilvh_d
+#define CODE_FOR_lasx_xvpickev_d CODE_FOR_lasx_xvilvl_d
+#define CODE_FOR_lasx_xvrepli_b CODE_FOR_lasx_xvrepliv32qi
+#define CODE_FOR_lasx_xvrepli_h CODE_FOR_lasx_xvrepliv16hi
+#define CODE_FOR_lasx_xvrepli_w CODE_FOR_lasx_xvrepliv8si
+#define CODE_FOR_lasx_xvrepli_d CODE_FOR_lasx_xvrepliv4di
+
+#define CODE_FOR_lasx_xvandn_v CODE_FOR_xvandnv32qi3
+#define CODE_FOR_lasx_xvorn_v CODE_FOR_xvornv32qi3
+#define CODE_FOR_lasx_xvneg_b CODE_FOR_negv32qi2
+#define CODE_FOR_lasx_xvneg_h CODE_FOR_negv16hi2
+#define CODE_FOR_lasx_xvneg_w CODE_FOR_negv8si2
+#define CODE_FOR_lasx_xvneg_d CODE_FOR_negv4di2
+#define CODE_FOR_lasx_xvbsrl_v CODE_FOR_lasx_xvbsrl_b
+#define CODE_FOR_lasx_xvbsll_v CODE_FOR_lasx_xvbsll_b
+#define CODE_FOR_lasx_xvfmadd_s CODE_FOR_fmav8sf4
+#define CODE_FOR_lasx_xvfmadd_d CODE_FOR_fmav4df4
+#define CODE_FOR_lasx_xvfmsub_s CODE_FOR_fmsv8sf4
+#define CODE_FOR_lasx_xvfmsub_d CODE_FOR_fmsv4df4
+#define CODE_FOR_lasx_xvfnmadd_s CODE_FOR_xvfnmaddv8sf4_nmadd4
+#define CODE_FOR_lasx_xvfnmadd_d CODE_FOR_xvfnmaddv4df4_nmadd4
+#define CODE_FOR_lasx_xvfnmsub_s CODE_FOR_xvfnmsubv8sf4_nmsub4
+#define CODE_FOR_lasx_xvfnmsub_d CODE_FOR_xvfnmsubv4df4_nmsub4
+
+#define CODE_FOR_lasx_xvpermi_q CODE_FOR_lasx_xvpermi_q_v32qi
+#define CODE_FOR_lasx_xvpermi_d CODE_FOR_lasx_xvpermi_d_v4di
+#define CODE_FOR_lasx_xbnz_v CODE_FOR_lasx_xbnz_v_b
+#define CODE_FOR_lasx_xbz_v CODE_FOR_lasx_xbz_v_b
+
+#define CODE_FOR_lasx_xvssub_b CODE_FOR_lasx_xvssub_s_b
+#define CODE_FOR_lasx_xvssub_h CODE_FOR_lasx_xvssub_s_h
+#define CODE_FOR_lasx_xvssub_w CODE_FOR_lasx_xvssub_s_w
+#define CODE_FOR_lasx_xvssub_d CODE_FOR_lasx_xvssub_s_d
+#define CODE_FOR_lasx_xvssub_bu CODE_FOR_lasx_xvssub_u_bu
+#define CODE_FOR_lasx_xvssub_hu CODE_FOR_lasx_xvssub_u_hu
+#define CODE_FOR_lasx_xvssub_wu CODE_FOR_lasx_xvssub_u_wu
+#define CODE_FOR_lasx_xvssub_du CODE_FOR_lasx_xvssub_u_du
+#define CODE_FOR_lasx_xvabsd_b CODE_FOR_lasx_xvabsd_s_b
+#define CODE_FOR_lasx_xvabsd_h CODE_FOR_lasx_xvabsd_s_h
+#define CODE_FOR_lasx_xvabsd_w CODE_FOR_lasx_xvabsd_s_w
+#define CODE_FOR_lasx_xvabsd_d CODE_FOR_lasx_xvabsd_s_d
+#define CODE_FOR_lasx_xvabsd_bu CODE_FOR_lasx_xvabsd_u_bu
+#define CODE_FOR_lasx_xvabsd_hu CODE_FOR_lasx_xvabsd_u_hu
+#define CODE_FOR_lasx_xvabsd_wu CODE_FOR_lasx_xvabsd_u_wu
+#define CODE_FOR_lasx_xvabsd_du CODE_FOR_lasx_xvabsd_u_du
+#define CODE_FOR_lasx_xvavg_b CODE_FOR_lasx_xvavg_s_b
+#define CODE_FOR_lasx_xvavg_h CODE_FOR_lasx_xvavg_s_h
+#define CODE_FOR_lasx_xvavg_w CODE_FOR_lasx_xvavg_s_w
+#define CODE_FOR_lasx_xvavg_d CODE_FOR_lasx_xvavg_s_d
+#define CODE_FOR_lasx_xvavg_bu CODE_FOR_lasx_xvavg_u_bu
+#define CODE_FOR_lasx_xvavg_hu CODE_FOR_lasx_xvavg_u_hu
+#define CODE_FOR_lasx_xvavg_wu CODE_FOR_lasx_xvavg_u_wu
+#define CODE_FOR_lasx_xvavg_du CODE_FOR_lasx_xvavg_u_du
+#define CODE_FOR_lasx_xvavgr_b CODE_FOR_lasx_xvavgr_s_b
+#define CODE_FOR_lasx_xvavgr_h CODE_FOR_lasx_xvavgr_s_h
+#define CODE_FOR_lasx_xvavgr_w CODE_FOR_lasx_xvavgr_s_w
+#define CODE_FOR_lasx_xvavgr_d CODE_FOR_lasx_xvavgr_s_d
+#define CODE_FOR_lasx_xvavgr_bu CODE_FOR_lasx_xvavgr_u_bu
+#define CODE_FOR_lasx_xvavgr_hu CODE_FOR_lasx_xvavgr_u_hu
+#define CODE_FOR_lasx_xvavgr_wu CODE_FOR_lasx_xvavgr_u_wu
+#define CODE_FOR_lasx_xvavgr_du CODE_FOR_lasx_xvavgr_u_du
+#define CODE_FOR_lasx_xvmuh_b CODE_FOR_lasx_xvmuh_s_b
+#define CODE_FOR_lasx_xvmuh_h CODE_FOR_lasx_xvmuh_s_h
+#define CODE_FOR_lasx_xvmuh_w CODE_FOR_lasx_xvmuh_s_w
+#define CODE_FOR_lasx_xvmuh_d CODE_FOR_lasx_xvmuh_s_d
+#define CODE_FOR_lasx_xvmuh_bu CODE_FOR_lasx_xvmuh_u_bu
+#define CODE_FOR_lasx_xvmuh_hu CODE_FOR_lasx_xvmuh_u_hu
+#define CODE_FOR_lasx_xvmuh_wu CODE_FOR_lasx_xvmuh_u_wu
+#define CODE_FOR_lasx_xvmuh_du CODE_FOR_lasx_xvmuh_u_du
+#define CODE_FOR_lasx_xvssran_b_h CODE_FOR_lasx_xvssran_s_b_h
+#define CODE_FOR_lasx_xvssran_h_w CODE_FOR_lasx_xvssran_s_h_w
+#define CODE_FOR_lasx_xvssran_w_d CODE_FOR_lasx_xvssran_s_w_d
+#define CODE_FOR_lasx_xvssran_bu_h CODE_FOR_lasx_xvssran_u_bu_h
+#define CODE_FOR_lasx_xvssran_hu_w CODE_FOR_lasx_xvssran_u_hu_w
+#define CODE_FOR_lasx_xvssran_wu_d CODE_FOR_lasx_xvssran_u_wu_d
+#define CODE_FOR_lasx_xvssrarn_b_h CODE_FOR_lasx_xvssrarn_s_b_h
+#define CODE_FOR_lasx_xvssrarn_h_w CODE_FOR_lasx_xvssrarn_s_h_w
+#define CODE_FOR_lasx_xvssrarn_w_d CODE_FOR_lasx_xvssrarn_s_w_d
+#define CODE_FOR_lasx_xvssrarn_bu_h CODE_FOR_lasx_xvssrarn_u_bu_h
+#define CODE_FOR_lasx_xvssrarn_hu_w CODE_FOR_lasx_xvssrarn_u_hu_w
+#define CODE_FOR_lasx_xvssrarn_wu_d CODE_FOR_lasx_xvssrarn_u_wu_d
+#define CODE_FOR_lasx_xvssrln_bu_h CODE_FOR_lasx_xvssrln_u_bu_h
+#define CODE_FOR_lasx_xvssrln_hu_w CODE_FOR_lasx_xvssrln_u_hu_w
+#define CODE_FOR_lasx_xvssrln_wu_d CODE_FOR_lasx_xvssrln_u_wu_d
+#define CODE_FOR_lasx_xvssrlrn_bu_h CODE_FOR_lasx_xvssrlrn_u_bu_h
+#define CODE_FOR_lasx_xvssrlrn_hu_w CODE_FOR_lasx_xvssrlrn_u_hu_w
+#define CODE_FOR_lasx_xvssrlrn_wu_d CODE_FOR_lasx_xvssrlrn_u_wu_d
+#define CODE_FOR_lasx_xvftint_w_s CODE_FOR_lasx_xvftint_s_w_s
+#define CODE_FOR_lasx_xvftint_l_d CODE_FOR_lasx_xvftint_s_l_d
+#define CODE_FOR_lasx_xvftint_wu_s CODE_FOR_lasx_xvftint_u_wu_s
+#define CODE_FOR_lasx_xvftint_lu_d CODE_FOR_lasx_xvftint_u_lu_d
+#define CODE_FOR_lasx_xvsllwil_h_b CODE_FOR_lasx_xvsllwil_s_h_b
+#define CODE_FOR_lasx_xvsllwil_w_h CODE_FOR_lasx_xvsllwil_s_w_h
+#define CODE_FOR_lasx_xvsllwil_d_w CODE_FOR_lasx_xvsllwil_s_d_w
+#define CODE_FOR_lasx_xvsllwil_hu_bu CODE_FOR_lasx_xvsllwil_u_hu_bu
+#define CODE_FOR_lasx_xvsllwil_wu_hu CODE_FOR_lasx_xvsllwil_u_wu_hu
+#define CODE_FOR_lasx_xvsllwil_du_wu CODE_FOR_lasx_xvsllwil_u_du_wu
+#define CODE_FOR_lasx_xvsat_b CODE_FOR_lasx_xvsat_s_b
+#define CODE_FOR_lasx_xvsat_h CODE_FOR_lasx_xvsat_s_h
+#define CODE_FOR_lasx_xvsat_w CODE_FOR_lasx_xvsat_s_w
+#define CODE_FOR_lasx_xvsat_d CODE_FOR_lasx_xvsat_s_d
+#define CODE_FOR_lasx_xvsat_bu CODE_FOR_lasx_xvsat_u_bu
+#define CODE_FOR_lasx_xvsat_hu CODE_FOR_lasx_xvsat_u_hu
+#define CODE_FOR_lasx_xvsat_wu CODE_FOR_lasx_xvsat_u_wu
+#define CODE_FOR_lasx_xvsat_du CODE_FOR_lasx_xvsat_u_du
+
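The defines above are what let most of these builtins reuse target-independent RTL patterns: the table entries below name CODE_FOR_lasx_<INSN>, and where a define exists that token is rewritten to an existing standard pattern before it is ever looked up. A hedged sketch of the user-visible consequence (function and variable names are illustrative, and v32i8 is the vector typedef from lasxintrin.h):

#include <lasxintrin.h>

/* With CODE_FOR_lasx_xvadd_b defined to CODE_FOR_addv32qi3, the
   builtin and the generic vector operator expand through the same
   V32QI add pattern.  */
__m256i
add_bytes (__m256i a, __m256i b)
{
  __m256i r1 = __lasx_xvadd_b (a, b);
  __m256i r2 = (__m256i) ((v32i8) a + (v32i8) b);
  return __lasx_xvadd_b (r1, r2);
}
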
static const struct loongarch_builtin_description loongarch_builtins[] = {
#define LARCH_MOVFCSR2GR 0
DIRECT_BUILTIN (movfcsr2gr, LARCH_USI_FTYPE_UQI, hard_float),
@@ -1209,7 +1511,761 @@ static const struct loongarch_builtin_description loongarch_builtins[] = {
LSX_BUILTIN (vshuf_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
LSX_BUILTIN (vldx, LARCH_V16QI_FTYPE_CVPOINTER_DI),
LSX_NO_TARGET_BUILTIN (vstx, LARCH_VOID_FTYPE_V16QI_CVPOINTER_DI),
- LSX_BUILTIN (vextl_qu_du, LARCH_UV2DI_FTYPE_UV2DI)
+ LSX_BUILTIN (vextl_qu_du, LARCH_UV2DI_FTYPE_UV2DI),
+
+ /* Built-in functions for LASX. */
+ LASX_BUILTIN (xvsll_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsll_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsll_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsll_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvslli_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvslli_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvslli_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvslli_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvsra_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsra_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsra_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsra_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsrai_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsrai_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsrai_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsrai_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvsrar_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsrar_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsrar_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsrar_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsrari_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsrari_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsrari_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsrari_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvsrl_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsrl_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsrl_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsrl_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsrli_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsrli_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsrli_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsrli_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvsrlr_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsrlr_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsrlr_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsrlr_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsrlri_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsrlri_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsrlri_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsrlri_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvbitclr_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvbitclr_h, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvbitclr_w, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvbitclr_d, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvbitclri_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvbitclri_h, LARCH_UV16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvbitclri_w, LARCH_UV8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvbitclri_d, LARCH_UV4DI_FTYPE_UV4DI_UQI),
+ LASX_BUILTIN (xvbitset_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvbitset_h, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvbitset_w, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvbitset_d, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvbitseti_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvbitseti_h, LARCH_UV16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvbitseti_w, LARCH_UV8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvbitseti_d, LARCH_UV4DI_FTYPE_UV4DI_UQI),
+ LASX_BUILTIN (xvbitrev_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvbitrev_h, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvbitrev_w, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvbitrev_d, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvbitrevi_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvbitrevi_h, LARCH_UV16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvbitrevi_w, LARCH_UV8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvbitrevi_d, LARCH_UV4DI_FTYPE_UV4DI_UQI),
+ LASX_BUILTIN (xvadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvadd_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvadd_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvadd_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvaddi_bu, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvaddi_hu, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvaddi_wu, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvaddi_du, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvsub_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsub_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsub_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsub_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsubi_bu, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsubi_hu, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsubi_wu, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsubi_du, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvmax_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmax_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmax_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmax_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmaxi_b, LARCH_V32QI_FTYPE_V32QI_QI),
+ LASX_BUILTIN (xvmaxi_h, LARCH_V16HI_FTYPE_V16HI_QI),
+ LASX_BUILTIN (xvmaxi_w, LARCH_V8SI_FTYPE_V8SI_QI),
+ LASX_BUILTIN (xvmaxi_d, LARCH_V4DI_FTYPE_V4DI_QI),
+ LASX_BUILTIN (xvmax_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmax_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmax_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmax_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmaxi_bu, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvmaxi_hu, LARCH_UV16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvmaxi_wu, LARCH_UV8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvmaxi_du, LARCH_UV4DI_FTYPE_UV4DI_UQI),
+ LASX_BUILTIN (xvmin_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmin_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmin_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmin_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmini_b, LARCH_V32QI_FTYPE_V32QI_QI),
+ LASX_BUILTIN (xvmini_h, LARCH_V16HI_FTYPE_V16HI_QI),
+ LASX_BUILTIN (xvmini_w, LARCH_V8SI_FTYPE_V8SI_QI),
+ LASX_BUILTIN (xvmini_d, LARCH_V4DI_FTYPE_V4DI_QI),
+ LASX_BUILTIN (xvmin_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmin_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmin_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmin_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmini_bu, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvmini_hu, LARCH_UV16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvmini_wu, LARCH_UV8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvmini_du, LARCH_UV4DI_FTYPE_UV4DI_UQI),
+ LASX_BUILTIN (xvseq_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvseq_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvseq_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvseq_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvseqi_b, LARCH_V32QI_FTYPE_V32QI_QI),
+ LASX_BUILTIN (xvseqi_h, LARCH_V16HI_FTYPE_V16HI_QI),
+ LASX_BUILTIN (xvseqi_w, LARCH_V8SI_FTYPE_V8SI_QI),
+ LASX_BUILTIN (xvseqi_d, LARCH_V4DI_FTYPE_V4DI_QI),
+ LASX_BUILTIN (xvslt_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvslt_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvslt_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvslt_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvslti_b, LARCH_V32QI_FTYPE_V32QI_QI),
+ LASX_BUILTIN (xvslti_h, LARCH_V16HI_FTYPE_V16HI_QI),
+ LASX_BUILTIN (xvslti_w, LARCH_V8SI_FTYPE_V8SI_QI),
+ LASX_BUILTIN (xvslti_d, LARCH_V4DI_FTYPE_V4DI_QI),
+ LASX_BUILTIN (xvslt_bu, LARCH_V32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvslt_hu, LARCH_V16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvslt_wu, LARCH_V8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvslt_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvslti_bu, LARCH_V32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvslti_hu, LARCH_V16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvslti_wu, LARCH_V8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvslti_du, LARCH_V4DI_FTYPE_UV4DI_UQI),
+ LASX_BUILTIN (xvsle_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsle_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsle_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsle_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvslei_b, LARCH_V32QI_FTYPE_V32QI_QI),
+ LASX_BUILTIN (xvslei_h, LARCH_V16HI_FTYPE_V16HI_QI),
+ LASX_BUILTIN (xvslei_w, LARCH_V8SI_FTYPE_V8SI_QI),
+ LASX_BUILTIN (xvslei_d, LARCH_V4DI_FTYPE_V4DI_QI),
+ LASX_BUILTIN (xvsle_bu, LARCH_V32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvsle_hu, LARCH_V16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvsle_wu, LARCH_V8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvsle_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvslei_bu, LARCH_V32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvslei_hu, LARCH_V16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvslei_wu, LARCH_V8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvslei_du, LARCH_V4DI_FTYPE_UV4DI_UQI),
+
+ LASX_BUILTIN (xvsat_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsat_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsat_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsat_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvsat_bu, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvsat_hu, LARCH_UV16HI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvsat_wu, LARCH_UV8SI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvsat_du, LARCH_UV4DI_FTYPE_UV4DI_UQI),
+
+ LASX_BUILTIN (xvadda_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvadda_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvadda_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvadda_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsadd_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsadd_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsadd_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsadd_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvsadd_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvsadd_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvsadd_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+
+ LASX_BUILTIN (xvavg_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvavg_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvavg_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvavg_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvavg_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvavg_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvavg_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvavg_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+
+ LASX_BUILTIN (xvavgr_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvavgr_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvavgr_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvavgr_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvavgr_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvavgr_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvavgr_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvavgr_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+
+ LASX_BUILTIN (xvssub_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvssub_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvssub_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvssub_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssub_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvssub_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvssub_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvssub_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvabsd_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvabsd_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvabsd_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvabsd_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvabsd_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvabsd_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvabsd_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvabsd_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+
+ LASX_BUILTIN (xvmul_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmul_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmul_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmul_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI),
+ LASX_BUILTIN (xvmadd_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI),
+ LASX_BUILTIN (xvmadd_w, LARCH_V8SI_FTYPE_V8SI_V8SI_V8SI),
+ LASX_BUILTIN (xvmadd_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI),
+ LASX_BUILTIN (xvmsub_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI),
+ LASX_BUILTIN (xvmsub_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI),
+ LASX_BUILTIN (xvmsub_w, LARCH_V8SI_FTYPE_V8SI_V8SI_V8SI),
+ LASX_BUILTIN (xvmsub_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI),
+ LASX_BUILTIN (xvdiv_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvdiv_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvdiv_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvdiv_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvdiv_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvdiv_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvdiv_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvdiv_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvhaddw_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvhaddw_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvhaddw_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvhaddw_hu_bu, LARCH_UV16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvhaddw_wu_hu, LARCH_UV8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvhaddw_du_wu, LARCH_UV4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvhsubw_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvhsubw_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvhsubw_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvhsubw_hu_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvhsubw_wu_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvhsubw_du_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmod_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmod_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmod_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmod_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmod_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmod_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmod_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmod_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+
+ LASX_BUILTIN (xvrepl128vei_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvrepl128vei_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvrepl128vei_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvrepl128vei_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvpickev_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvpickev_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvpickev_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvpickev_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvpickod_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvpickod_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvpickod_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvpickod_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvilvh_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvilvh_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvilvh_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvilvh_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvilvl_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvilvl_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvilvl_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvilvl_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvpackev_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvpackev_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvpackev_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvpackev_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvpackod_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvpackod_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvpackod_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvpackod_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvshuf_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI),
+ LASX_BUILTIN (xvshuf_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI),
+ LASX_BUILTIN (xvshuf_w, LARCH_V8SI_FTYPE_V8SI_V8SI_V8SI),
+ LASX_BUILTIN (xvshuf_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI),
+ LASX_BUILTIN (xvand_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvandi_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvor_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvori_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvnor_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvnori_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvxor_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvxori_b, LARCH_UV32QI_FTYPE_UV32QI_UQI),
+ LASX_BUILTIN (xvbitsel_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI_UV32QI),
+ LASX_BUILTIN (xvbitseli_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI_USI),
+
+ LASX_BUILTIN (xvshuf4i_b, LARCH_V32QI_FTYPE_V32QI_USI),
+ LASX_BUILTIN (xvshuf4i_h, LARCH_V16HI_FTYPE_V16HI_USI),
+ LASX_BUILTIN (xvshuf4i_w, LARCH_V8SI_FTYPE_V8SI_USI),
+
+ LASX_BUILTIN (xvreplgr2vr_b, LARCH_V32QI_FTYPE_SI),
+ LASX_BUILTIN (xvreplgr2vr_h, LARCH_V16HI_FTYPE_SI),
+ LASX_BUILTIN (xvreplgr2vr_w, LARCH_V8SI_FTYPE_SI),
+ LASX_BUILTIN (xvreplgr2vr_d, LARCH_V4DI_FTYPE_DI),
+ LASX_BUILTIN (xvpcnt_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvpcnt_h, LARCH_V16HI_FTYPE_V16HI),
+ LASX_BUILTIN (xvpcnt_w, LARCH_V8SI_FTYPE_V8SI),
+ LASX_BUILTIN (xvpcnt_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvclo_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvclo_h, LARCH_V16HI_FTYPE_V16HI),
+ LASX_BUILTIN (xvclo_w, LARCH_V8SI_FTYPE_V8SI),
+ LASX_BUILTIN (xvclo_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvclz_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvclz_h, LARCH_V16HI_FTYPE_V16HI),
+ LASX_BUILTIN (xvclz_w, LARCH_V8SI_FTYPE_V8SI),
+ LASX_BUILTIN (xvclz_d, LARCH_V4DI_FTYPE_V4DI),
+
+ LASX_BUILTIN (xvrepli_b, LARCH_V32QI_FTYPE_HI),
+ LASX_BUILTIN (xvrepli_h, LARCH_V16HI_FTYPE_HI),
+ LASX_BUILTIN (xvrepli_w, LARCH_V8SI_FTYPE_HI),
+ LASX_BUILTIN (xvrepli_d, LARCH_V4DI_FTYPE_HI),
+ LASX_BUILTIN (xvfcmp_caf_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_caf_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cor_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cor_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cun_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cun_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cune_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cune_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cueq_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cueq_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_ceq_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_ceq_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cne_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cne_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_clt_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_clt_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cult_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cult_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cle_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cle_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_cule_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_cule_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_saf_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_saf_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sor_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sor_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sun_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sun_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sune_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sune_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sueq_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sueq_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_seq_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_seq_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sne_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sne_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_slt_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_slt_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sult_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sult_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sle_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sle_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcmp_sule_s, LARCH_V8SI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcmp_sule_d, LARCH_V4DI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfadd_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfadd_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfsub_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfsub_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfmul_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfmul_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfdiv_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfdiv_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfcvt_h_s, LARCH_V16HI_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfcvt_s_d, LARCH_V8SF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfmin_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfmin_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfmina_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfmina_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfmax_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfmax_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfmaxa_s, LARCH_V8SF_FTYPE_V8SF_V8SF),
+ LASX_BUILTIN (xvfmaxa_d, LARCH_V4DF_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvfclass_s, LARCH_V8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvfclass_d, LARCH_V4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvfsqrt_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfsqrt_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfrecip_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrecip_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfrint_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrint_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfrsqrt_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrsqrt_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvflogb_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvflogb_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfcvth_s_h, LARCH_V8SF_FTYPE_V16HI),
+ LASX_BUILTIN (xvfcvth_d_s, LARCH_V4DF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfcvtl_s_h, LARCH_V8SF_FTYPE_V16HI),
+ LASX_BUILTIN (xvfcvtl_d_s, LARCH_V4DF_FTYPE_V8SF),
+ LASX_BUILTIN (xvftint_w_s, LARCH_V8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftint_l_d, LARCH_V4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvftint_wu_s, LARCH_UV8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftint_lu_d, LARCH_UV4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvftintrz_w_s, LARCH_V8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrz_l_d, LARCH_V4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvftintrz_wu_s, LARCH_UV8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrz_lu_d, LARCH_UV4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvffint_s_w, LARCH_V8SF_FTYPE_V8SI),
+ LASX_BUILTIN (xvffint_d_l, LARCH_V4DF_FTYPE_V4DI),
+ LASX_BUILTIN (xvffint_s_wu, LARCH_V8SF_FTYPE_UV8SI),
+ LASX_BUILTIN (xvffint_d_lu, LARCH_V4DF_FTYPE_UV4DI),
+
+ LASX_BUILTIN (xvreplve_b, LARCH_V32QI_FTYPE_V32QI_SI),
+ LASX_BUILTIN (xvreplve_h, LARCH_V16HI_FTYPE_V16HI_SI),
+ LASX_BUILTIN (xvreplve_w, LARCH_V8SI_FTYPE_V8SI_SI),
+ LASX_BUILTIN (xvreplve_d, LARCH_V4DI_FTYPE_V4DI_SI),
+ LASX_BUILTIN (xvpermi_w, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+
+ LASX_BUILTIN (xvandn_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvneg_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvneg_h, LARCH_V16HI_FTYPE_V16HI),
+ LASX_BUILTIN (xvneg_w, LARCH_V8SI_FTYPE_V8SI),
+ LASX_BUILTIN (xvneg_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvmuh_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmuh_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmuh_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmuh_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmuh_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmuh_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmuh_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmuh_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvsllwil_h_b, LARCH_V16HI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvsllwil_w_h, LARCH_V8SI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvsllwil_d_w, LARCH_V4DI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvsllwil_hu_bu, LARCH_UV16HI_FTYPE_UV32QI_UQI), /* FIXME: U? */
+ LASX_BUILTIN (xvsllwil_wu_hu, LARCH_UV8SI_FTYPE_UV16HI_UQI),
+ LASX_BUILTIN (xvsllwil_du_wu, LARCH_UV4DI_FTYPE_UV8SI_UQI),
+ LASX_BUILTIN (xvsran_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsran_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsran_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssran_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvssran_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvssran_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssran_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvssran_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvssran_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvsrarn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsrarn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsrarn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssrarn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvssrarn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvssrarn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssrarn_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvssrarn_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvssrarn_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvsrln_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsrln_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsrln_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssrln_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvssrln_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvssrln_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvsrlrn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsrlrn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsrlrn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssrlrn_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvssrlrn_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvssrlrn_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvfrstpi_b, LARCH_V32QI_FTYPE_V32QI_V32QI_UQI),
+ LASX_BUILTIN (xvfrstpi_h, LARCH_V16HI_FTYPE_V16HI_V16HI_UQI),
+ LASX_BUILTIN (xvfrstp_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI),
+ LASX_BUILTIN (xvfrstp_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI),
+ LASX_BUILTIN (xvshuf4i_d, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvbsrl_v, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvbsll_v, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvextrins_b, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvextrins_h, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvextrins_w, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvextrins_d, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvmskltz_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvmskltz_h, LARCH_V16HI_FTYPE_V16HI),
+ LASX_BUILTIN (xvmskltz_w, LARCH_V8SI_FTYPE_V8SI),
+ LASX_BUILTIN (xvmskltz_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvsigncov_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsigncov_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsigncov_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsigncov_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvfmadd_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF),
+ LASX_BUILTIN (xvfmadd_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF),
+ LASX_BUILTIN (xvfmsub_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF),
+ LASX_BUILTIN (xvfmsub_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF),
+ LASX_BUILTIN (xvfnmadd_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF),
+ LASX_BUILTIN (xvfnmadd_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF),
+ LASX_BUILTIN (xvfnmsub_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF),
+ LASX_BUILTIN (xvfnmsub_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF),
+ LASX_BUILTIN (xvftintrne_w_s, LARCH_V8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrne_l_d, LARCH_V4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvftintrp_w_s, LARCH_V8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrp_l_d, LARCH_V4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvftintrm_w_s, LARCH_V8SI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrm_l_d, LARCH_V4DI_FTYPE_V4DF),
+ LASX_BUILTIN (xvftint_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvffint_s_l, LARCH_V8SF_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvftintrz_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvftintrp_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvftintrm_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvftintrne_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF),
+ LASX_BUILTIN (xvftinth_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintl_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvffinth_d_w, LARCH_V4DF_FTYPE_V8SI),
+ LASX_BUILTIN (xvffintl_d_w, LARCH_V4DF_FTYPE_V8SI),
+ LASX_BUILTIN (xvftintrzh_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrzl_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrph_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrpl_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrmh_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrml_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrneh_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvftintrnel_l_s, LARCH_V4DI_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrintrne_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrintrne_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfrintrz_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrintrz_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfrintrp_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrintrp_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvfrintrm_s, LARCH_V8SF_FTYPE_V8SF),
+ LASX_BUILTIN (xvfrintrm_d, LARCH_V4DF_FTYPE_V4DF),
+ LASX_BUILTIN (xvld, LARCH_V32QI_FTYPE_CVPOINTER_SI),
+ LASX_NO_TARGET_BUILTIN (xvst, LARCH_VOID_FTYPE_V32QI_CVPOINTER_SI),
+ LASX_NO_TARGET_BUILTIN (xvstelm_b, LARCH_VOID_FTYPE_V32QI_CVPOINTER_SI_UQI),
+ LASX_NO_TARGET_BUILTIN (xvstelm_h, LARCH_VOID_FTYPE_V16HI_CVPOINTER_SI_UQI),
+ LASX_NO_TARGET_BUILTIN (xvstelm_w, LARCH_VOID_FTYPE_V8SI_CVPOINTER_SI_UQI),
+ LASX_NO_TARGET_BUILTIN (xvstelm_d, LARCH_VOID_FTYPE_V4DI_CVPOINTER_SI_UQI),
+ LASX_BUILTIN (xvinsve0_w, LARCH_V8SI_FTYPE_V8SI_V8SI_UQI),
+ LASX_BUILTIN (xvinsve0_d, LARCH_V4DI_FTYPE_V4DI_V4DI_UQI),
+ LASX_BUILTIN (xvpickve_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvpickve_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvpickve_w_f, LARCH_V8SF_FTYPE_V8SF_UQI),
+ LASX_BUILTIN (xvpickve_d_f, LARCH_V4DF_FTYPE_V4DF_UQI),
+ LASX_BUILTIN (xvssrlrn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvssrlrn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvssrlrn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvssrln_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvssrln_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvssrln_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvorn_v, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvldi, LARCH_V4DI_FTYPE_HI),
+ LASX_BUILTIN (xvldx, LARCH_V32QI_FTYPE_CVPOINTER_DI),
+ LASX_NO_TARGET_BUILTIN (xvstx, LARCH_VOID_FTYPE_V32QI_CVPOINTER_DI),
+ LASX_BUILTIN (xvextl_qu_du, LARCH_UV4DI_FTYPE_UV4DI),
+
+ /* LASX */
+ LASX_BUILTIN (xvinsgr2vr_w, LARCH_V8SI_FTYPE_V8SI_SI_UQI),
+ LASX_BUILTIN (xvinsgr2vr_d, LARCH_V4DI_FTYPE_V4DI_DI_UQI),
+
+ LASX_BUILTIN (xvreplve0_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvreplve0_h, LARCH_V16HI_FTYPE_V16HI),
+ LASX_BUILTIN (xvreplve0_w, LARCH_V8SI_FTYPE_V8SI),
+ LASX_BUILTIN (xvreplve0_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvreplve0_q, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (vext2xv_h_b, LARCH_V16HI_FTYPE_V32QI),
+ LASX_BUILTIN (vext2xv_w_h, LARCH_V8SI_FTYPE_V16HI),
+ LASX_BUILTIN (vext2xv_d_w, LARCH_V4DI_FTYPE_V8SI),
+ LASX_BUILTIN (vext2xv_w_b, LARCH_V8SI_FTYPE_V32QI),
+ LASX_BUILTIN (vext2xv_d_h, LARCH_V4DI_FTYPE_V16HI),
+ LASX_BUILTIN (vext2xv_d_b, LARCH_V4DI_FTYPE_V32QI),
+ LASX_BUILTIN (vext2xv_hu_bu, LARCH_V16HI_FTYPE_V32QI),
+ LASX_BUILTIN (vext2xv_wu_hu, LARCH_V8SI_FTYPE_V16HI),
+ LASX_BUILTIN (vext2xv_du_wu, LARCH_V4DI_FTYPE_V8SI),
+ LASX_BUILTIN (vext2xv_wu_bu, LARCH_V8SI_FTYPE_V32QI),
+ LASX_BUILTIN (vext2xv_du_hu, LARCH_V4DI_FTYPE_V16HI),
+ LASX_BUILTIN (vext2xv_du_bu, LARCH_V4DI_FTYPE_V32QI),
+ LASX_BUILTIN (xvpermi_q, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvpermi_d, LARCH_V4DI_FTYPE_V4DI_USI),
+ LASX_BUILTIN (xvperm_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN_TEST_BRANCH (xbz_b, LARCH_SI_FTYPE_UV32QI),
+ LASX_BUILTIN_TEST_BRANCH (xbz_h, LARCH_SI_FTYPE_UV16HI),
+ LASX_BUILTIN_TEST_BRANCH (xbz_w, LARCH_SI_FTYPE_UV8SI),
+ LASX_BUILTIN_TEST_BRANCH (xbz_d, LARCH_SI_FTYPE_UV4DI),
+ LASX_BUILTIN_TEST_BRANCH (xbnz_b, LARCH_SI_FTYPE_UV32QI),
+ LASX_BUILTIN_TEST_BRANCH (xbnz_h, LARCH_SI_FTYPE_UV16HI),
+ LASX_BUILTIN_TEST_BRANCH (xbnz_w, LARCH_SI_FTYPE_UV8SI),
+ LASX_BUILTIN_TEST_BRANCH (xbnz_d, LARCH_SI_FTYPE_UV4DI),
+ LASX_BUILTIN_TEST_BRANCH (xbz_v, LARCH_SI_FTYPE_UV32QI),
+ LASX_BUILTIN_TEST_BRANCH (xbnz_v, LARCH_SI_FTYPE_UV32QI),
+ LASX_BUILTIN (xvldrepl_b, LARCH_V32QI_FTYPE_CVPOINTER_SI),
+ LASX_BUILTIN (xvldrepl_h, LARCH_V16HI_FTYPE_CVPOINTER_SI),
+ LASX_BUILTIN (xvldrepl_w, LARCH_V8SI_FTYPE_CVPOINTER_SI),
+ LASX_BUILTIN (xvldrepl_d, LARCH_V4DI_FTYPE_CVPOINTER_SI),
+ LASX_BUILTIN (xvpickve2gr_w, LARCH_SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvpickve2gr_wu, LARCH_USI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvpickve2gr_d, LARCH_DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvpickve2gr_du, LARCH_UDI_FTYPE_V4DI_UQI),
+
+ LASX_BUILTIN (xvaddwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvaddwev_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvaddwev_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvaddwev_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvaddwev_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvaddwev_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvaddwev_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvaddwev_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvsubwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsubwev_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsubwev_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsubwev_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsubwev_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvsubwev_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvsubwev_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvsubwev_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmulwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmulwev_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmulwev_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmulwev_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmulwev_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmulwev_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmulwev_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmulwev_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvaddwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvaddwod_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvaddwod_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvaddwod_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvaddwod_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvaddwod_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvaddwod_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvaddwod_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvsubwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsubwod_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvsubwod_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvsubwod_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvsubwod_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvsubwod_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvsubwod_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvsubwod_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmulwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvmulwod_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvmulwod_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvmulwod_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvmulwod_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmulwod_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmulwod_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmulwod_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI),
+ LASX_BUILTIN (xvaddwev_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI),
+ LASX_BUILTIN (xvaddwev_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI),
+ LASX_BUILTIN (xvaddwev_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI),
+ LASX_BUILTIN (xvmulwev_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI),
+ LASX_BUILTIN (xvmulwev_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI),
+ LASX_BUILTIN (xvmulwev_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI),
+ LASX_BUILTIN (xvaddwod_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI),
+ LASX_BUILTIN (xvaddwod_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI),
+ LASX_BUILTIN (xvaddwod_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI),
+ LASX_BUILTIN (xvmulwod_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI),
+ LASX_BUILTIN (xvmulwod_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI),
+ LASX_BUILTIN (xvmulwod_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI),
+ LASX_BUILTIN (xvhaddw_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvhaddw_qu_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvhsubw_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvhsubw_qu_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmaddwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI),
+ LASX_BUILTIN (xvmaddwev_d_w, LARCH_V4DI_FTYPE_V4DI_V8SI_V8SI),
+ LASX_BUILTIN (xvmaddwev_w_h, LARCH_V8SI_FTYPE_V8SI_V16HI_V16HI),
+ LASX_BUILTIN (xvmaddwev_h_b, LARCH_V16HI_FTYPE_V16HI_V32QI_V32QI),
+ LASX_BUILTIN (xvmaddwev_q_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmaddwev_d_wu, LARCH_UV4DI_FTYPE_UV4DI_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmaddwev_w_hu, LARCH_UV8SI_FTYPE_UV8SI_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmaddwev_h_bu, LARCH_UV16HI_FTYPE_UV16HI_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmaddwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI),
+ LASX_BUILTIN (xvmaddwod_d_w, LARCH_V4DI_FTYPE_V4DI_V8SI_V8SI),
+ LASX_BUILTIN (xvmaddwod_w_h, LARCH_V8SI_FTYPE_V8SI_V16HI_V16HI),
+ LASX_BUILTIN (xvmaddwod_h_b, LARCH_V16HI_FTYPE_V16HI_V32QI_V32QI),
+ LASX_BUILTIN (xvmaddwod_q_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI_UV4DI),
+ LASX_BUILTIN (xvmaddwod_d_wu, LARCH_UV4DI_FTYPE_UV4DI_UV8SI_UV8SI),
+ LASX_BUILTIN (xvmaddwod_w_hu, LARCH_UV8SI_FTYPE_UV8SI_UV16HI_UV16HI),
+ LASX_BUILTIN (xvmaddwod_h_bu, LARCH_UV16HI_FTYPE_UV16HI_UV32QI_UV32QI),
+ LASX_BUILTIN (xvmaddwev_q_du_d, LARCH_V4DI_FTYPE_V4DI_UV4DI_V4DI),
+ LASX_BUILTIN (xvmaddwev_d_wu_w, LARCH_V4DI_FTYPE_V4DI_UV8SI_V8SI),
+ LASX_BUILTIN (xvmaddwev_w_hu_h, LARCH_V8SI_FTYPE_V8SI_UV16HI_V16HI),
+ LASX_BUILTIN (xvmaddwev_h_bu_b, LARCH_V16HI_FTYPE_V16HI_UV32QI_V32QI),
+ LASX_BUILTIN (xvmaddwod_q_du_d, LARCH_V4DI_FTYPE_V4DI_UV4DI_V4DI),
+ LASX_BUILTIN (xvmaddwod_d_wu_w, LARCH_V4DI_FTYPE_V4DI_UV8SI_V8SI),
+ LASX_BUILTIN (xvmaddwod_w_hu_h, LARCH_V8SI_FTYPE_V8SI_UV16HI_V16HI),
+ LASX_BUILTIN (xvmaddwod_h_bu_b, LARCH_V16HI_FTYPE_V16HI_UV32QI_V32QI),
+ LASX_BUILTIN (xvrotr_b, LARCH_V32QI_FTYPE_V32QI_V32QI),
+ LASX_BUILTIN (xvrotr_h, LARCH_V16HI_FTYPE_V16HI_V16HI),
+ LASX_BUILTIN (xvrotr_w, LARCH_V8SI_FTYPE_V8SI_V8SI),
+ LASX_BUILTIN (xvrotr_d, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvadd_q, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvsub_q, LARCH_V4DI_FTYPE_V4DI_V4DI),
+ LASX_BUILTIN (xvaddwev_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI),
+ LASX_BUILTIN (xvaddwod_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI),
+ LASX_BUILTIN (xvmulwev_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI),
+ LASX_BUILTIN (xvmulwod_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI),
+ LASX_BUILTIN (xvmskgez_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvmsknz_b, LARCH_V32QI_FTYPE_V32QI),
+ LASX_BUILTIN (xvexth_h_b, LARCH_V16HI_FTYPE_V32QI),
+ LASX_BUILTIN (xvexth_w_h, LARCH_V8SI_FTYPE_V16HI),
+ LASX_BUILTIN (xvexth_d_w, LARCH_V4DI_FTYPE_V8SI),
+ LASX_BUILTIN (xvexth_q_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvexth_hu_bu, LARCH_UV16HI_FTYPE_UV32QI),
+ LASX_BUILTIN (xvexth_wu_hu, LARCH_UV8SI_FTYPE_UV16HI),
+ LASX_BUILTIN (xvexth_du_wu, LARCH_UV4DI_FTYPE_UV8SI),
+ LASX_BUILTIN (xvexth_qu_du, LARCH_UV4DI_FTYPE_UV4DI),
+ LASX_BUILTIN (xvrotri_b, LARCH_V32QI_FTYPE_V32QI_UQI),
+ LASX_BUILTIN (xvrotri_h, LARCH_V16HI_FTYPE_V16HI_UQI),
+ LASX_BUILTIN (xvrotri_w, LARCH_V8SI_FTYPE_V8SI_UQI),
+ LASX_BUILTIN (xvrotri_d, LARCH_V4DI_FTYPE_V4DI_UQI),
+ LASX_BUILTIN (xvextl_q_d, LARCH_V4DI_FTYPE_V4DI),
+ LASX_BUILTIN (xvsrlni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvsrlni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvsrlni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvsrlni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvsrlrni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvsrlrni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvsrlrni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvsrlrni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrlni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrlni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrlni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrlni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrlni_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrlni_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrlni_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrlni_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrlrni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrlrni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrlrni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrlrni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrlrni_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrlrni_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrlrni_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrlrni_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI),
+ LASX_BUILTIN (xvsrani_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvsrani_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvsrani_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvsrani_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvsrarni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvsrarni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvsrarni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvsrarni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrani_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrani_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrani_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrani_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrani_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrani_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrani_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrani_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrarni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrarni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrarni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrarni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI),
+ LASX_BUILTIN (xvssrarni_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI),
+ LASX_BUILTIN (xvssrarni_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI),
+ LASX_BUILTIN (xvssrarni_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI),
+ LASX_BUILTIN (xvssrarni_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI)
};
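A quick orientation on how the table above surfaces to users: each LASX_BUILTIN (name, ftype) entry becomes a GCC built-in named __builtin_lasx_<name>, with the C signature encoded by the ftype. A minimal usage sketch follows; the vector typedef and the -mlasx flag are illustration-only assumptions, not part of this patch.

/* Minimal usage sketch for the builtins registered above; assumes a
   LoongArch toolchain with LASX enabled (compile with -mlasx).  */
typedef signed char v32i8 __attribute__ ((vector_size (32), aligned (32)));

v32i8
saturating_add (v32i8 a, v32i8 b)
{
  /* Maps to the xvsadd_b entry (LARCH_V32QI_FTYPE_V32QI_V32QI).  */
  return __builtin_lasx_xvsadd_b (a, b);
}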
/* Index I is the function declaration for loongarch_builtins[I], or null if
@@ -1446,11 +2502,15 @@ loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
{
if (out_n == 2 && in_n == 2)
return LARCH_GET_BUILTIN (lsx_vfrintrp_d);
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lasx_xvfrintrp_d);
}
if (out_mode == SFmode && in_mode == SFmode)
{
if (out_n == 4 && in_n == 4)
return LARCH_GET_BUILTIN (lsx_vfrintrp_s);
+ if (out_n == 8 && in_n == 8)
+ return LARCH_GET_BUILTIN (lasx_xvfrintrp_s);
}
break;
@@ -1459,11 +2519,15 @@ loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
{
if (out_n == 2 && in_n == 2)
return LARCH_GET_BUILTIN (lsx_vfrintrz_d);
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lasx_xvfrintrz_d);
}
if (out_mode == SFmode && in_mode == SFmode)
{
if (out_n == 4 && in_n == 4)
return LARCH_GET_BUILTIN (lsx_vfrintrz_s);
+ if (out_n == 8 && in_n == 8)
+ return LARCH_GET_BUILTIN (lasx_xvfrintrz_s);
}
break;
@@ -1473,11 +2537,15 @@ loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
{
if (out_n == 2 && in_n == 2)
return LARCH_GET_BUILTIN (lsx_vfrint_d);
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lasx_xvfrint_d);
}
if (out_mode == SFmode && in_mode == SFmode)
{
if (out_n == 4 && in_n == 4)
return LARCH_GET_BUILTIN (lsx_vfrint_s);
+ if (out_n == 8 && in_n == 8)
+ return LARCH_GET_BUILTIN (lasx_xvfrint_s);
}
break;
@@ -1486,11 +2554,15 @@ loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
{
if (out_n == 2 && in_n == 2)
return LARCH_GET_BUILTIN (lsx_vfrintrm_d);
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lasx_xvfrintrm_d);
}
if (out_mode == SFmode && in_mode == SFmode)
{
if (out_n == 4 && in_n == 4)
return LARCH_GET_BUILTIN (lsx_vfrintrm_s);
+ if (out_n == 8 && in_n == 8)
+ return LARCH_GET_BUILTIN (lasx_xvfrintrm_s);
}
break;
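The hunks above wire the 256-bit LASX rounding builtins into loongarch_builtin_vectorized_function, the hook the auto-vectorizer queries when it meets a scalar math call. A sketch of the kind of loop that can now use them; the compiler flags are an assumption for illustration.

/* With -mlasx -O3 (assumed flags), GCC can resolve the scalar floor ()
   calls below to lasx_xvfrintrm_d, processing four doubles per vector
   iteration instead of two with 128-bit LSX.  */
#include <math.h>

void
floor_all (double *restrict dst, const double *restrict src, int n)
{
  for (int i = 0; i < n; i++)
    dst[i] = floor (src[i]);
}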
@@ -1565,6 +2637,30 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
case CODE_FOR_lsx_vsubi_hu:
case CODE_FOR_lsx_vsubi_wu:
case CODE_FOR_lsx_vsubi_du:
+ case CODE_FOR_lasx_xvaddi_bu:
+ case CODE_FOR_lasx_xvaddi_hu:
+ case CODE_FOR_lasx_xvaddi_wu:
+ case CODE_FOR_lasx_xvaddi_du:
+ case CODE_FOR_lasx_xvslti_bu:
+ case CODE_FOR_lasx_xvslti_hu:
+ case CODE_FOR_lasx_xvslti_wu:
+ case CODE_FOR_lasx_xvslti_du:
+ case CODE_FOR_lasx_xvslei_bu:
+ case CODE_FOR_lasx_xvslei_hu:
+ case CODE_FOR_lasx_xvslei_wu:
+ case CODE_FOR_lasx_xvslei_du:
+ case CODE_FOR_lasx_xvmaxi_bu:
+ case CODE_FOR_lasx_xvmaxi_hu:
+ case CODE_FOR_lasx_xvmaxi_wu:
+ case CODE_FOR_lasx_xvmaxi_du:
+ case CODE_FOR_lasx_xvmini_bu:
+ case CODE_FOR_lasx_xvmini_hu:
+ case CODE_FOR_lasx_xvmini_wu:
+ case CODE_FOR_lasx_xvmini_du:
+ case CODE_FOR_lasx_xvsubi_bu:
+ case CODE_FOR_lasx_xvsubi_hu:
+ case CODE_FOR_lasx_xvsubi_wu:
+ case CODE_FOR_lasx_xvsubi_du:
gcc_assert (has_target_p && nops == 3);
/* We only generate a vector of constants iff the second argument
is an immediate. We also validate the range of the immediate. */
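The comment above states the invariant for all the immediate-form cases just added: the second argument must be a compile-time constant within the instruction's immediate range, or expansion reports an error. A hedged user-level illustration (assumed user code under -mlasx; xvaddi_bu takes an unsigned 5-bit immediate):

/* Illustrative only: the constant must lie in [0, 31] for xvaddi_bu.  */
typedef signed char v32i8 __attribute__ ((vector_size (32), aligned (32)));

v32i8
add_seven (v32i8 v)
{
  return __builtin_lasx_xvaddi_bu (v, 7);   /* OK: constant, in range.  */
}
/* Passing a runtime variable, or a constant of 32 or more, is rejected
   when the builtin is expanded.  */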
@@ -1603,6 +2699,26 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
case CODE_FOR_lsx_vmini_h:
case CODE_FOR_lsx_vmini_w:
case CODE_FOR_lsx_vmini_d:
+ case CODE_FOR_lasx_xvseqi_b:
+ case CODE_FOR_lasx_xvseqi_h:
+ case CODE_FOR_lasx_xvseqi_w:
+ case CODE_FOR_lasx_xvseqi_d:
+ case CODE_FOR_lasx_xvslti_b:
+ case CODE_FOR_lasx_xvslti_h:
+ case CODE_FOR_lasx_xvslti_w:
+ case CODE_FOR_lasx_xvslti_d:
+ case CODE_FOR_lasx_xvslei_b:
+ case CODE_FOR_lasx_xvslei_h:
+ case CODE_FOR_lasx_xvslei_w:
+ case CODE_FOR_lasx_xvslei_d:
+ case CODE_FOR_lasx_xvmaxi_b:
+ case CODE_FOR_lasx_xvmaxi_h:
+ case CODE_FOR_lasx_xvmaxi_w:
+ case CODE_FOR_lasx_xvmaxi_d:
+ case CODE_FOR_lasx_xvmini_b:
+ case CODE_FOR_lasx_xvmini_h:
+ case CODE_FOR_lasx_xvmini_w:
+ case CODE_FOR_lasx_xvmini_d:
gcc_assert (has_target_p && nops == 3);
/* We only generate a vector of constants iff the second argument
is an immediate. We also validate the range of the immediate. */
@@ -1625,6 +2741,10 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
case CODE_FOR_lsx_vori_b:
case CODE_FOR_lsx_vnori_b:
case CODE_FOR_lsx_vxori_b:
+ case CODE_FOR_lasx_xvandi_b:
+ case CODE_FOR_lasx_xvori_b:
+ case CODE_FOR_lasx_xvnori_b:
+ case CODE_FOR_lasx_xvxori_b:
gcc_assert (has_target_p && nops == 3);
if (!CONST_INT_P (ops[2].value))
break;
@@ -1634,6 +2754,7 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
break;
case CODE_FOR_lsx_vbitseli_b:
+ case CODE_FOR_lasx_xvbitseli_b:
gcc_assert (has_target_p && nops == 4);
if (!CONST_INT_P (ops[3].value))
break;
@@ -1646,6 +2767,10 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
case CODE_FOR_lsx_vreplgr2vr_h:
case CODE_FOR_lsx_vreplgr2vr_w:
case CODE_FOR_lsx_vreplgr2vr_d:
+ case CODE_FOR_lasx_xvreplgr2vr_b:
+ case CODE_FOR_lasx_xvreplgr2vr_h:
+ case CODE_FOR_lasx_xvreplgr2vr_w:
+ case CODE_FOR_lasx_xvreplgr2vr_d:
+ /* Map the built-ins to vector fill operations. We need to fix up the mode
for the element being inserted. */
gcc_assert (has_target_p && nops == 2);
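For the fill (broadcast) cases handled above, a one-line user view; the typedef is again an illustration-only assumption:

/* Sketch: the xvreplgr2vr builtins splat one general-register value
   across every lane of a 256-bit vector.  */
typedef int v8i32 __attribute__ ((vector_size (32), aligned (32)));

v8i32
splat (int x)
{
  return __builtin_lasx_xvreplgr2vr_w (x);   /* all 8 lanes equal x */
}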
@@ -1674,6 +2799,26 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
case CODE_FOR_lsx_vpickod_b:
case CODE_FOR_lsx_vpickod_h:
case CODE_FOR_lsx_vpickod_w:
+ case CODE_FOR_lasx_xvilvh_b:
+ case CODE_FOR_lasx_xvilvh_h:
+ case CODE_FOR_lasx_xvilvh_w:
+ case CODE_FOR_lasx_xvilvh_d:
+ case CODE_FOR_lasx_xvilvl_b:
+ case CODE_FOR_lasx_xvilvl_h:
+ case CODE_FOR_lasx_xvilvl_w:
+ case CODE_FOR_lasx_xvilvl_d:
+ case CODE_FOR_lasx_xvpackev_b:
+ case CODE_FOR_lasx_xvpackev_h:
+ case CODE_FOR_lasx_xvpackev_w:
+ case CODE_FOR_lasx_xvpackod_b:
+ case CODE_FOR_lasx_xvpackod_h:
+ case CODE_FOR_lasx_xvpackod_w:
+ case CODE_FOR_lasx_xvpickev_b:
+ case CODE_FOR_lasx_xvpickev_h:
+ case CODE_FOR_lasx_xvpickev_w:
+ case CODE_FOR_lasx_xvpickod_b:
+ case CODE_FOR_lasx_xvpickod_h:
+ case CODE_FOR_lasx_xvpickod_w:
/* Swap the operands 1 and 2 for interleave operations. Built-ins follow
+ the ISA convention, which has op1 as the higher component and op2 as the lower
component. However, the VEC_PERM op in tree and vec_concat in RTL
@@ -1695,6 +2840,18 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
case CODE_FOR_lsx_vsrli_h:
case CODE_FOR_lsx_vsrli_w:
case CODE_FOR_lsx_vsrli_d:
+ case CODE_FOR_lasx_xvslli_b:
+ case CODE_FOR_lasx_xvslli_h:
+ case CODE_FOR_lasx_xvslli_w:
+ case CODE_FOR_lasx_xvslli_d:
+ case CODE_FOR_lasx_xvsrai_b:
+ case CODE_FOR_lasx_xvsrai_h:
+ case CODE_FOR_lasx_xvsrai_w:
+ case CODE_FOR_lasx_xvsrai_d:
+ case CODE_FOR_lasx_xvsrli_b:
+ case CODE_FOR_lasx_xvsrli_h:
+ case CODE_FOR_lasx_xvsrli_w:
+ case CODE_FOR_lasx_xvsrli_d:
gcc_assert (has_target_p && nops == 3);
if (CONST_INT_P (ops[2].value))
{
@@ -1755,6 +2912,25 @@ loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
INTVAL (ops[2].value));
break;
+ case CODE_FOR_lasx_xvinsgr2vr_w:
+ case CODE_FOR_lasx_xvinsgr2vr_d:
+ /* Map the built-ins to insert operations. We need to swap operands,
+ fix up the mode for the element being inserted, and generate
+ a bit mask for vec_merge. */
+ gcc_assert (has_target_p && nops == 4);
+ std::swap (ops[1], ops[2]);
+ imode = GET_MODE_INNER (ops[0].mode);
+ ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
+ ops[1].mode = imode;
+ rangelo = 0;
+ rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
+ if (CONST_INT_P (ops[3].value)
+ && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
+ ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
+ else
+ error_opno = 2;
+ break;
+
default:
break;
}
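The xvinsgr2vr case above turns a lane index into a one-hot vec_merge selector. A standalone sketch of that mask computation, mirroring (not copying) the expander logic:

/* Lane IDX of an N-lane vector is written by a vec_merge whose mask
   has only bit IDX set.  */
#include <stdio.h>

static unsigned
lane_mask (unsigned idx, unsigned nunits)
{
  /* Out-of-range indexes are diagnosed by the expander (error_opno).  */
  return idx < nunits ? 1u << idx : 0u;
}

int
main (void)
{
  printf ("%u\n", lane_mask (2, 8));   /* 4: xvinsgr2vr_w writing lane 2 */
  return 0;
}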
@@ -1864,12 +3040,14 @@ loongarch_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
{
case LARCH_BUILTIN_DIRECT:
case LARCH_BUILTIN_LSX:
+ case LARCH_BUILTIN_LASX:
return loongarch_expand_builtin_direct (d->icode, target, exp, true);
case LARCH_BUILTIN_DIRECT_NO_TARGET:
return loongarch_expand_builtin_direct (d->icode, target, exp, false);
case LARCH_BUILTIN_LSX_TEST_BRANCH:
+ case LARCH_BUILTIN_LASX_TEST_BRANCH:
return loongarch_expand_builtin_lsx_test_branch (d->icode, exp);
}
gcc_unreachable ();
diff --git a/gcc/config/loongarch/loongarch-ftypes.def b/gcc/config/loongarch/loongarch-ftypes.def
index 2b0d50892..c7f849e88 100644
--- a/gcc/config/loongarch/loongarch-ftypes.def
+++ b/gcc/config/loongarch/loongarch-ftypes.def
@@ -67,6 +67,7 @@ DEF_LARCH_FTYPE (3, (UDI, UDI, UDI, USI))
DEF_LARCH_FTYPE (1, (DF, DF))
DEF_LARCH_FTYPE (2, (DF, DF, DF))
DEF_LARCH_FTYPE (1, (DF, V2DF))
+DEF_LARCH_FTYPE (1, (DF, V4DF))
DEF_LARCH_FTYPE (1, (DI, DI))
DEF_LARCH_FTYPE (1, (DI, SI))
@@ -83,6 +84,7 @@ DEF_LARCH_FTYPE (2, (DI, SI, SI))
DEF_LARCH_FTYPE (2, (DI, USI, USI))
DEF_LARCH_FTYPE (2, (DI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (DI, V4DI, UQI))
DEF_LARCH_FTYPE (2, (INT, DF, DF))
DEF_LARCH_FTYPE (2, (INT, SF, SF))
@@ -104,21 +106,31 @@ DEF_LARCH_FTYPE (3, (SI, SI, SI, SI))
DEF_LARCH_FTYPE (3, (SI, SI, SI, QI))
DEF_LARCH_FTYPE (1, (SI, UQI))
DEF_LARCH_FTYPE (1, (SI, UV16QI))
+DEF_LARCH_FTYPE (1, (SI, UV32QI))
DEF_LARCH_FTYPE (1, (SI, UV2DI))
+DEF_LARCH_FTYPE (1, (SI, UV4DI))
DEF_LARCH_FTYPE (1, (SI, UV4SI))
+DEF_LARCH_FTYPE (1, (SI, UV8SI))
DEF_LARCH_FTYPE (1, (SI, UV8HI))
+DEF_LARCH_FTYPE (1, (SI, UV16HI))
DEF_LARCH_FTYPE (2, (SI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (SI, V32QI, UQI))
DEF_LARCH_FTYPE (1, (SI, V2HI))
DEF_LARCH_FTYPE (2, (SI, V2HI, V2HI))
DEF_LARCH_FTYPE (1, (SI, V4QI))
DEF_LARCH_FTYPE (2, (SI, V4QI, V4QI))
DEF_LARCH_FTYPE (2, (SI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (SI, V8SI, UQI))
DEF_LARCH_FTYPE (2, (SI, V8HI, UQI))
DEF_LARCH_FTYPE (1, (SI, VOID))
DEF_LARCH_FTYPE (2, (UDI, UDI, UDI))
+DEF_LARCH_FTYPE (2, (USI, V32QI, UQI))
DEF_LARCH_FTYPE (2, (UDI, UV2SI, UV2SI))
+DEF_LARCH_FTYPE (2, (USI, V8SI, UQI))
DEF_LARCH_FTYPE (2, (UDI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (USI, V16HI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V4DI, UQI))
DEF_LARCH_FTYPE (2, (USI, V16QI, UQI))
DEF_LARCH_FTYPE (2, (USI, V4SI, UQI))
@@ -142,6 +154,23 @@ DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, V2DI))
DEF_LARCH_FTYPE (2, (UV2DI, UV4SI, UV4SI))
DEF_LARCH_FTYPE (1, (UV2DI, V2DF))
+DEF_LARCH_FTYPE (2, (UV32QI, UV32QI, UQI))
+DEF_LARCH_FTYPE (2, (UV32QI, UV32QI, USI))
+DEF_LARCH_FTYPE (2, (UV32QI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (3, (UV32QI, UV32QI, UV32QI, UQI))
+DEF_LARCH_FTYPE (3, (UV32QI, UV32QI, UV32QI, USI))
+DEF_LARCH_FTYPE (3, (UV32QI, UV32QI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (2, (UV32QI, UV32QI, V32QI))
+
+DEF_LARCH_FTYPE (2, (UV4DI, UV4DI, UQI))
+DEF_LARCH_FTYPE (2, (UV4DI, UV4DI, UV4DI))
+DEF_LARCH_FTYPE (3, (UV4DI, UV4DI, UV4DI, UQI))
+DEF_LARCH_FTYPE (3, (UV4DI, UV4DI, UV4DI, UV4DI))
+DEF_LARCH_FTYPE (3, (UV4DI, UV4DI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (2, (UV4DI, UV4DI, V4DI))
+DEF_LARCH_FTYPE (2, (UV4DI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (1, (UV4DI, V4DF))
+
DEF_LARCH_FTYPE (2, (UV2SI, UV2SI, UQI))
DEF_LARCH_FTYPE (2, (UV2SI, UV2SI, UV2SI))
@@ -170,7 +199,22 @@ DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV8HI, UQI))
DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV8HI, UV8HI))
DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, V8HI))
-
+DEF_LARCH_FTYPE (2, (UV8SI, UV8SI, UQI))
+DEF_LARCH_FTYPE (2, (UV8SI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (3, (UV8SI, UV8SI, UV8SI, UQI))
+DEF_LARCH_FTYPE (3, (UV8SI, UV8SI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (3, (UV8SI, UV8SI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (2, (UV8SI, UV8SI, V8SI))
+DEF_LARCH_FTYPE (2, (UV8SI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (1, (UV8SI, V8SF))
+
+DEF_LARCH_FTYPE (2, (UV16HI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (2, (UV16HI, UV16HI, UQI))
+DEF_LARCH_FTYPE (3, (UV16HI, UV16HI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (2, (UV16HI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (3, (UV16HI, UV16HI, UV16HI, UQI))
+DEF_LARCH_FTYPE (3, (UV16HI, UV16HI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (2, (UV16HI, UV16HI, V16HI))
DEF_LARCH_FTYPE (2, (UV8QI, UV4HI, UV4HI))
DEF_LARCH_FTYPE (1, (UV8QI, UV8QI))
@@ -196,6 +240,25 @@ DEF_LARCH_FTYPE (4, (V16QI, V16QI, V16QI, UQI, UQI))
DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, USI))
DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V32QI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (2, (V32QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (1, (V32QI, HI))
+DEF_LARCH_FTYPE (1, (V32QI, SI))
+DEF_LARCH_FTYPE (2, (V32QI, UV32QI, UQI))
+DEF_LARCH_FTYPE (2, (V32QI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (1, (V32QI, V32QI))
+DEF_LARCH_FTYPE (2, (V32QI, V32QI, QI))
+DEF_LARCH_FTYPE (2, (V32QI, V32QI, SI))
+DEF_LARCH_FTYPE (2, (V32QI, V32QI, UQI))
+DEF_LARCH_FTYPE (2, (V32QI, V32QI, USI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, SI, UQI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, UQI, V32QI))
+DEF_LARCH_FTYPE (2, (V32QI, V32QI, V32QI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, V32QI, SI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, V32QI, UQI))
+DEF_LARCH_FTYPE (4, (V32QI, V32QI, V32QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, V32QI, USI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, V32QI, V32QI))
DEF_LARCH_FTYPE (1, (V2DF, DF))
DEF_LARCH_FTYPE (1, (V2DF, UV2DI))
@@ -207,6 +270,16 @@ DEF_LARCH_FTYPE (1, (V2DF, V2DI))
DEF_LARCH_FTYPE (1, (V2DF, V4SF))
DEF_LARCH_FTYPE (1, (V2DF, V4SI))
+DEF_LARCH_FTYPE (1, (V4DF, DF))
+DEF_LARCH_FTYPE (1, (V4DF, UV4DI))
+DEF_LARCH_FTYPE (1, (V4DF, V4DF))
+DEF_LARCH_FTYPE (2, (V4DF, V4DF, V4DF))
+DEF_LARCH_FTYPE (3, (V4DF, V4DF, V4DF, V4DF))
+DEF_LARCH_FTYPE (2, (V4DF, V4DF, V4DI))
+DEF_LARCH_FTYPE (1, (V4DF, V4DI))
+DEF_LARCH_FTYPE (1, (V4DF, V8SF))
+DEF_LARCH_FTYPE (1, (V4DF, V8SI))
+
DEF_LARCH_FTYPE (2, (V2DI, CVPOINTER, SI))
DEF_LARCH_FTYPE (1, (V2DI, DI))
DEF_LARCH_FTYPE (1, (V2DI, HI))
@@ -233,6 +306,32 @@ DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, V2DI))
DEF_LARCH_FTYPE (3, (V2DI, V2DI, V4SI, V4SI))
DEF_LARCH_FTYPE (2, (V2DI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V4DI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V4DI, DI))
+DEF_LARCH_FTYPE (1, (V4DI, HI))
+DEF_LARCH_FTYPE (2, (V4DI, UV4DI, UQI))
+DEF_LARCH_FTYPE (2, (V4DI, UV4DI, UV4DI))
+DEF_LARCH_FTYPE (2, (V4DI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (1, (V4DI, V4DF))
+DEF_LARCH_FTYPE (2, (V4DI, V4DF, V4DF))
+DEF_LARCH_FTYPE (1, (V4DI, V4DI))
+DEF_LARCH_FTYPE (1, (UV4DI, UV4DI))
+DEF_LARCH_FTYPE (2, (V4DI, V4DI, QI))
+DEF_LARCH_FTYPE (2, (V4DI, V4DI, SI))
+DEF_LARCH_FTYPE (2, (V4DI, V4DI, UQI))
+DEF_LARCH_FTYPE (2, (V4DI, V4DI, USI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, DI, UQI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, UQI, V4DI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (2, (V4DI, V4DI, V4DI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, V4DI, SI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, V4DI, USI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, V4DI, UQI))
+DEF_LARCH_FTYPE (4, (V4DI, V4DI, V4DI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, V4DI, V4DI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, V8SI, V8SI))
+DEF_LARCH_FTYPE (2, (V4DI, V8SI, V8SI))
+
DEF_LARCH_FTYPE (1, (V2HI, SI))
DEF_LARCH_FTYPE (2, (V2HI, SI, SI))
DEF_LARCH_FTYPE (3, (V2HI, SI, SI, SI))
@@ -274,6 +373,17 @@ DEF_LARCH_FTYPE (3, (V4SF, V4SF, V4SF, V4SF))
DEF_LARCH_FTYPE (2, (V4SF, V4SF, V4SI))
DEF_LARCH_FTYPE (1, (V4SF, V4SI))
DEF_LARCH_FTYPE (1, (V4SF, V8HI))
+DEF_LARCH_FTYPE (1, (V8SF, V16HI))
+
+DEF_LARCH_FTYPE (1, (V8SF, SF))
+DEF_LARCH_FTYPE (1, (V8SF, UV8SI))
+DEF_LARCH_FTYPE (2, (V8SF, V4DF, V4DF))
+DEF_LARCH_FTYPE (1, (V8SF, V8SF))
+DEF_LARCH_FTYPE (2, (V8SF, V8SF, V8SF))
+DEF_LARCH_FTYPE (3, (V8SF, V8SF, V8SF, V8SF))
+DEF_LARCH_FTYPE (2, (V8SF, V8SF, V8SI))
+DEF_LARCH_FTYPE (1, (V8SF, V8SI))
+DEF_LARCH_FTYPE (1, (V8SF, V8HI))
DEF_LARCH_FTYPE (2, (V4SI, CVPOINTER, SI))
DEF_LARCH_FTYPE (1, (V4SI, HI))
@@ -282,6 +392,7 @@ DEF_LARCH_FTYPE (2, (V4SI, UV4SI, UQI))
DEF_LARCH_FTYPE (2, (V4SI, UV4SI, UV4SI))
DEF_LARCH_FTYPE (2, (V4SI, UV8HI, UV8HI))
DEF_LARCH_FTYPE (2, (V4SI, V2DF, V2DF))
+DEF_LARCH_FTYPE (2, (V8SI, V4DF, V4DF))
DEF_LARCH_FTYPE (1, (V4SI, V4SF))
DEF_LARCH_FTYPE (2, (V4SI, V4SF, V4SF))
DEF_LARCH_FTYPE (1, (V4SI, V4SI))
@@ -301,6 +412,32 @@ DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, V4SI))
DEF_LARCH_FTYPE (3, (V4SI, V4SI, V8HI, V8HI))
DEF_LARCH_FTYPE (2, (V4SI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V8SI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V8SI, HI))
+DEF_LARCH_FTYPE (1, (V8SI, SI))
+DEF_LARCH_FTYPE (2, (V8SI, UV8SI, UQI))
+DEF_LARCH_FTYPE (2, (V8SI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (2, (V8SI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (2, (V8SI, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V8SI, V8SF))
+DEF_LARCH_FTYPE (2, (V8SI, V8SF, V8SF))
+DEF_LARCH_FTYPE (1, (V8SI, V8SI))
+DEF_LARCH_FTYPE (2, (V8SI, V8SI, QI))
+DEF_LARCH_FTYPE (2, (V8SI, V8SI, SI))
+DEF_LARCH_FTYPE (2, (V8SI, V8SI, UQI))
+DEF_LARCH_FTYPE (2, (V8SI, V8SI, USI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, SI, UQI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, UQI, V8SI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (2, (V8SI, V8SI, V8SI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, V8SI, SI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, V8SI, UQI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, V8SI, USI))
+DEF_LARCH_FTYPE (4, (V8SI, V8SI, V8SI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, V8SI, V8SI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, V16HI, V16HI))
+DEF_LARCH_FTYPE (2, (V8SI, V16HI, V16HI))
+
DEF_LARCH_FTYPE (2, (V8HI, CVPOINTER, SI))
DEF_LARCH_FTYPE (1, (V8HI, HI))
DEF_LARCH_FTYPE (1, (V8HI, SI))
@@ -326,6 +463,31 @@ DEF_LARCH_FTYPE (4, (V8HI, V8HI, V8HI, UQI, UQI))
DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, USI))
DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V16HI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V16HI, HI))
+DEF_LARCH_FTYPE (1, (V16HI, SI))
+DEF_LARCH_FTYPE (2, (V16HI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (2, (V16HI, UV16HI, UQI))
+DEF_LARCH_FTYPE (2, (V16HI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (2, (V16HI, V32QI, V32QI))
+DEF_LARCH_FTYPE (2, (V16HI, V8SF, V8SF))
+DEF_LARCH_FTYPE (1, (V16HI, V16HI))
+DEF_LARCH_FTYPE (2, (V16HI, V16HI, QI))
+DEF_LARCH_FTYPE (2, (V16HI, V16HI, SI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, SI, UQI))
+DEF_LARCH_FTYPE (2, (V16HI, V16HI, UQI))
+DEF_LARCH_FTYPE (2, (V16HI, V16HI, USI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, UQI, V16HI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, UV32QI, UV32QI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, V32QI, V32QI))
+DEF_LARCH_FTYPE (2, (V16HI, V16HI, V16HI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, V16HI, SI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, V16HI, UQI))
+DEF_LARCH_FTYPE (4, (V16HI, V16HI, V16HI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, V16HI, USI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, V16HI, V16HI))
+
DEF_LARCH_FTYPE (2, (V8QI, V4HI, V4HI))
DEF_LARCH_FTYPE (1, (V8QI, V8QI))
DEF_LARCH_FTYPE (2, (V8QI, V8QI, V8QI))
@@ -337,62 +499,113 @@ DEF_LARCH_FTYPE (2, (VOID, USI, UQI))
DEF_LARCH_FTYPE (1, (VOID, UHI))
DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, SI))
DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (3, (VOID, V32QI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V32QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (3, (VOID, V4DF, POINTER, SI))
DEF_LARCH_FTYPE (3, (VOID, V2DF, POINTER, SI))
DEF_LARCH_FTYPE (3, (VOID, V2DI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V4DI, CVPOINTER, SI))
DEF_LARCH_FTYPE (2, (VOID, V2HI, V2HI))
DEF_LARCH_FTYPE (2, (VOID, V4QI, V4QI))
DEF_LARCH_FTYPE (3, (VOID, V4SF, POINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V8SF, POINTER, SI))
DEF_LARCH_FTYPE (3, (VOID, V4SI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V8SI, CVPOINTER, SI))
DEF_LARCH_FTYPE (3, (VOID, V8HI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V16HI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V16HI, V32QI))
+DEF_LARCH_FTYPE (1, (UV16HI, UV32QI))
+DEF_LARCH_FTYPE (1, (V8SI, V32QI))
+DEF_LARCH_FTYPE (1, (V4DI, V32QI))
DEF_LARCH_FTYPE (1, (V8HI, V16QI))
DEF_LARCH_FTYPE (1, (V4SI, V16QI))
DEF_LARCH_FTYPE (1, (V2DI, V16QI))
+DEF_LARCH_FTYPE (1, (UV8SI, UV16HI))
+DEF_LARCH_FTYPE (1, (V8SI, V16HI))
+DEF_LARCH_FTYPE (1, (V4DI, V16HI))
DEF_LARCH_FTYPE (1, (V4SI, V8HI))
DEF_LARCH_FTYPE (1, (V2DI, V8HI))
DEF_LARCH_FTYPE (1, (V2DI, V4SI))
+DEF_LARCH_FTYPE (1, (V4DI, V8SI))
+DEF_LARCH_FTYPE (1, (UV4DI, UV8SI))
+DEF_LARCH_FTYPE (1, (UV16HI, V32QI))
+DEF_LARCH_FTYPE (1, (UV8SI, V32QI))
+DEF_LARCH_FTYPE (1, (UV4DI, V32QI))
DEF_LARCH_FTYPE (1, (UV8HI, V16QI))
DEF_LARCH_FTYPE (1, (UV4SI, V16QI))
DEF_LARCH_FTYPE (1, (UV2DI, V16QI))
+DEF_LARCH_FTYPE (1, (UV8SI, V16HI))
+DEF_LARCH_FTYPE (1, (UV4DI, V16HI))
DEF_LARCH_FTYPE (1, (UV4SI, V8HI))
DEF_LARCH_FTYPE (1, (UV2DI, V8HI))
DEF_LARCH_FTYPE (1, (UV2DI, V4SI))
+DEF_LARCH_FTYPE (1, (UV4DI, V8SI))
DEF_LARCH_FTYPE (1, (UV8HI, UV16QI))
DEF_LARCH_FTYPE (1, (UV4SI, UV16QI))
DEF_LARCH_FTYPE (1, (UV2DI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV4DI, UV32QI))
DEF_LARCH_FTYPE (1, (UV4SI, UV8HI))
DEF_LARCH_FTYPE (1, (UV2DI, UV8HI))
DEF_LARCH_FTYPE (1, (UV2DI, UV4SI))
DEF_LARCH_FTYPE (2, (UV8HI, V16QI, V16QI))
DEF_LARCH_FTYPE (2, (UV4SI, V8HI, V8HI))
DEF_LARCH_FTYPE (2, (UV2DI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V16HI, V32QI, UQI))
+DEF_LARCH_FTYPE (2, (V8SI, V16HI, UQI))
+DEF_LARCH_FTYPE (2, (V4DI, V8SI, UQI))
DEF_LARCH_FTYPE (2, (V8HI, V16QI, UQI))
DEF_LARCH_FTYPE (2, (V4SI, V8HI, UQI))
DEF_LARCH_FTYPE (2, (V2DI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV16HI, UV32QI, UQI))
+DEF_LARCH_FTYPE (2, (UV8SI, UV16HI, UQI))
+DEF_LARCH_FTYPE (2, (UV4DI, UV8SI, UQI))
DEF_LARCH_FTYPE (2, (UV8HI, UV16QI, UQI))
DEF_LARCH_FTYPE (2, (UV4SI, UV8HI, UQI))
DEF_LARCH_FTYPE (2, (UV2DI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (V32QI, V16HI, V16HI))
+DEF_LARCH_FTYPE (2, (V16HI, V8SI, V8SI))
+DEF_LARCH_FTYPE (2, (V8SI, V4DI, V4DI))
DEF_LARCH_FTYPE (2, (V16QI, V8HI, V8HI))
DEF_LARCH_FTYPE (2, (V8HI, V4SI, V4SI))
DEF_LARCH_FTYPE (2, (V4SI, V2DI, V2DI))
+DEF_LARCH_FTYPE (2, (UV32QI, UV16HI, UV16HI))
+DEF_LARCH_FTYPE (2, (UV16HI, UV8SI, UV8SI))
+DEF_LARCH_FTYPE (2, (UV8SI, UV4DI, UV4DI))
DEF_LARCH_FTYPE (2, (UV16QI, UV8HI, UV8HI))
DEF_LARCH_FTYPE (2, (UV8HI, UV4SI, UV4SI))
DEF_LARCH_FTYPE (2, (UV4SI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V32QI, V16HI, UQI))
+DEF_LARCH_FTYPE (2, (V16HI, V8SI, UQI))
+DEF_LARCH_FTYPE (2, (V8SI, V4DI, UQI))
DEF_LARCH_FTYPE (2, (V16QI, V8HI, UQI))
DEF_LARCH_FTYPE (2, (V8HI, V4SI, UQI))
DEF_LARCH_FTYPE (2, (V4SI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (UV32QI, UV16HI, UQI))
+DEF_LARCH_FTYPE (2, (UV16HI, UV8SI, UQI))
+DEF_LARCH_FTYPE (2, (UV8SI, UV4DI, UQI))
DEF_LARCH_FTYPE (2, (UV16QI, UV8HI, UQI))
DEF_LARCH_FTYPE (2, (UV8HI, UV4SI, UQI))
DEF_LARCH_FTYPE (2, (UV4SI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (V32QI, V32QI, DI))
DEF_LARCH_FTYPE (2, (V16QI, V16QI, DI))
+DEF_LARCH_FTYPE (2, (V32QI, UQI, UQI))
DEF_LARCH_FTYPE (2, (V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V32QI, V32QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V16HI, V16HI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V8SI, V8SI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V4DI, V4DI, UQI, UQI))
DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, UQI))
DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, UQI))
DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, UQI))
DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (2, (V8SF, V4DI, V4DI))
DEF_LARCH_FTYPE (2, (V4SF, V2DI, V2DI))
+DEF_LARCH_FTYPE (1, (V4DI, V8SF))
DEF_LARCH_FTYPE (1, (V2DI, V4SF))
+DEF_LARCH_FTYPE (2, (V4DI, UQI, USI))
DEF_LARCH_FTYPE (2, (V2DI, UQI, USI))
+DEF_LARCH_FTYPE (2, (V4DI, UQI, UQI))
DEF_LARCH_FTYPE (2, (V2DI, UQI, UQI))
DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V16QI, CVPOINTER))
DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V8HI, CVPOINTER))
@@ -402,6 +615,17 @@ DEF_LARCH_FTYPE (2, (V16QI, SI, CVPOINTER))
DEF_LARCH_FTYPE (2, (V8HI, SI, CVPOINTER))
DEF_LARCH_FTYPE (2, (V4SI, SI, CVPOINTER))
DEF_LARCH_FTYPE (2, (V2DI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, V32QI, UQI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, V16HI, UQI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, V8SI, UQI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, V4DI, UQI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (3, (VOID, V32QI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V32QI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V16HI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V8SI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V4DI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (1, (V32QI, POINTER))
+DEF_LARCH_FTYPE (2, (VOID, V32QI, POINTER))
DEF_LARCH_FTYPE (2, (V8HI, UV16QI, V16QI))
DEF_LARCH_FTYPE (2, (V16QI, V16QI, UV16QI))
DEF_LARCH_FTYPE (2, (UV16QI, V16QI, UV16QI))
@@ -431,6 +655,33 @@ DEF_LARCH_FTYPE (3, (V4SI, V4SI, V16QI, V16QI))
DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV16QI, V16QI))
DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV16QI, UV16QI))
+
+DEF_LARCH_FTYPE(2,(V4DI,V16HI,V16HI))
+DEF_LARCH_FTYPE(2,(V4DI,UV4SI,V4SI))
+DEF_LARCH_FTYPE(2,(V8SI,UV16HI,V16HI))
+DEF_LARCH_FTYPE(2,(V16HI,UV32QI,V32QI))
+DEF_LARCH_FTYPE(2,(V4DI,UV8SI,V8SI))
+DEF_LARCH_FTYPE(3,(V4DI,V4DI,V16HI,V16HI))
+DEF_LARCH_FTYPE(2,(UV32QI,V32QI,UV32QI))
+DEF_LARCH_FTYPE(2,(UV16HI,V16HI,UV16HI))
+DEF_LARCH_FTYPE(2,(UV8SI,V8SI,UV8SI))
+DEF_LARCH_FTYPE(2,(UV4DI,V4DI,UV4DI))
+DEF_LARCH_FTYPE(3,(V4DI,V4DI,UV4DI,V4DI))
+DEF_LARCH_FTYPE(3,(V4DI,V4DI,UV8SI,V8SI))
+DEF_LARCH_FTYPE(3,(V8SI,V8SI,UV16HI,V16HI))
+DEF_LARCH_FTYPE(3,(V16HI,V16HI,UV32QI,V32QI))
+DEF_LARCH_FTYPE(2,(V4DI,UV4DI,V4DI))
+DEF_LARCH_FTYPE(2,(V8SI,V32QI,V32QI))
+DEF_LARCH_FTYPE(2,(UV4DI,UV16HI,UV16HI))
+DEF_LARCH_FTYPE(2,(V4DI,UV16HI,V16HI))
+DEF_LARCH_FTYPE(3,(V8SI,V8SI,V32QI,V32QI))
+DEF_LARCH_FTYPE(3,(UV8SI,UV8SI,UV32QI,UV32QI))
+DEF_LARCH_FTYPE(3,(UV4DI,UV4DI,UV16HI,UV16HI))
+DEF_LARCH_FTYPE(3,(V8SI,V8SI,UV32QI,V32QI))
+DEF_LARCH_FTYPE(3,(V4DI,V4DI,UV16HI,V16HI))
+DEF_LARCH_FTYPE(2,(UV8SI,UV32QI,UV32QI))
+DEF_LARCH_FTYPE(2,(V8SI,UV32QI,V32QI))
+
DEF_LARCH_FTYPE(4,(VOID,V16QI,CVPOINTER,SI,UQI))
DEF_LARCH_FTYPE(4,(VOID,V8HI,CVPOINTER,SI,UQI))
DEF_LARCH_FTYPE(4,(VOID,V4SI,CVPOINTER,SI,UQI))
@@ -448,11 +699,29 @@ DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, V8HI, USI))
DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, V4SI, USI))
DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, V2DI, USI))
+DEF_LARCH_FTYPE (2, (DI, V8SI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V8SI, UQI))
+
+DEF_LARCH_FTYPE (3, (UV32QI, UV32QI, V32QI, USI))
+DEF_LARCH_FTYPE (3, (UV16HI, UV16HI, V16HI, USI))
+DEF_LARCH_FTYPE (3, (UV8SI, UV8SI, V8SI, USI))
+DEF_LARCH_FTYPE (3, (UV4DI, UV4DI, V4DI, USI))
+
+DEF_LARCH_FTYPE(4,(VOID,V32QI,CVPOINTER,SI,UQI))
+DEF_LARCH_FTYPE(4,(VOID,V16HI,CVPOINTER,SI,UQI))
+DEF_LARCH_FTYPE(4,(VOID,V8SI,CVPOINTER,SI,UQI))
+DEF_LARCH_FTYPE(4,(VOID,V4DI,CVPOINTER,SI,UQI))
+
DEF_LARCH_FTYPE (1, (BOOLEAN,V16QI))
DEF_LARCH_FTYPE(2,(V16QI,CVPOINTER,CVPOINTER))
DEF_LARCH_FTYPE(3,(VOID,V16QI,CVPOINTER,CVPOINTER))
+DEF_LARCH_FTYPE(2,(V32QI,CVPOINTER,CVPOINTER))
+DEF_LARCH_FTYPE(3,(VOID,V32QI,CVPOINTER,CVPOINTER))
DEF_LARCH_FTYPE (3, (V16QI, V16QI, SI, UQI))
DEF_LARCH_FTYPE (3, (V2DI, V2DI, SI, UQI))
DEF_LARCH_FTYPE (3, (V2DI, V2DI, DI, UQI))
DEF_LARCH_FTYPE (3, (V4SI, V4SI, SI, UQI))
+
+DEF_LARCH_FTYPE (2, (V8SF, V8SF, UQI))
+DEF_LARCH_FTYPE (2, (V4DF, V4DF, UQI))
--
2.33.0
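
A note for readers of these hunks (not part of the patch itself): each
DEF_LARCH_FTYPE (N, (RET, ARG1, ..., ARGN)) entry registers an N-argument
builtin prototype, with the mode names following the usual GCC convention —
V8SI is a 256-bit vector of eight 32-bit integers, UV32QI an unsigned vector
of thirty-two 8-bit integers, UQI an unsigned byte (typically an immediate
operand), and the POINTER/CVPOINTER types back the vector load/store
builtins. For example, the new entry DEF_LARCH_FTYPE (2, (V8SI, V8SI, V8SI))
is exactly the shape of a two-operand 256-bit integer builtin such as
xvadd.w.

Below is a minimal sketch of what these prototypes enable at the source
level, assuming a GCC build that carries this LASX support and its
lasxintrin.h header (the intrinsic names are my reading of that header, not
taken from this patch); compile with -mlasx:

#include <stdio.h>
#include <lasxintrin.h>

int
main (void)
{
  /* Broadcast scalars into two vectors of eight 32-bit ints
     (xvreplgr2vr.w), then add them lane-wise (xvadd.w) — a builtin of
     the V8SI,V8SI,V8SI shape defined above.  */
  __m256i a = __lasx_xvreplgr2vr_w (3);
  __m256i b = __lasx_xvreplgr2vr_w (4);
  __m256i c = __lasx_xvadd_w (a, b);

  /* xvpickve2gr.w extracts one lane; the lane index must be an
     immediate, which is why such prototypes carry a UQI argument.  */
  printf ("lane0 = %d\n", __lasx_xvpickve2gr_w (c, 0));
  return 0;
}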