- /*******************************************************************************
- Copyright (c) 2024, The OpenBLAS Project
- All rights reserved.
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in
- the documentation and/or other materials provided with the
- distribution.
- 3. Neither the name of the OpenBLAS project nor the names of
- its contributors may be used to endorse or promote products
- derived from this software without specific prior written permission.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *******************************************************************************/
- #define ASSEMBLER
-
- #include "common.h"
-
-
- /* Function parameters */
- #define M $r4 // param 1: bm
- #define N $r5 // param 2: bn
- #define K $r6 // param 3: bk
- #define ALPHA_R $f0 // param 4: alphar
- #define ALPHA_I $f1 // param 5: alphai
- #define A $r7 // param 6: ba
- #define B $r8 // param 7: bb
- #define C $r9 // param 8: bc
- #define LDC $r10 // param 9: ldc
-
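- /* 4x4 double-complex (zgemm-type) GEMM/TRMM kernel using 128-bit LSX
-    vectors: each $vr register holds one complex double (re, im).  The
-    kernel computes a bm x bn block of C (+)= alpha*A*B from packed A/B
-    panels; with TRMMKERNEL defined the block is overwritten instead of
-    accumulated. */
-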
- #if defined (TRMMKERNEL)
- #define OFFSET $r11 // param 10: offset
- #endif
- #define OFF $r26
-
- #define I $r12
- #define J $r13
- #define L $r14
- #define TL $r15
- #define A0 $r16
- #define B0 $r17
- #define C0 $r18
- #define C1 $r19
- #define C2 $r20
- #define C3 $r23
- #define T0 $r24
- #define T1 $r25
- #define T2 $r26 // aliases OFF ($r26); T2 is not used below
- #define T3 $r27
-
- #define a1 $f2
- #define a2 $f3
- #define a3 $f4
- #define a4 $f5
- #define a5 $f6
- #define a6 $f7
- #define a7 $f8
- #define a8 $f9
- #define b1 $f10
- #define b2 $f11
- #define b3 $f12
- #define b4 $f13
- #define b5 $f14
- #define b6 $f15
- #define b7 $f16
- #define b8 $f17
- #define c11 $f18
- #define c12 $f19
- #define c21 $f20
- #define c22 $f21
- #define c31 $f22
- #define c32 $f23
- #define c41 $f24
- #define c42 $f25
-
- /* LSX vectors */
- #define U0 $vr30
- #define U1 $vr31
- #define U2 $vr2
- #define U3 $vr3
- #define U4 $vr4
- #define U5 $vr5
- #define U6 $vr6
- #define U7 $vr7
- #define U8 $vr8
- #define U9 $vr9
- #define U10 $vr10
- #define U11 $vr11
- #define U12 $vr12
- #define U13 $vr13
- #define U14 $vr14
- #define U15 $vr15
- #define D0 $vr16
- #define D1 $vr17
- #define D2 $vr18
- #define D3 $vr19
- #define D4 $vr20
- #define D5 $vr21
- #define D6 $vr22
- #define D7 $vr23
- #define D8 $vr24
- #define D9 $vr25
- #define D10 $vr26
- #define D11 $vr27
- #define D12 $vr28
- #define D13 $vr29
- #define VALPHAR $vr28 // aliases D12 (D12/D13 are not used below)
- #define VALPHAI $vr29 // aliases D13
-
-
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- #define VMADD1 VFMADD
- #define VMADD2 VFMADD
- #define VMADD3 VNMSUB
- #define VMADD4 VFMADD
-
- #define MADD1 MADD
- #define MADD2 MADD
- #define MADD3 NMSUB
- #define MADD4 MADD
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- #define VMADD1 VFMADD
- #define VMADD2 VFMADD
- #define VMADD3 VFMADD
- #define VMADD4 VNMSUB
-
- #define MADD1 MADD
- #define MADD2 MADD
- #define MADD3 MADD
- #define MADD4 NMSUB
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- #define VMADD1 VFMADD
- #define VMADD2 VNMSUB
- #define VMADD3 VFMADD
- #define VMADD4 VFMADD
-
- #define MADD1 MADD
- #define MADD2 NMSUB
- #define MADD3 MADD
- #define MADD4 MADD
- #endif
-
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
- #define VMADD1 VFMADD
- #define VMADD2 VNMSUB
- #define VMADD3 VNMSUB
- #define VMADD4 VNMSUB
-
- #define MADD1 MADD
- #define MADD2 NMSUB
- #define MADD3 NMSUB
- #define MADD4 NMSUB
- #endif
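-
- /* Complex multiply-accumulate scheme selected above.  Per element, with
-  * ar/ai = Re/Im of a and br/bi = Re/Im of b:
-  *
-  *   re += ar*br;   // MADD1/VMADD1: always FMADD
-  *   im += ai*br;   // MADD2/VMADD2: NMSUB when A is conjugated
-  *   re -= ai*bi;   // MADD3/VMADD3: flips to FMADD when exactly one
-  *                  //               operand is conjugated
-  *   im += ar*bi;   // MADD4/VMADD4: NMSUB when B is conjugated
-  */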
-
- PROLOGUE
-
- addi.d $sp, $sp, -128
- SDARG $r23, $sp, 0
- SDARG $r24, $sp, 8
- SDARG $r25, $sp, 16
- SDARG $r26, $sp, 24
- SDARG $r27, $sp, 32
- ST $f23, $sp, 40
- ST $f24, $sp, 48
- ST $f25, $sp, 56
- ST $f26, $sp, 64
- ST $f27, $sp, 72
- ST $f28, $sp, 80
- ST $f29, $sp, 88
- ST $f30, $sp, 96
- ST $f31, $sp, 104
- ST ALPHA_R, $sp, 112
- ST ALPHA_I, $sp, 120
-
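- /* alpha was spilled to the stack above so vldrepl.d can broadcast its
-    real and imaginary parts into both lanes of VALPHAR/VALPHAI. */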
- vldrepl.d VALPHAR, $sp, 112
- vldrepl.d VALPHAI, $sp, 120
-
- #if defined (TRMMKERNEL) && !defined(LEFT)
- sub.d OFF, $r0, OFFSET
- #else
- xor OFF, OFF, OFF
- #endif
-
- slli.d LDC, LDC, BASE_SHIFT
-
- move J, $r0
- srai.d T0, N, 2 //bn/4
- beq J, T0, .L19
-
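- /* Loop structure (sizes in complex elements, column-major C):
-  *   .L10:  for (j = 0; j < bn/4; j++)  -- 4-column panels
-  *          .L11 (bm/4), .L18 (bm & 2), .L183 (bm & 1)
-  *   .L20:  one 2-column panel if (bn & 2), same M tiling
-  *   .L300: one final column if (bn & 1)
-  */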
- .L10: /* for(j=0; j<bn/4; j+=1) */
- move C0, C
- slli.d TL, LDC, 1
- add.d C1, C0, TL
- add.d C2, C1, TL
- add.d C3, C2, TL
- move A0, A //ptrba
-
- #if defined(TRMMKERNEL) && defined(LEFT)
- move OFF, OFFSET
- #endif
-
- move I, $r0
- srai.d T0, M, 2 //bm/4
- beq I, T0, .L18
-
- .L11: /* for(i=0; i<bm/4; i+=1) */
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
-
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x06
- add.d A0, A0, T3
- slli.d T3, OFF, 0x06
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF //temp
- #elif defined(LEFT)
- addi.d TL, OFF, 4
- #else
- addi.d TL, OFF, 4
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
- vxor.v U2, U2, U2
- vxor.v U3, U3, U3
- vxor.v U4, U4, U4
- vxor.v U5, U5, U5
- vxor.v U6, U6, U6
- vxor.v U7, U7, U7
- vxor.v U8, U8, U8
- vxor.v U9, U9, U9
- vxor.v U10, U10, U10
- vxor.v U11, U11, U11
- vxor.v U12, U12, U12
- vxor.v U13, U13, U13
- vxor.v U14, U14, U14
- vxor.v U15, U15, U15
-
- move L, $r0 //loop counter k
- beq L, TL, .L13
- blt TL, L, .L13
-
- .L12: /* for(k=0; k<temp; k+=1) */
- vld D1, B0, 0x00 // b0ri
- vld D2, B0, 0x10 // b1ri
- vld D3, B0, 0x20 // b2ri
- vld D4, B0, 0x30 // b3ri
- vld D0, A0, 0x00 // a0ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a0rr
- vshuf4i.d D6, D0, 0x55 //a0ii
-
- vpackev.d D7, D2, D1 //b0r b1r
- vpackod.d D8, D2, D1 //b0i b1i
-
- vpackev.d D9, D4, D3 //b2r b3r
- vpackod.d D10, D4, D3 //b2i b3i
-
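- // D5/D6 broadcast a0's real (resp. imaginary) part to both lanes; D7/D8
- // pack the real (resp. imaginary) parts of b0/b1, D9/D10 those of b2/b3.
- // Each VMADD1..4 group thus accumulates, for one A element, the real
- // lanes (even-numbered U regs) and imaginary lanes (odd-numbered U regs)
- // of two C entries in adjacent columns.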
- VMADD1 U0, D5, D7, U0 //00r 10r
- VMADD2 U1, D6, D7, U1 //00i 10i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- VMADD1 U2, D5, D9, U2 //20r 30r
- VMADD2 U3, D6, D9, U3 //20i 30i
- VMADD3 U2, D6, D10, U2
- VMADD4 U3, D5, D10, U3
-
- vld D0, A0, 0x10 // a1ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a1rr
- vshuf4i.d D6, D0, 0x55 //a1ii
-
- VMADD1 U4, D5, D7, U4 //01r 11r
- VMADD2 U5, D6, D7, U5 //01i 11i
- VMADD3 U4, D6, D8, U4
- VMADD4 U5, D5, D8, U5
-
- VMADD1 U6, D5, D9, U6 //21r 31r
- VMADD2 U7, D6, D9, U7 //21i 31i
- VMADD3 U6, D6, D10, U6
- VMADD4 U7, D5, D10, U7
-
- vld D0, A0, 0x20 // a2ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a2rr
- vshuf4i.d D6, D0, 0x55 //a2ii
-
- VMADD1 U8, D5, D7, U8 //02r 12r
- VMADD2 U9, D6, D7, U9 //02i 12i
- VMADD3 U8, D6, D8, U8
- VMADD4 U9, D5, D8, U9
-
- VMADD1 U10, D5, D9, U10 //22r 32r
- VMADD2 U11, D6, D9, U11 //22i 32i
- VMADD3 U10, D6, D10, U10
- VMADD4 U11, D5, D10, U11
-
- vld D0, A0, 0x30 // a3ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a3rr
- vshuf4i.d D6, D0, 0x55 //a3ii
-
- VMADD1 U12, D5, D7, U12 //03r 13r
- VMADD2 U13, D6, D7, U13 //03i 13i
- VMADD3 U12, D6, D8, U12
- VMADD4 U13, D5, D8, U13
-
- VMADD1 U14, D5, D9, U14 //23r 33r
- VMADD2 U15, D6, D9, U15 //23i 33i
- VMADD3 U14, D6, D10, U14
- VMADD4 U15, D5, D10, U15
-
- addi.d A0, A0, 0x40
- addi.d B0, B0, 0x40
-
- addi.d L, L, 1
- blt L, TL, .L12
-
- .L13:
- #if defined(TRMMKERNEL)
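- // TRMM path: the C block is overwritten with alpha*A*B (vfmul, no
- // accumulate).  The vld/vpackev/vpackod of C below are vestigial:
- // D2/D3 are recomputed by vfmul before they are used.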
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res20 res30
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U2, VALPHAR
- vfmul.d D3, U3, VALPHAR
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U4, VALPHAR
- vfmul.d D3, U5, VALPHAR
- VNMSUB D2, U5, VALPHAI, D2
- VFMADD D3, U4, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res21 res31
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U6, VALPHAR
- vfmul.d D3, U7, VALPHAR
- VNMSUB D2, U7, VALPHAI, D2
- VFMADD D3, U6, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res02 res12
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U8, VALPHAR
- vfmul.d D3, U9, VALPHAR
- VNMSUB D2, U9, VALPHAI, D2
- VFMADD D3, U8, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res22 res32
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U10, VALPHAR
- vfmul.d D3, U11, VALPHAR
- VNMSUB D2, U11, VALPHAI, D2
- VFMADD D3, U10, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res03 res13
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U12, VALPHAR
- vfmul.d D3, U13, VALPHAR
- VNMSUB D2, U13, VALPHAI, D2
- VFMADD D3, U12, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res23 res33
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U14, VALPHAR
- vfmul.d D3, U15, VALPHAR
- VNMSUB D2, U15, VALPHAI, D2
- VFMADD D3, U14, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
- #else
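- // GEMM path: C is loaded and deinterleaved so D2 holds the two real
- // parts and D3 the two imaginary parts, updated with alpha*acc via
- // FMADD/NMSUB, then re-interleaved and stored back.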
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res20 res30
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U2, VALPHAR, D2
- VFMADD D3, U3, VALPHAR, D3
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U4, VALPHAR, D2
- VFMADD D3, U5, VALPHAR, D3
- VNMSUB D2, U5, VALPHAI, D2
- VFMADD D3, U4, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res21 res31
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U6, VALPHAR, D2
- VFMADD D3, U7, VALPHAR, D3
- VNMSUB D2, U7, VALPHAI, D2
- VFMADD D3, U6, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res02 res12
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U8, VALPHAR, D2
- VFMADD D3, U9, VALPHAR, D3
- VNMSUB D2, U9, VALPHAI, D2
- VFMADD D3, U8, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res22 res32
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U10, VALPHAR, D2
- VFMADD D3, U11, VALPHAR, D3
- VNMSUB D2, U11, VALPHAI, D2
- VFMADD D3, U10, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res03 res13
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U12, VALPHAR, D2
- VFMADD D3, U13, VALPHAR, D3
- VNMSUB D2, U13, VALPHAI, D2
- VFMADD D3, U12, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res23 res33
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U14, VALPHAR, D2
- VFMADD D3, U15, VALPHAR, D3
- VNMSUB D2, U15, VALPHAI, D2
- VFMADD D3, U14, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
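- // TRMM bookkeeping: step A0/B0 past the part of the panel skipped by
- // the triangular offset, and bump OFF by the M-block size when the
- // triangular operand is on the LEFT.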
-
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -4
- #else
- addi.d TL, TL, -4
- #endif
- slli.d T3, TL, 0x06
- add.d A0, A0, T3
- slli.d T3, TL, 0x06
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 4
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- addi.d I, I, 1
- blt I, T0, .L11
-
- .L18: /* if (bm & 2) */
- move I, $r0
- andi T0, M, 2
- beq I, T0, .L183
-
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x05
- add.d A0, A0, T3
- slli.d T3, OFF, 0x06
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 2
- #else
- addi.d TL, OFF, 4
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
- vxor.v U2, U2, U2
- vxor.v U3, U3, U3
- vxor.v U4, U4, U4
- vxor.v U5, U5, U5
- vxor.v U6, U6, U6
- vxor.v U7, U7, U7
-
- move L, $r0 //loop counter k
- beq L, TL, .L182
- blt TL, L, .L182
-
- .L181: /* for (k=0; k<temp; k++) */
- vld D1, B0, 0x00 // b0ri
- vld D2, B0, 0x10 // b1ri
- vld D3, B0, 0x20 // b2ri
- vld D4, B0, 0x30 // b3ri
- vld D0, A0, 0x00 // a0ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a0rr
- vshuf4i.d D6, D0, 0x55 //a0ii
-
- vpackev.d D7, D2, D1 //b0r b1r
- vpackod.d D8, D2, D1 //b0i b1i
-
- vpackev.d D9, D4, D3 //b2r b3r
- vpackod.d D10, D4, D3 //b2i b3i
-
- VMADD1 U0, D5, D7, U0 //00r 10r
- VMADD2 U1, D6, D7, U1 //00i 10i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- VMADD1 U2, D5, D9, U2 //20r 30r
- VMADD2 U3, D6, D9, U3 //20i 30i
- VMADD3 U2, D6, D10, U2
- VMADD4 U3, D5, D10, U3
-
- vld D0, A0, 0x10 // a1ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a1rr
- vshuf4i.d D6, D0, 0x55 //a1ii
-
- VMADD1 U4, D5, D7, U4 //01r 11r
- VMADD2 U5, D6, D7, U5 //01i 11i
- VMADD3 U4, D6, D8, U4
- VMADD4 U5, D5, D8, U5
-
- VMADD1 U6, D5, D9, U6 //21r 31r
- VMADD2 U7, D6, D9, U7 //21i 31i
- VMADD3 U6, D6, D10, U6
- VMADD4 U7, D5, D10, U7
-
- addi.d A0, A0, 0x20
- addi.d B0, B0, 0x40
-
- addi.d L, L, 1
- blt L, TL, .L181
-
- .L182:
- #if defined(TRMMKERNEL)
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res20 res30
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U2, VALPHAR
- vfmul.d D3, U3, VALPHAR
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U4, VALPHAR
- vfmul.d D3, U5, VALPHAR
- VNMSUB D2, U5, VALPHAI, D2
- VFMADD D3, U4, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res21 res31
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U6, VALPHAR
- vfmul.d D3, U7, VALPHAR
- VNMSUB D2, U7, VALPHAI, D2
- VFMADD D3, U6, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
- #else
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res20 res30
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U2, VALPHAR, D2
- VFMADD D3, U3, VALPHAR, D3
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U4, VALPHAR, D2
- VFMADD D3, U5, VALPHAR, D3
- VNMSUB D2, U5, VALPHAI, D2
- VFMADD D3, U4, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res21 res31
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U6, VALPHAR, D2
- VFMADD D3, U7, VALPHAR, D3
- VNMSUB D2, U7, VALPHAI, D2
- VFMADD D3, U6, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -2
- #else
- addi.d TL, TL, -4
- #endif
- slli.d T3, TL, 0x05
- add.d A0, A0, T3
- slli.d T3, TL, 0x06
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 2
- #endif
- #endif // #if defined(TRMMKERNEL)
-
- .L183: /* if (bm & 1) */
- move I, $r0
- andi T0, M, 1
- beq I, T0, .L186
-
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x04
- add.d A0, A0, T3
- slli.d T3, OFF, 0x06
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 1
- #else
- addi.d TL, OFF, 4
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
- vxor.v U2, U2, U2
- vxor.v U3, U3, U3
-
- move L, $r0 //loop counter k
- beq L, TL, .L185
- blt TL, L, .L185
-
- .L184: /* for (k=0; k<temp; k++) */
- vld D1, B0, 0x00 // b0ri
- vld D2, B0, 0x10 // b1ri
- vld D3, B0, 0x20 // b2ri
- vld D4, B0, 0x30 // b3ri
- vld D0, A0, 0x00 // a0ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a0rr
- vshuf4i.d D6, D0, 0x55 //a0ii
-
- vpackev.d D7, D2, D1 //b0r b1r
- vpackod.d D8, D2, D1 //b0i b1i
-
- vpackev.d D9, D4, D3 //b2r b3r
- vpackod.d D10, D4, D3 //b2i b3i
-
- VMADD1 U0, D5, D7, U0 //00r 10r
- VMADD2 U1, D6, D7, U1 //00i 10i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- VMADD1 U2, D5, D9, U2 //20r 30r
- VMADD2 U3, D6, D9, U3 //20i 30i
- VMADD3 U2, D6, D10, U2
- VMADD4 U3, D5, D10, U3
-
- addi.d A0, A0, 0x10
- addi.d B0, B0, 0x40
-
- addi.d L, L, 1
- blt L, TL, .L184
-
- .L185:
- #if defined(TRMMKERNEL)
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res20 res30
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- vfmul.d D2, U2, VALPHAR
- vfmul.d D3, U3, VALPHAR
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
- #else
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res20 res30
- vld D0, C2, 0x00 //c2: 0 1
- vld D1, C3, 0x00 //c3: 0 1
-
- vpackev.d D2, D1, D0 //c2[0] c3[0]
- vpackod.d D3, D1, D0 //c2[1] c3[1]
-
- VFMADD D2, U2, VALPHAR, D2
- VFMADD D3, U3, VALPHAR, D3
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c2[0] c2[1]
- vpackod.d D5, D3, D2 //c3[0] c3[1]
-
- vst D4, C2, 0x00
- vst D5, C3, 0x00
-
- addi.d C2, C2, 0x10
- addi.d C3, C3, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -1
- #else
- addi.d TL, TL, -4
- #endif
- slli.d T3, TL, 0x04
- add.d A0, A0, T3
- slli.d T3, TL, 0x06
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 1
- #endif
- #endif // #if defined(TRMMKERNEL)
-
-
- .L186:
- #if defined(TRMMKERNEL) && !defined(LEFT)
- addi.d OFF, OFF, 4
- #endif
-
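- // Next 4-column panel: B advances by bk*4 complex doubles (K << 6
- // bytes); C advances by 4 columns (LDC was pre-scaled by BASE_SHIFT,
- // so LDC << 3 bytes).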
- slli.d L, K, 0x06
- add.d B, B, L
-
- slli.d I, LDC, 0x03
- add.d C, C, I
-
- addi.d J, J, 1
- srai.d T0, N, 2
- blt J, T0, .L10
-
- .L19:
- move J, $r0
- andi T0, N, 2
- beq J, T0, .L30
-
- .L20: /* for (j=0; j<(bn&2); j+=2) */
- #if defined(TRMMKERNEL) && defined(LEFT)
- move OFF, OFFSET
- #endif
-
- move C0, C
- slli.d TL, LDC, 1
- add.d C1, C0, TL
- move A0, A //ptrba
-
- move I, $r0
- srai.d T0, M, 2 //bm/4
- beq I, T0, .L280
-
- .L21: /* for (i=0; i<bm/4; i+=1) */
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x06
- add.d A0, A0, T3
- slli.d T3, OFF, 0x05
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 4
- #else
- addi.d TL, OFF, 2
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
- vxor.v U2, U2, U2
- vxor.v U3, U3, U3
- vxor.v U4, U4, U4
- vxor.v U5, U5, U5
- vxor.v U6, U6, U6
- vxor.v U7, U7, U7
-
- move L, $r0 //loop counter k
- beq L, TL, .L23
- blt TL, L, .L23
-
- .L22: /* for (k=0; k<temp; k++) */
- vld D1, B0, 0x00 // b0ri
- vld D2, B0, 0x10 // b1ri
- vld D0, A0, 0x00 // a0ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a0rr
- vshuf4i.d D6, D0, 0x55 //a0ii
-
- vpackev.d D7, D2, D1 //b0r b1r
- vpackod.d D8, D2, D1 //b0i b1i
-
- VMADD1 U0, D5, D7, U0 //00r 10r
- VMADD2 U1, D6, D7, U1 //00i 10i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- vld D0, A0, 0x10 // a1ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a1rr
- vshuf4i.d D6, D0, 0x55 //a1ii
-
- VMADD1 U2, D5, D7, U2 //01r 11r
- VMADD2 U3, D6, D7, U3 //01i 11i
- VMADD3 U2, D6, D8, U2
- VMADD4 U3, D5, D8, U3
-
- vld D0, A0, 0x20 // a2ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a2rr
- vshuf4i.d D6, D0, 0x55 //a2ii
-
- VMADD1 U4, D5, D7, U4 //02r 12r
- VMADD2 U5, D6, D7, U5 //02i 12i
- VMADD3 U4, D6, D8, U4
- VMADD4 U5, D5, D8, U5
-
- vld D0, A0, 0x30 // a3ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a3rr
- vshuf4i.d D6, D0, 0x55 //a3ii
-
- VMADD1 U6, D5, D7, U6 //03r 13r
- VMADD2 U7, D6, D7, U7 //03i 13i
- VMADD3 U6, D6, D8, U6
- VMADD4 U7, D5, D8, U7
-
- addi.d A0, A0, 0x40
- addi.d B0, B0, 0x20
-
- addi.d L, L, 1
- blt L, TL, .L22
-
- .L23:
- #if defined(TRMMKERNEL)
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U2, VALPHAR
- vfmul.d D3, U3, VALPHAR
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res02 res12
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U4, VALPHAR
- vfmul.d D3, U5, VALPHAR
- VNMSUB D2, U5, VALPHAI, D2
- VFMADD D3, U4, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res03 res13
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U6, VALPHAR
- vfmul.d D3, U7, VALPHAR
- VNMSUB D2, U7, VALPHAI, D2
- VFMADD D3, U6, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
- #else
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U2, VALPHAR, D2
- VFMADD D3, U3, VALPHAR, D3
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res02 res12
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U4, VALPHAR, D2
- VFMADD D3, U5, VALPHAR, D3
- VNMSUB D2, U5, VALPHAI, D2
- VFMADD D3, U4, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res03 res13
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U6, VALPHAR, D2
- VFMADD D3, U7, VALPHAR, D3
- VNMSUB D2, U7, VALPHAI, D2
- VFMADD D3, U6, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -4
- #else
- addi.d TL, TL, -2
- #endif
- slli.d T3, TL, 0x06
- add.d A0, A0, T3
- slli.d T3, TL, 0x05
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 4
- #endif
- #endif // #if defined(TRMMKERNEL)
-
- addi.d I, I, 1
- blt I, T0, .L21
-
- .L280: /* if ( bm & 2 )*/
- move I, $r0
- andi T1, M, 2 //bm&2
- beq I, T1, .L284
-
- .L281:
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x05
- add.d A0, A0, T3
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 2
- #else
- addi.d TL, OFF, 2
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
- vxor.v U2, U2, U2
- vxor.v U3, U3, U3
-
- move L, $r0 //loop counter k
- beq L, TL, .L283
- blt TL, L, .L283
-
- .L282: /* for (k=0; k<temp; k++) */
- vld D1, B0, 0x00 // b0ri
- vld D2, B0, 0x10 // b1ri
- vld D0, A0, 0x00 // a0ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a0rr
- vshuf4i.d D6, D0, 0x55 //a0ii
-
- vpackev.d D7, D2, D1 //b0r b1r
- vpackod.d D8, D2, D1 //b0i b1i
-
- VMADD1 U0, D5, D7, U0 //00r 10r
- VMADD2 U1, D6, D7, U1 //00i 10i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- vld D0, A0, 0x10 // a1ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a1rr
- vshuf4i.d D6, D0, 0x55 //a1ii
-
- VMADD1 U2, D5, D7, U2 //01r 11r
- VMADD2 U3, D6, D7, U3 //01i 11i
- VMADD3 U2, D6, D8, U2
- VMADD4 U3, D5, D8, U3
-
- addi.d A0, A0, 0x20
- addi.d B0, B0, 0x20
-
- addi.d L, L, 1
- blt L, TL, .L282
-
- .L283:
- #if defined(TRMMKERNEL)
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U2, VALPHAR
- vfmul.d D3, U3, VALPHAR
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
- #else
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
-
- //res01 res11
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U2, VALPHAR, D2
- VFMADD D3, U3, VALPHAR, D3
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -2
- #else
- addi.d TL, TL, -2
- #endif
- slli.d T3, TL, 0x05
- add.d A0, A0, T3
- slli.d T3, TL, 0x05
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 2
- #endif
- #endif // #if defined(TRMMKERNEL)
-
- .L284: /* if ( bm & 1 )*/
- move I, $r0
- andi T1, M, 1 //bm&1
- beq I, T1, .L288
-
- .L285:
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x04
- add.d A0, A0, T3
- slli.d T3, OFF, 0x05
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 1
- #else
- addi.d TL, OFF, 2
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
-
- move L, $r0 //loop counter k
- beq L, TL, .L287
- blt TL, L, .L287
-
- .L286: /* for (k=0; k<temp; k++) */
- vld D1, B0, 0x00 // b0ri
- vld D2, B0, 0x10 // b1ri
- vld D0, A0, 0x00 // a0ri
-
- vand.v D5, D0, D0
- vand.v D6, D0, D0
- vshuf4i.d D5, D0, 0x00 //a0rr
- vshuf4i.d D6, D0, 0x55 //a0ii
-
- vpackev.d D7, D2, D1 //b0r b1r
- vpackod.d D8, D2, D1 //b0i b1i
-
- VMADD1 U0, D5, D7, U0 //00r 10r
- VMADD2 U1, D6, D7, U1 //00i 10i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- addi.d A0, A0, 0x10
- addi.d B0, B0, 0x20
-
- addi.d L, L, 1
- blt L, TL, .L286
-
- .L287:
- #if defined(TRMMKERNEL)
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
- #else
- //res00 res10
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C1, 0x00 //c1: 0 1
-
- vpackev.d D2, D1, D0 //c0[0] c1[0]
- vpackod.d D3, D1, D0 //c0[1] c1[1]
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0[0] c0[1]
- vpackod.d D5, D3, D2 //c1[0] c1[1]
-
- vst D4, C0, 0x00
- vst D5, C1, 0x00
-
- addi.d C0, C0, 0x10
- addi.d C1, C1, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -1
- #else
- addi.d TL, TL, -2
- #endif
- slli.d T3, TL, 0x04
- add.d A0, A0, T3
- slli.d T3, TL, 0x05
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 1
- #endif
- #endif // #if defined(TRMMKERNEL)
-
- .L288:
- #if defined(TRMMKERNEL) && !defined(LEFT)
- addi.d OFF, OFF, 2
- #endif
- slli.d L, K, 5
- add.d B, B, L
-
- slli.d I, LDC, 2
- add.d C, C, I
-
- addi.d J, J, 2
- andi T0, N, 2
- blt J, T0, .L20
-
- .L30:
- move J, $r0
- andi T0, N, 1
- beq J, T0, .L999
-
- .L300: /* for (j=0; j<(bn&1); j+=1) */
- #if defined(TRMMKERNEL) && defined(LEFT)
- move OFF, OFFSET
- #endif
-
- move C0, C
- move A0, A //ptrba
-
- move I, $r0
- srai.d T0, M, 2 //bm/4
- beq I, T0, .L38
-
- .L31: /* for (i=0; i<bm/4; i+=1) */
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x06
- add.d A0, A0, T3
- slli.d T3, OFF, 0x04
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 4
- #else
- addi.d TL, OFF, 1
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
- vxor.v U2, U2, U2
- vxor.v U3, U3, U3
-
- move L, $r0 //loop counter k
- beq L, TL, .L33
- blt TL, L, .L33
-
- .L32: /* for (k=0; k<temp; k++) */
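- // Single-column panel: roles are swapped relative to .L12 -- the lone
- // b0 element is broadcast (D7/D8) while pairs of A elements are packed
- // (D5/D6), yielding two rows of the output column per VMADD group.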
- vld D1, B0, 0x00 // b0ri
-
- vld D0, A0, 0x00 // a0ri
- vld D2, A0, 0x10 // a1ri
-
- vpackev.d D5, D2, D0 //a0r a1r
- vpackod.d D6, D2, D0 //a0i a1i
-
- vand.v D7, D1, D1
- vand.v D8, D1, D1
- vshuf4i.d D7, D1, 0x00 //b0rr
- vshuf4i.d D8, D1, 0x55 //b0ii
-
- VMADD1 U0, D5, D7, U0 //00r 01r
- VMADD2 U1, D6, D7, U1 //00i 01i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- vld D0, A0, 0x20 // a0ri
- vld D2, A0, 0x30 // a1ri
-
- vpackev.d D5, D2, D0 //a0r a1r
- vpackod.d D6, D2, D0 //a0i a1i
-
- VMADD1 U2, D5, D7, U2 //02r 03r
- VMADD2 U3, D6, D7, U3 //02i 03i
- VMADD3 U2, D6, D8, U2
- VMADD4 U3, D5, D8, U3
-
- addi.d A0, A0, 0x40
- addi.d B0, B0, 0x10
-
- addi.d L, L, 1
- blt L, TL, .L32
-
- .L33:
- #if defined(TRMMKERNEL)
- //res00 res01
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C0, 0x10 //c0: 2 3
-
- vpackev.d D2, D1, D0 //c0: 0 2
- vpackod.d D3, D1, D0 //c0: 1 3
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0: 0 1
- vpackod.d D5, D3, D2 //c0: 2 3
-
- vst D4, C0, 0x00
- vst D5, C0, 0x10
-
- addi.d C0, C0, 0x20
-
- //res02 res03
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C0, 0x10 //c0: 2 3
-
- vpackev.d D2, D1, D0 //c0: 0 2
- vpackod.d D3, D1, D0 //c0: 1 3
-
- vfmul.d D2, U2, VALPHAR
- vfmul.d D3, U3, VALPHAR
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0: 0 1
- vpackod.d D5, D3, D2 //c0: 2 3
-
- vst D4, C0, 0x00
- vst D5, C0, 0x10
-
- addi.d C0, C0, 0x20
- #else
- //res00 res01
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C0, 0x10 //c0: 2 3
-
- vpackev.d D2, D1, D0 //c0: 0 2
- vpackod.d D3, D1, D0 //c0: 1 3
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0: 0 1
- vpackod.d D5, D3, D2 //c0: 2 3
-
- vst D4, C0, 0x00
- vst D5, C0, 0x10
-
- addi.d C0, C0, 0x20
-
- //res02 res03
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C0, 0x10 //c0: 2 3
-
- vpackev.d D2, D1, D0 //c0: 0 2
- vpackod.d D3, D1, D0 //c0: 1 3
-
- VFMADD D2, U2, VALPHAR, D2
- VFMADD D3, U3, VALPHAR, D3
- VNMSUB D2, U3, VALPHAI, D2
- VFMADD D3, U2, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0: 0 1
- vpackod.d D5, D3, D2 //c0: 2 3
-
- vst D4, C0, 0x00
- vst D5, C0, 0x10
-
- addi.d C0, C0, 0x20
- #endif
-
- #if defined(TRMMKERNEL)
-
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -4
- #else
- addi.d TL, TL, -1
- #endif
- slli.d T3, TL, 0x06
- add.d A0, A0, T3
- slli.d T3, TL, 0x04
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 4
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- addi.d I, I, 1
- blt I, T0, .L31
-
- .L38: /* if ( bm & 2 ) */
- move I, $r0
- andi T1, M, 2 //bm&2
- beq I, T1, .L312
-
- .L39:
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x05
- add.d A0, A0, T3
- slli.d T3, OFF, 0x04
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 2
- #else
- addi.d TL, OFF, 1
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
- vxor.v U0, U0, U0
- vxor.v U1, U1, U1
-
- move L, $r0 //loop counter k
- beq L, TL, .L311
- blt TL, L, .L311
-
- .L310: /* for (k=0; k<temp; k++) */
- vld D1, B0, 0x00 // b0ri
-
- vld D0, A0, 0x00 // a0ri
- vld D2, A0, 0x10 // a1ri
-
- vpackev.d D5, D2, D0 //a0r a1r
- vpackod.d D6, D2, D0 //a0i a1i
-
- vand.v D7, D1, D1
- vand.v D8, D1, D1
- vshuf4i.d D7, D1, 0x00 //b0rr
- vshuf4i.d D8, D1, 0x55 //b0ii
-
- VMADD1 U0, D5, D7, U0 //00r 01r
- VMADD2 U1, D6, D7, U1 //00i 01i
- VMADD3 U0, D6, D8, U0
- VMADD4 U1, D5, D8, U1
-
- addi.d A0, A0, 0x20
- addi.d B0, B0, 0x10
-
- addi.d L, L, 1
- blt L, TL, .L310
-
- .L311:
- #if defined(TRMMKERNEL)
- //res00 res01
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C0, 0x10 //c0: 2 3
-
- vpackev.d D2, D1, D0 //c0: 0 2
- vpackod.d D3, D1, D0 //c0: 1 3
-
- vfmul.d D2, U0, VALPHAR
- vfmul.d D3, U1, VALPHAR
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0: 0 1
- vpackod.d D5, D3, D2 //c0: 2 3
-
- vst D4, C0, 0x00
- vst D5, C0, 0x10
-
- addi.d C0, C0, 0x20
- #else
- //res00 res01
- vld D0, C0, 0x00 //c0: 0 1
- vld D1, C0, 0x10 //c0: 2 3
-
- vpackev.d D2, D1, D0 //c0: 0 2
- vpackod.d D3, D1, D0 //c0: 1 3
-
- VFMADD D2, U0, VALPHAR, D2
- VFMADD D3, U1, VALPHAR, D3
- VNMSUB D2, U1, VALPHAI, D2
- VFMADD D3, U0, VALPHAI, D3
-
- vpackev.d D4, D3, D2 //c0: 0 1
- vpackod.d D5, D3, D2 //c0: 2 3
-
- vst D4, C0, 0x00
- vst D5, C0, 0x10
-
- addi.d C0, C0, 0x20
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -2
- #else
- addi.d TL, TL, -1
- #endif
- slli.d T3, TL, 0x05
- add.d A0, A0, T3
- slli.d T3, TL, 0x04
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 2
- #endif
- #endif // #if defined(TRMMKERNEL)
-
- .L312: /* if ( bm & 1 )*/
- move I, $r0
- andi T1, M, 1 //bm&1
- beq I, T1, .L316
-
- .L313:
- move B0, B //ptrbb
- move TL, K /* TL = bk */
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) ||(!defined(LEFT) && !defined(TRANSA))
- move B0, B //ptrbb
- #else
- slli.d T3, OFF, 0x04
- add.d A0, A0, T3
- slli.d T3, OFF, 0x04
- add.d B0, B, T3
- #endif
-
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- sub.d TL, K, OFF
- #elif defined(LEFT)
- addi.d TL, OFF, 1
- #else
- addi.d TL, OFF, 1
- #endif
-
- #endif // #if defined(TRMMKERNEL)
-
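- // 1x1 tail: plain scalar complex multiply-accumulate in FP registers,
- // using the scalar MADD1..4 variants selected above.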
- MTC c11, $r0
- MTC c12, $r0
-
- move L, $r0 //loop counter k
- beq L, TL, .L315
- blt TL, L, .L315
-
- .L314: /* for (k=0; k<temp; k++) */
- LD a1, A0, 0x00
- LD a2, A0, 0x08
-
- LD b1, B0, 0x00
- LD b2, B0, 0x08
-
- MADD1 c11, a1, b1, c11
- MADD2 c12, a2, b1, c12
- MADD3 c11, a2, b2, c11
- MADD4 c12, a1, b2, c12
-
- addi.d A0, A0, 0x10
- addi.d B0, B0, 0x10
-
- addi.d L, L, 1
- blt L, TL, .L314
-
- .L315:
- #if defined(TRMMKERNEL)
- MUL a5, c11, ALPHA_R
- MUL a6, c12, ALPHA_I
- SUB a5, a5, a6
- ST a5, C0, 0x00
-
- MUL a5, c12, ALPHA_R
- MUL a6, c11, ALPHA_I
- ADD a6, a5, a6
- ST a6, C0, 0x08
- #else
- LD a5, C0, 0x00 //C0[0]
- LD a6, C0, 0x08 //C0[1]
-
- MADD a5, c11, ALPHA_R, a5
- MADD a6, c12, ALPHA_R, a6
- NMSUB a5, c12, ALPHA_I, a5
- MADD a6, c11, ALPHA_I, a6
-
- ST a5, C0, 0x00
- ST a6, C0, 0x08
-
- addi.d C0, C0, 0x10
- #endif
-
- #if defined(TRMMKERNEL)
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- sub.d TL, K, OFF
- #ifdef LEFT
- addi.d TL, TL, -1
- #else
- addi.d TL, TL, -1
- #endif
- slli.d T3, TL, 0x04
- add.d A0, A0, T3
- add.d B0, B0, T3
- #endif
-
- #ifdef LEFT
- addi.d OFF, OFF, 1
- #endif
- #endif // #if defined(TRMMKERNEL)
-
- .L316:
- slli.d L, K, 4
- add.d B, B, L
-
- slli.d I, LDC, 1
- add.d C, C, I
-
- addi.d J, J, 1
- andi T0, N, 1
- blt J, T0, .L300
-
- .L999:
- LDARG $r23, $sp, 0
- LDARG $r24, $sp, 8
- LDARG $r25, $sp, 16
- LDARG $r26, $sp, 24
- LDARG $r27, $sp, 32
- LD $f23, $sp, 40
- LD $f24, $sp, 48
- LD $f25, $sp, 56
- LD $f26, $sp, 64
- LD $f27, $sp, 72
- LD $f28, $sp, 80
- LD $f29, $sp, 88
- LD $f30, $sp, 96
- LD $f31, $sp, 104
-
- addi.d $sp, $sp, 128
- jirl $r0, $r1, 0x0
-
- EPILOGUE
|