- /****************************************Implementation**Details**********************************************/
- /*                                                                                                           */
- /* Let (a,a1i) denote the complex number a+a1*i.                                                             */
- /* Complex number multiplication: (a,a1i)*(b,b1i)                                                            */
- /* Since i*i=-1, the product is:                                                                             */
- /* (a+a1*i)(b+b1*i)=a*b+a1*i*b1*i+a1*i*b+a*b1*i=a*b-a1*b1+(a1*b+a*b1)*i, which is (ab-a1b1,a1b+ab1)          */
- /* So let c=ab-a1b1 and ci=a1b+ab1. Accumulating with the fused multiply instructions:                       */
- /* c=c+a*b-a1*b1 => c=a*b-(a1*b1-c) => c=a1*b1-c then c=a*b-c (two msdb)                                     */
- /* ci=ci+a1*b+a*b1 => ci=a1*b+ci then ci=a*b1+ci (two madb)                                                  */
- /* For SIMD the real and imaginary parts are grouped together,                                               */
- /* e.g. (realA,realK) and (imagA,imagK):                                                                     */
- /* Simd(0,1)=(a*b,k*b)-((ai*bi,ki*bi)-Simd(0,1))                                                             */
- /* SimdI(0,1)=SimdI(0,1)+(a*bi,k*bi)+(ai*b,ki*b)                                                             */
- /*                                                                                                           */
- /*                                                                                                           */
- /* For defined(NR) || defined(NC) || defined(TR) || defined(TC), i.e. B conjugated:                          */
- /* (a+a1*I)(b-b1*I)=ab+a1*b1+I(a1b-ab1)                                                                      */
- /*                                                                                                           */
- /* c=c+ab+a1b1 => c=a1b1+c; c=ab+c                                                                           */
- /* ci=ci+a1b-ab1 => ci=a1*b-(ab1-ci) => ci=ab1-ci; ci=a1*b-ci                                                */
- /*                                                                                                           */
- /*                                                                                                           */
- /* For defined(RN) || defined(RT) || defined(CN) || defined(CT), i.e. A conjugated:                          */
- /* (a-a1*I)(b+b1*I)=ab+a1*b1+I(-a1b+ab1)                                                                     */
- /*                                                                                                           */
- /* c=c+ab+a1b1 => c=a1b1+c; c=ab+c                                                                           */
- /* ci=ci-a1b+ab1 => ci=a*b1-(a1b-ci) => ci=a1b-ci; ci=a*b1-ci                                                */
- /*                                                                                                           */
- /*                                                                                                           */
- /* For defined(RR) || defined(RC) || defined(CR) || defined(CC), i.e. both conjugated:                       */
- /* (a-a1*I)(b-b1*I)=ab-a1*b1+I(-a1b-ab1)                                                                     */
- /*                                                                                                           */
- /* The real part could be formed directly as c=a1*b1-c then c=a*b-c,                                         */
- /* but ci=ci-a1*b-a*b1 cannot; as IBM z13 only has fused x*z-m (msdb) and x*z+m (madb)                       */
- /* instructions, the implementation is changed a bit:                                                        */
- /* assuming ci=0, accumulating cix=cix+a1b+ab1 and then taking ci=ci-cix works:                              */
- /* cix=a*b1+cix; cix=a1*b+cix (two madb), then ci=ci-cix is a plain sign change when ci=0                    */
- /* For symmetry c also starts at 0 and is accumulated negated:                                               */
- /* c=a*b-c then c=a1*b1-c => c=a1*b1-(a*b-c), which is -1*(a*b-(a1*b1-c))                                    */
- /*                                                                                                           */
- /* The accumulated values are therefore equal to (-c) and (-ci).                                             */
- /* To restore the sign, the result is multiplied by the negated alpha, -1*(alpha+alpha_i*I).                 */
- /* This is done once up front:                                                                               */
- /* lcdbr ALPHA_I,ALPHA_I                                                                                     */
- /* lcdbr ALPHA ,ALPHA                                                                                        */
- /*************************************************************************************************************/
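-
- /* For reference, the base accumulation scheme can be modelled in plain C using only the
-    fused operations, with fma(x,y,z)=x*y+z from <math.h>. This is an illustrative sketch
-    for this comment only; the helper name acc_nn is made up and not part of the kernel:
-
-        #include <math.h>
-
-        // NN/NT/TN/TT: c += a*b - a1*b1 ; ci += a1*b + a*b1
-        static void acc_nn(double *c, double *ci, double a, double a1, double b, double b1)
-        {
-            *c  = fma(a1, b1, -*c);   // msdb: c  = a1*b1 - c
-            *c  = fma(a,  b,  -*c);   // msdb: c  = a*b  - c  => c_old + a*b - a1*b1
-            *ci = fma(a1, b,  *ci);   // madb: ci = a1*b + ci
-            *ci = fma(a,  b1, *ci);   // madb: ci = a*b1 + ci
-        }
-
-    The conjugated variants permute the same four fused operations as derived above, and
-    RR/RC/CR/CC accumulates (-c,-ci), relying on the negated alpha at store time. */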
-
- /*************************Zero vectors***************************************/
- /*zero vectors for 4x4 */
- .macro ZERO_ZCVEC_4x4
- vzero %v16
- vzero %v17
- vzero %v18
- vzero %v19
- vzero %v20
- vzero %v21
- vzero %v22
- vzero %v23
- vzero %v24
- vzero %v25
- vzero %v26
- vzero %v27
- vzero %v28
- vzero %v29
- vzero %v30
- vzero %v31
- .endm
-
- /*zero vectors for 2x4 */
- .macro ZERO_ZCVEC_2x4
- vzero %v16
- vzero %v17
- vzero %v18
- vzero %v19
- vzero %v20
- vzero %v21
- vzero %v22
- vzero %v23
- .endm
-
- /*zero vectors for 1x4 */
- .macro ZERO_ZCVEC_1x4
- vzero %v16
- vzero %v17
- vzero %v18
- vzero %v19
- .endm
-
- /*zero vectors for 4x2 (same registers as 2x4) */
- .macro ZERO_ZCVEC_4x2
- ZERO_ZCVEC_2x4
- .endm
-
- /*zero vectors for 4x1 (same registers as 1x4) */
- .macro ZERO_ZCVEC_4x1
- ZERO_ZCVEC_1x4
- .endm
-
- /*zero vectors for 2x2 */
- .macro ZERO_ZCVEC_2x2
- vzero %v16
- vzero %v17
- vzero %v20
- vzero %v21
- .endm
-
- /*zero vectors for 1x2 */
- .macro ZERO_ZCVEC_1x2
- vzero %v16
- vzero %v17
- .endm
-
- /*zero vectors for 2x1 */
- .macro ZERO_ZCVEC_2x1
- vzero %v16
- vzero %v17
- .endm
-
- /*zero vectors for 1x1*/
- .macro ZERO_ZCVEC_1x1
- lzdr %f6
- lzdr %f7
- .endm
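-
- /* %v16-%v31 serve as the accumulators: the 4x4 tile uses 8 real + 8 imaginary 2-lane
-    vectors (16 double-complex partial sums), the smaller tiles use subsets of them, and
-    the 1x1 case accumulates in the scalar FPRs %f6/%f7 instead. */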
-
-
- /*
- Calculate for 4x2 inner
- */
- .macro CalcComplex_4x2 vResR1, vResI1, vResR2, vResI2, vResR3, vResI3, vResR4, vResI4, vr1, vi1, vr2, vi2, vrB, viB,vrB2, viB2
-
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vResR1, \vi1, \viB, \vResR1
- vfmadb \vResI1, \vr1, \viB, \vResI1
- vfmsdb \vResR2, \vi2, \viB, \vResR2
- vfmadb \vResI2, \vr2, \viB, \vResI2
-
- vfmsdb \vResR3, \vi1, \viB2, \vResR3
- vfmadb \vResI3, \vr1, \viB2, \vResI3
- vfmsdb \vResR4, \vi2, \viB2, \vResR4
- vfmadb \vResI4, \vr2, \viB2, \vResI4
-
- vfmsdb \vResR1, \vr1, \vrB, \vResR1
- vfmadb \vResI1, \vi1, \vrB, \vResI1
- vfmsdb \vResR2, \vr2, \vrB, \vResR2
- vfmadb \vResI2, \vi2, \vrB, \vResI2
-
- vfmsdb \vResR3, \vr1, \vrB2, \vResR3
- vfmadb \vResI3, \vi1, \vrB2, \vResI3
- vfmsdb \vResR4, \vr2, \vrB2, \vResR4
- vfmadb \vResI4, \vi2, \vrB2, \vResI4
-
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- vfmadb \vResR1, \vi1, \viB, \vResR1
- vfmsdb \vResI1, \vr1, \viB, \vResI1
- vfmadb \vResR2, \vi2, \viB, \vResR2
- vfmsdb \vResI2, \vr2, \viB, \vResI2
-
- vfmadb \vResR3, \vi1, \viB2, \vResR3
- vfmsdb \vResI3, \vr1, \viB2, \vResI3
- vfmadb \vResR4, \vi2, \viB2, \vResR4
- vfmsdb \vResI4, \vr2, \viB2, \vResI4
-
- vfmadb \vResR1, \vr1, \vrB, \vResR1
- vfmsdb \vResI1, \vi1, \vrB, \vResI1
- vfmadb \vResR2, \vr2, \vrB, \vResR2
- vfmsdb \vResI2, \vi2, \vrB, \vResI2
-
- vfmadb \vResR3, \vr1, \vrB2, \vResR3
- vfmsdb \vResI3, \vi1, \vrB2, \vResI3
- vfmadb \vResR4, \vr2, \vrB2, \vResR4
- vfmsdb \vResI4, \vi2, \vrB2, \vResI4
-
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- vfmadb \vResR1, \vi1, \viB, \vResR1
- vfmsdb \vResI1, \vi1, \vrB, \vResI1
- vfmadb \vResR2, \vi2, \viB, \vResR2
- vfmsdb \vResI2, \vi2, \vrB, \vResI2
-
- vfmadb \vResR3, \vi1, \viB2, \vResR3
- vfmsdb \vResI3, \vi1, \vrB2, \vResI3
- vfmadb \vResR4, \vi2, \viB2, \vResR4
- vfmsdb \vResI4, \vi2, \vrB2, \vResI4
-
- vfmadb \vResR1, \vr1, \vrB, \vResR1
- vfmsdb \vResI1, \vr1, \viB, \vResI1
- vfmadb \vResR2, \vr2, \vrB, \vResR2
- vfmsdb \vResI2, \vr2, \viB, \vResI2
-
- vfmadb \vResR3, \vr1, \vrB2, \vResR3
- vfmsdb \vResI3, \vr1, \viB2, \vResI3
- vfmadb \vResR4, \vr2, \vrB2, \vResR4
- vfmsdb \vResI4, \vr2, \viB2, \vResI4
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
-
- vfmsdb \vResR1, \vr1, \vrB, \vResR1
- vfmadb \vResI1, \vi1, \vrB, \vResI1
- vfmsdb \vResR2, \vr2, \vrB, \vResR2
- vfmadb \vResI2, \vi2, \vrB, \vResI2
-
- vfmsdb \vResR3, \vr1, \vrB2, \vResR3
- vfmadb \vResI3, \vi1, \vrB2, \vResI3
- vfmsdb \vResR4, \vr2, \vrB2, \vResR4
- vfmadb \vResI4, \vi2, \vrB2, \vResI4
-
- vfmsdb \vResR1, \vi1, \viB, \vResR1
- vfmadb \vResI1, \vr1, \viB, \vResI1
- vfmsdb \vResR2, \vi2, \viB, \vResR2
- vfmadb \vResI2, \vr2, \viB, \vResI2
-
- vfmsdb \vResR3, \vi1, \viB2, \vResR3
- vfmadb \vResI3, \vr1, \viB2, \vResI3
- vfmsdb \vResR4, \vi2, \viB2, \vResR4
- vfmadb \vResI4, \vr2, \viB2, \vResI4
-
-
- #endif
-
- .endm
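-
- /* One CalcComplex_4x2 invocation accumulates a 4x2 tile of partial products for a single
-    k step; each (vResR,vResI) pair holds two complex accumulators split into real and
-    imaginary lanes. A scalar C model of the NN path (illustrative names only):
-
-        typedef struct { double re, im; } zdouble;
-
-        static void calc_complex_4x2_nn(zdouble acc[4][2], const zdouble a[4], const zdouble b[2])
-        {
-            for (int i = 0; i < 4; i++)
-                for (int j = 0; j < 2; j++) {
-                    acc[i][j].re += a[i].re * b[j].re - a[i].im * b[j].im;
-                    acc[i][j].im += a[i].im * b[j].re + a[i].re * b[j].im;
-                }
-        }
- */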
-
- /*
- Calculate for 2x4 inner
- */
- .macro CalcComplex_2x4 vResR1, vResI1, vResR2, vResI2, vResR3, vResI3, vResR4, vResI4, vr1, vi1, vr2, vi2, vrB, viB,vrB2, viB2
-
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vResR1, \vi1, \viB, \vResR1
- vfmadb \vResI1, \vr1, \viB, \vResI1
- vfmsdb \vResR2, \vi2, \viB, \vResR2
- vfmadb \vResI2, \vr2, \viB, \vResI2
-
- vfmsdb \vResR3, \vi1, \viB2, \vResR3
- vfmadb \vResI3, \vr1, \viB2, \vResI3
- vfmsdb \vResR4, \vi2, \viB2, \vResR4
- vfmadb \vResI4, \vr2, \viB2, \vResI4
-
- vfmsdb \vResR1, \vr1, \vrB, \vResR1
- vfmadb \vResI1, \vi1, \vrB, \vResI1
- vfmsdb \vResR2, \vr2, \vrB, \vResR2
- vfmadb \vResI2, \vi2, \vrB, \vResI2
-
- vfmsdb \vResR3, \vr1, \vrB2, \vResR3
- vfmadb \vResI3, \vi1, \vrB2, \vResI3
- vfmsdb \vResR4, \vr2, \vrB2, \vResR4
- vfmadb \vResI4, \vi2, \vrB2, \vResI4
-
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- vfmadb \vResR1, \vi1, \viB, \vResR1
- vfmsdb \vResI1, \vr1, \viB, \vResI1
- vfmadb \vResR2, \vi2, \viB, \vResR2
- vfmsdb \vResI2, \vr2, \viB, \vResI2
-
- vfmadb \vResR3, \vi1, \viB2, \vResR3
- vfmsdb \vResI3, \vr1, \viB2, \vResI3
- vfmadb \vResR4, \vi2, \viB2, \vResR4
- vfmsdb \vResI4, \vr2, \viB2, \vResI4
-
- vfmadb \vResR1, \vr1, \vrB, \vResR1
- vfmsdb \vResI1, \vi1, \vrB, \vResI1
- vfmadb \vResR2, \vr2, \vrB, \vResR2
- vfmsdb \vResI2, \vi2, \vrB, \vResI2
-
- vfmadb \vResR3, \vr1, \vrB2, \vResR3
- vfmsdb \vResI3, \vi1, \vrB2, \vResI3
- vfmadb \vResR4, \vr2, \vrB2, \vResR4
- vfmsdb \vResI4, \vi2, \vrB2, \vResI4
-
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- vfmadb \vResR1, \vi1, \viB, \vResR1
- vfmsdb \vResI1, \vi1, \vrB, \vResI1
- vfmadb \vResR2, \vi2, \viB, \vResR2
- vfmsdb \vResI2, \vi2, \vrB, \vResI2
-
- vfmadb \vResR3, \vi1, \viB2, \vResR3
- vfmsdb \vResI3, \vi1, \vrB2, \vResI3
- vfmadb \vResR4, \vi2, \viB2, \vResR4
- vfmsdb \vResI4, \vi2, \vrB2, \vResI4
-
- vfmadb \vResR1, \vr1, \vrB, \vResR1
- vfmsdb \vResI1, \vr1, \viB, \vResI1
- vfmadb \vResR2, \vr2, \vrB, \vResR2
- vfmsdb \vResI2, \vr2, \viB, \vResI2
-
- vfmadb \vResR3, \vr1, \vrB2, \vResR3
- vfmsdb \vResI3, \vr1, \viB2, \vResI3
- vfmadb \vResR4, \vr2, \vrB2, \vResR4
- vfmsdb \vResI4, \vr2, \viB2, \vResI4
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
-
- vfmsdb \vResR1, \vr1, \vrB, \vResR1
- vfmadb \vResI1, \vi1, \vrB, \vResI1
- vfmsdb \vResR2, \vr2, \vrB, \vResR2
- vfmadb \vResI2, \vi2, \vrB, \vResI2
-
- vfmsdb \vResR3, \vr1, \vrB2, \vResR3
- vfmadb \vResI3, \vi1, \vrB2, \vResI3
- vfmsdb \vResR4, \vr2, \vrB2, \vResR4
- vfmadb \vResI4, \vi2, \vrB2, \vResI4
-
- vfmsdb \vResR1, \vi1, \viB, \vResR1
- vfmadb \vResI1, \vr1, \viB, \vResI1
- vfmsdb \vResR2, \vi2, \viB, \vResR2
- vfmadb \vResI2, \vr2, \viB, \vResI2
-
- vfmsdb \vResR3, \vi1, \viB2, \vResR3
- vfmadb \vResI3, \vr1, \viB2, \vResI3
- vfmsdb \vResR4, \vi2, \viB2, \vResR4
- vfmadb \vResI4, \vr2, \viB2, \vResI4
-
-
- #endif
-
- .endm
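-
- /* Note: CalcComplex_2x4 is CalcComplex_4x2 with the operand roles swapped (here B is the
-    vectorized operand and A is broadcast), so the NR/NC/TR/TC and RN/RT/CN/CT blocks
-    exchange their schemes: conjugating A here uses the scheme that conjugating B uses in
-    the 4x2 macro, and vice versa. */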
-
- /*
- Calculate for 2x2 inner
- */
- .macro CalcComplex_2x2 vResR1, vResI1,vResR2, vResI2, vR1, vI1, vRB, vIB, vRB2, vIB2
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vResR1, \vI1, \vIB, \vResR1
- vfmadb \vResI1, \vR1, \vIB, \vResI1
-
- vfmsdb \vResR2, \vI1, \vIB2, \vResR2
- vfmadb \vResI2, \vR1, \vIB2, \vResI2
-
- vfmsdb \vResR1, \vR1, \vRB, \vResR1
- vfmadb \vResI1, \vI1, \vRB, \vResI1
-
- vfmsdb \vResR2, \vR1, \vRB2, \vResR2
- vfmadb \vResI2, \vI1, \vRB2, \vResI2
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- vfmadb \vResR1, \vI1, \vIB, \vResR1
- vfmsdb \vResI1, \vR1, \vIB, \vResI1
-
- vfmadb \vResR2, \vI1, \vIB2, \vResR2
- vfmsdb \vResI2, \vR1, \vIB2, \vResI2
-
- vfmadb \vResR1, \vR1, \vRB, \vResR1
- vfmsdb \vResI1, \vI1, \vRB, \vResI1
-
- vfmadb \vResR2, \vR1, \vRB2, \vResR2
- vfmsdb \vResI2, \vI1, \vRB2, \vResI2
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- vfmadb \vResR1, \vI1, \vIB, \vResR1
- vfmsdb \vResI1, \vI1, \vRB, \vResI1
-
- vfmadb \vResR2, \vI1, \vIB2, \vResR2
- vfmsdb \vResI2, \vI1, \vRB2, \vResI2
-
- vfmadb \vResR1, \vR1, \vRB, \vResR1
- vfmsdb \vResI1, \vR1, \vIB, \vResI1
-
- vfmadb \vResR2, \vR1, \vRB2, \vResR2
- vfmsdb \vResI2, \vR1, \vIB2, \vResI2
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
- vfmsdb \vResR1, \vR1, \vRB, \vResR1
- vfmadb \vResI1, \vI1, \vRB, \vResI1
-
- vfmsdb \vResR2, \vR1, \vRB2, \vResR2
- vfmadb \vResI2, \vI1, \vRB2, \vResI2
-
- vfmsdb \vResR1, \vI1, \vIB, \vResR1
- vfmadb \vResI1, \vR1, \vIB, \vResI1
-
- vfmsdb \vResR2, \vI1, \vIB2, \vResR2
- vfmadb \vResI2, \vR1, \vIB2, \vResI2
- #endif
- .endm
-
- /*
- Calculate for 2x1 inner
- */
- .macro CalcComplex_2x1 vRealResult1, vImageResult1, vReal1, vImage1, vecRealB, vecImageB
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- #endif
- .endm
-
- /*
- Calculate for 1x2 inner
- */
- .macro CalcComplex_1x2 vRealResult1, vImageResult1, vReal1, vImage1, vecRealB, vecImageB
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- #endif
-
- #if defined(RN) || defined(CN) || defined(RT) || defined(CT)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- #endif
-
- #if defined(NR) || defined(TR) || defined(NC) || defined(TC)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- #endif
- .endm
-
-
- /*
- Calculate for 4x1 inner
- */
- .macro CalcComplex_4x1 vRealResult1, vImageResult1, vRealResult2, vImageResult2, vReal1, vImage1, vReal2, vImage2, vecRealB, vecImageB
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmadb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmadb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmadb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmsdb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmadb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmsdb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmadb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmsdb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmadb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmsdb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
-
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmadb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmadb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- #endif
-
- .endm
-
- /*
- Calculate for 1x4 inner
- */
- .macro CalcComplex_1x4 vRealResult1, vImageResult1, vRealResult2, vImageResult2, vReal1, vImage1, vReal2, vImage2, vecRealB, vecImageB
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmadb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmadb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- #endif
-
- #if defined(RN) || defined(CN) || defined(RT) || defined(CT)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmadb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmsdb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmadb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmsdb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- #endif
-
- #if defined(NR) || defined(TR) || defined(NC) || defined(TC)
- vfmadb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmsdb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmadb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmsdb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- vfmadb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmsdb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmadb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmsdb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
-
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmadb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmadb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- #endif
-
- .endm
-
- .macro CalcComplex_1x1 RealResult1, ImageResult1, Real1, Image1, RealB, ImageB
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
- msdbr \RealResult1, \Image1, \ImageB
- madbr \ImageResult1, \Real1, \ImageB
- msdbr \RealResult1, \Real1, \RealB
- madbr \ImageResult1, \Image1, \RealB
- #endif
-
- #if defined(NR) || defined(NC) || defined(TR) || defined(TC)
- madbr \RealResult1, \Image1, \ImageB
- msdbr \ImageResult1, \Real1, \ImageB
- madbr \RealResult1, \Real1, \RealB
- msdbr \ImageResult1, \Image1, \RealB
- #endif
-
- #if defined(RN) || defined(RT) || defined(CN) || defined(CT)
- madbr \RealResult1, \Image1, \ImageB
- msdbr \ImageResult1, \Image1, \RealB
- madbr \RealResult1, \Real1, \RealB
- msdbr \ImageResult1, \Real1, \ImageB
- #endif
- #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
- msdbr \RealResult1, \Real1, \RealB
- madbr \ImageResult1, \Image1, \RealB
- msdbr \RealResult1, \Image1, \ImageB
- madbr \ImageResult1, \Real1, \ImageB
- #endif
- .endm
-
- #define DISP(ind,stride,disp) (ind*stride+disp)
- #define DISP64(ind,disp) (ind*64+disp)
- #define DISP32(ind,disp) (ind*32+disp)
- #define DISP16(ind,disp) (ind*16+disp)
- #define USE_VLM 1
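-
- /* DISP* compute the byte displacement of unrolled iteration \Index: a double-complex
-    element is 16 bytes, so a 4-element step covers 64 bytes, a 2-element step 32 and a
-    1-element step 16. For example DISP64(2,16) = 2*64+16 = 144, the offset of the second
-    16-byte load within the third unrolled iteration. */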
-
- .macro ZCALC_4x4_I PTR_A_REG,PTR_B_REG,Index,IsLast
- #if defined(USE_VLM)
- vlm %v4,%v7, DISP64(\Index ,0) (\PTR_A_REG)
- #else
- vl %v4 , DISP64(\Index ,0) (\PTR_A_REG)
- vl %v5 , DISP64(\Index ,16)(\PTR_A_REG)
- vl %v6 , DISP64(\Index ,32)(\PTR_A_REG)
- vl %v7 , DISP64(\Index ,48)(\PTR_A_REG)
- #endif
-
- vlrepg %v9, DISP64(\Index ,0)(\PTR_B_REG)
- vlrepg %v10 , DISP64(\Index ,8)(\PTR_B_REG)
- vlrepg %v11, DISP64(\Index ,16)(\PTR_B_REG)
- vlrepg %v12 , DISP64(\Index ,24)(\PTR_B_REG)
-
- vpdi %v1,%v4,%v5,0
- vpdi %v5,%v4,%v5,0b101
- vpdi %v3,%v6,%v7,0
- vpdi %v7,%v6,%v7,0b101
-
- CalcComplex_4x2 %v16,%v17,%v18,%v19,%v20,%v21,%v22,%v23,%v1,%v5,%v3,%v7,%v9,%v10,%v11,%v12
-
- vlrepg %v9, DISP64(\Index ,32)(\PTR_B_REG)
- vlrepg %v10 , DISP64(\Index ,40)(\PTR_B_REG)
- vlrepg %v11, DISP64(\Index ,48)(\PTR_B_REG)
- vlrepg %v12 , DISP64(\Index ,56)(\PTR_B_REG)
- .if \IsLast==1
- la \PTR_A_REG, DISP64(\Index ,64)(\PTR_A_REG)
- .endif
- CalcComplex_4x2 %v24,%v25,%v26,%v27,%v28,%v29,%v30,%v31,%v1,%v5,%v3,%v7,%v9,%v10,%v11,%v12
-
- .if \IsLast==1
- la \PTR_B_REG, DISP64(\Index ,64)(\PTR_B_REG)
- .endif
- .endm
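-
- /* The vpdi pairs above deinterleave the loaded (re,im) element layout into real and
-    imaginary lanes: mask 0 takes the first doubleword (the real part) of each operand,
-    mask 0b101 the second doubleword (the imaginary part). A C model of this shuffle,
-    with ab standing for the two concatenated source vectors (illustrative helper name):
-
-        static void vpdi_split(const double ab[4], double re[2], double im[2])
-        {
-            re[0] = ab[0]; re[1] = ab[2];   // vpdi ...,0    : doubleword 0 of each operand
-            im[0] = ab[1]; im[1] = ab[3];   // vpdi ...,0b101: doubleword 1 of each operand
-        }
- */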
-
- .macro ZCALC_4x2_I PTR_A_REG,PTR_B_REG,Index,IsLast
- #if defined(USE_VLM)
- vlm %v4,%v7, DISP64(\Index ,0) (\PTR_A_REG)
- #else
- vl %v4 , DISP64(\Index ,0) (\PTR_A_REG)
- vl %v5 , DISP64(\Index ,16)(\PTR_A_REG)
- vl %v6 , DISP64(\Index ,32)(\PTR_A_REG)
- vl %v7 , DISP64(\Index ,48)(\PTR_A_REG)
- #endif
- vlrepg %v9, DISP32(\Index ,0)(\PTR_B_REG)
- vlrepg %v10 , DISP32(\Index ,8)(\PTR_B_REG)
- vlrepg %v11, DISP32(\Index ,16)(\PTR_B_REG)
- vlrepg %v12 , DISP32(\Index ,24)(\PTR_B_REG)
-
- vpdi %v1,%v4,%v5,0
- vpdi %v5,%v4,%v5,0b101
- vpdi %v3,%v6,%v7,0
- vpdi %v7,%v6,%v7,0b101
- .if \IsLast==1
- la \PTR_A_REG, DISP64(\Index ,64)(\PTR_A_REG)
- .endif
- CalcComplex_4x2 %v16,%v17,%v18,%v19,%v20,%v21,%v22,%v23,%v1,%v5,%v3,%v7,%v9,%v10,%v11,%v12
-
- .if \IsLast==1
- la \PTR_B_REG, DISP32(\Index ,32)(\PTR_B_REG)
- .endif
- .endm
-
- .macro ZCALC_2x4_I PTR_A_REG,PTR_B_REG,Index,IsLast
- #if defined(USE_VLM)
- vlm %v4,%v7, DISP64(\Index ,0) (\PTR_B_REG)
- #else
- vl %v4 , DISP64(\Index ,0) (\PTR_B_REG)
- vl %v5 , DISP64(\Index ,16)(\PTR_B_REG)
- vl %v6 , DISP64(\Index ,32)(\PTR_B_REG)
- vl %v7 , DISP64(\Index ,48)(\PTR_B_REG)
- #endif
- vlrepg %v9, DISP32(\Index ,0)(\PTR_A_REG)
- vlrepg %v10 , DISP32(\Index ,8)(\PTR_A_REG)
- vlrepg %v11, DISP32(\Index ,16)(\PTR_A_REG)
- vlrepg %v12 , DISP32(\Index ,24)(\PTR_A_REG)
-
- vpdi %v1,%v4,%v5,0
- vpdi %v5,%v4,%v5,0b101
- vpdi %v3,%v6,%v7,0
- vpdi %v7,%v6,%v7,0b101
- .if \IsLast==1
- la \PTR_B_REG, DISP64(\Index ,64)(\PTR_B_REG)
- .endif
- CalcComplex_2x4 %v16,%v17,%v18,%v19,%v20,%v21,%v22,%v23,%v1,%v5,%v3,%v7,%v9,%v10,%v11,%v12
-
- .if \IsLast==1
- la \PTR_A_REG, DISP32(\Index ,32)(\PTR_A_REG)
- .endif
- .endm
-
- .macro ZCALC_4x1_I PTR_A_REG,PTR_B_REG,Index,IsLast
- #if defined(USE_VLM)
- vlm %v4,%v7, DISP64(\Index ,0) (\PTR_A_REG)
- #else
- vl %v4 , DISP64(\Index ,0) (\PTR_A_REG)
- vl %v5 , DISP64(\Index ,16)(\PTR_A_REG)
- vl %v6 , DISP64(\Index ,32)(\PTR_A_REG)
- vl %v7 , DISP64(\Index ,48)(\PTR_A_REG)
- #endif
- vlrepg %v9, DISP16(\Index ,0)(\PTR_B_REG)
- vlrepg %v10 , DISP16(\Index ,8)(\PTR_B_REG)
-
- vpdi %v1,%v4,%v5,0
- vpdi %v11,%v4,%v5,0b101
- vpdi %v3,%v6,%v7,0
- vpdi %v12,%v6,%v7,0b101
- .if \IsLast==1
- la \PTR_A_REG, DISP64(\Index ,64)(\PTR_A_REG)
- .endif
- CalcComplex_4x1 %v16,%v17,%v18,%v19,%v1,%v11,%v3,%v12,%v9,%v10
- .if \IsLast==1
- la \PTR_B_REG, DISP16(\Index ,16)(\PTR_B_REG)
- .endif
- .endm
-
- .macro ZCALC_1x4_I PTR_A_REG,PTR_B_REG,Index,IsLast
- #if defined(USE_VLM)
- vlm %v4,%v7, DISP64(\Index ,0) (\PTR_B_REG)
- #else
- vl %v4 , DISP64(\Index ,0) (\PTR_B_REG)
- vl %v5 , DISP64(\Index ,16)(\PTR_B_REG)
- vl %v6 , DISP64(\Index ,32)(\PTR_B_REG)
- vl %v7 , DISP64(\Index ,48)(\PTR_B_REG)
- #endif
- vlrepg %v9, DISP16(\Index ,0)(\PTR_A_REG)
- vlrepg %v10 , DISP16(\Index ,8)(\PTR_A_REG)
-
- vpdi %v1,%v4,%v5,0
- vpdi %v11,%v4,%v5,0b101
- vpdi %v3,%v6,%v7,0
- vpdi %v12,%v6,%v7,0b101
- .if \IsLast==1
- la \PTR_B_REG, DISP64(\Index ,64)(\PTR_B_REG)
- .endif
- CalcComplex_1x4 %v16,%v17,%v18,%v19,%v1,%v11,%v3,%v12,%v9,%v10
- .if \IsLast==1
- la \PTR_A_REG, DISP16(\Index ,16)(\PTR_A_REG)
- .endif
- .endm
-
- .macro ZCALC_2x2_I PTR_A_REG,PTR_B_REG ,Index,IsLast
- vl %v1 , DISP32(\Index ,0)(\PTR_A_REG)
- vl %v3 , DISP32(\Index ,16)(\PTR_A_REG)
- vlrepg %v9, DISP32(\Index ,0)(\PTR_B_REG)
- vlrepg %v10 , DISP32(\Index ,8)(\PTR_B_REG)
- vlrepg %v11, DISP32(\Index ,16)(\PTR_B_REG)
- vlrepg %v12 , DISP32(\Index ,24)(\PTR_B_REG)
- vpdi %v5,%v1,%v3,0
- vpdi %v6,%v1,%v3,0b101
-
- .if \IsLast==1
- la \PTR_A_REG, DISP32(\Index ,32)(\PTR_A_REG)
- .endif
- CalcComplex_2x2 %v16,%v17,%v20,%v21,%v5,%v6, %v9,%v10,%v11,%v12
- .if \IsLast==1
- la \PTR_B_REG, DISP32(\Index ,32)(\PTR_B_REG)
- .endif
- .endm
-
- .macro ZCALC_2x1_I PTR_A_REG,PTR_B_REG ,Index,IsLast
- vl %v1 , DISP32(\Index ,0)(\PTR_A_REG)
- vl %v3 , DISP32(\Index ,16)(\PTR_A_REG)
- vlrepg %v6, DISP16(\Index ,0)(\PTR_B_REG)
- vlrepg %v7 , DISP16(\Index ,8)(\PTR_B_REG)
- vpdi %v4,%v1,%v3,0
- vpdi %v5,%v1,%v3,0b101
-
- .if \IsLast==1
- la \PTR_A_REG, DISP32(\Index ,32)(\PTR_A_REG)
- .endif
- CalcComplex_2x1 %v16,%v17,%v4,%v5,%v6,%v7
- .if \IsLast==1
- la \PTR_B_REG, DISP16(\Index ,16)(\PTR_B_REG)
- .endif
- .endm
-
- .macro ZCALC_1x2_I PTR_A_REG,PTR_B_REG ,Index,IsLast
- vl %v1 , DISP32(\Index ,0)(\PTR_B_REG)
- vl %v3 , DISP32(\Index ,16)(\PTR_B_REG)
- vlrepg %v6, DISP16(\Index ,0)(\PTR_A_REG)
- vlrepg %v7 , DISP16(\Index ,8)(\PTR_A_REG)
- vpdi %v4,%v1,%v3,0
- vpdi %v5,%v1,%v3,0b101
-
- .if \IsLast==1
- la \PTR_B_REG, DISP32(\Index ,32)(\PTR_B_REG)
- .endif
- CalcComplex_1x2 %v16,%v17,%v4,%v5,%v6,%v7
- .if \IsLast==1
- la \PTR_A_REG, DISP16(\Index ,16)(\PTR_A_REG)
- .endif
- .endm
-
- .macro ZCALC_1x1_I PTR_A_REG,PTR_B_REG ,Index,IsLast
- ld %f1 , DISP16(\Index ,0)(\PTR_A_REG)
- ld %f3 , DISP16(\Index ,8)(\PTR_A_REG)
- ld %f4 , DISP16(\Index ,0)(\PTR_B_REG)
- ld %f5 , DISP16(\Index ,8)(\PTR_B_REG)
- .if \IsLast==1
- la \PTR_A_REG, DISP16(\Index ,16)(\PTR_A_REG)
- .endif
- CalcComplex_1x1 %f6,%f7,%f1,%f3,%f4,%f5
- .if \IsLast==1
- la \PTR_B_REG, DISP16(\Index ,16)(\PTR_B_REG)
- .endif
- .endm
-
- .macro ZCALC_4x4 PTR_A_REG,PTR_B_REG
- ZCALC_4x4_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
- .macro ZCALC_4x2 PTR_A_REG,PTR_B_REG
- ZCALC_4x2_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
- .macro ZCALC_4x1 PTR_A_REG,PTR_B_REG
- ZCALC_4x1_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
-
- .macro ZCALC_4x4_4 PTR_A_REG,PTR_B_REG
- ZCALC_4x4_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_4x4_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_4x4_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_4x4_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
- .macro ZCALC_4x2_4 PTR_A_REG,PTR_B_REG
- ZCALC_4x2_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_4x2_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_4x2_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_4x2_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
- .macro ZCALC_4x1_4 PTR_A_REG,PTR_B_REG
- ZCALC_4x1_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_4x1_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_4x1_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_4x1_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
- .macro ZCALC_2x4_4 PTR_A_REG,PTR_B_REG
- ZCALC_2x4_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_2x4_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_2x4_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_2x4_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
- .macro ZCALC_2x4 PTR_A_REG,PTR_B_REG
- ZCALC_2x4_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
-
- .macro ZCALC_1x4_4 PTR_A_REG,PTR_B_REG
- ZCALC_1x4_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_1x4_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_1x4_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_1x4_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
- .macro ZCALC_1x4 PTR_A_REG,PTR_B_REG
- ZCALC_1x4_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
- .macro ZCALC_2x2 PTR_A_REG,PTR_B_REG
- ZCALC_2x2_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
-
- .macro ZCALC_2x2_4 PTR_A_REG,PTR_B_REG
- ZCALC_2x2_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_2x2_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_2x2_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_2x2_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
- .macro ZCALC_2x1 PTR_A_REG,PTR_B_REG
- ZCALC_2x1_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
-
- .macro ZCALC_2x1_4 PTR_A_REG,PTR_B_REG
- ZCALC_2x1_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_2x1_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_2x1_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_2x1_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
-
- .macro ZCALC_1x2_4 PTR_A_REG,PTR_B_REG
- ZCALC_1x2_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_1x2_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_1x2_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_1x2_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
- .macro ZCALC_1x2 PTR_A_REG,PTR_B_REG
- ZCALC_1x2_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
-
- .macro ZCALC_1x1_4 PTR_A_REG,PTR_B_REG
- ZCALC_1x1_I \PTR_A_REG,\PTR_B_REG,0,0
- ZCALC_1x1_I \PTR_A_REG,\PTR_B_REG,1,0
- ZCALC_1x1_I \PTR_A_REG,\PTR_B_REG,2,0
- ZCALC_1x1_I \PTR_A_REG,\PTR_B_REG,3,1
- .endm
-
- .macro ZCALC_1x1 PTR_A_REG,PTR_B_REG
- ZCALC_1x1_I \PTR_A_REG,\PTR_B_REG,0,1
- .endm
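-
- /* The *_4 wrappers unroll four k iterations. Only the final inner call passes IsLast=1,
-    so the pointers are advanced once past all four iterations, e.g. by
-    DISP64(3,64) = 256 bytes for a 4-element operand. */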
-
-
-
- /*****************************STORE RESULTS************************************/
- .macro CalcMultAlpha_4x1 vRealResult1, vImageResult1, vRealResult2, vImageResult2, vReal1, vImage1, vReal2, vImage2, vecRealB, vecImageB
- #if defined (TRMMKERNEL)
- vfmdb \vRealResult1, \vImage1, \vecImageB
- vfmdb \vImageResult1, \vReal1, \vecImageB
- vfmdb \vRealResult2, \vImage2, \vecImageB
- vfmdb \vImageResult2, \vReal2, \vecImageB
- #else
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- vfmsdb \vRealResult2, \vImage2, \vecImageB, \vRealResult2
- vfmadb \vImageResult2, \vReal2, \vecImageB, \vImageResult2
- #endif
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- vfmsdb \vRealResult2, \vReal2, \vecRealB, \vRealResult2
- vfmadb \vImageResult2, \vImage2, \vecRealB, \vImageResult2
-
- .endm
-
- .macro CalcMultAlpha_2x1 vRealResult1, vImageResult1, vReal1, vImage1, vecRealB, vecImageB
- #if defined (TRMMKERNEL)
- vfmdb \vRealResult1, \vImage1, \vecImageB
- vfmdb \vImageResult1, \vReal1, \vecImageB
- #else
- vfmsdb \vRealResult1, \vImage1, \vecImageB, \vRealResult1
- vfmadb \vImageResult1, \vReal1, \vecImageB, \vImageResult1
- #endif
- vfmsdb \vRealResult1, \vReal1, \vecRealB, \vRealResult1
- vfmadb \vImageResult1, \vImage1, \vecRealB, \vImageResult1
- .endm
-
- .macro CalcMultAlpha_1x1 RealResult1, ImageResult1, Real1, Image1, RealB, ImageB
-
- msdbr \RealResult1, \Image1, \ImageB
- madbr \ImageResult1, \Real1, \ImageB
- msdbr \RealResult1, \Real1, \RealB
- madbr \ImageResult1, \Image1, \RealB
- .endm
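-
- /* CalcMultAlpha_* applies alpha to the accumulated result and, on the non-TRMM path,
-    adds the C values preloaded into the result registers. A scalar sketch with
-    fma(x,y,z)=x*y+z and alpha = ar + ai*I (illustrative helper name):
-
-        #include <math.h>
-
-        static void mult_alpha(double *cr, double *ci, double rr, double ri,
-                               double ar, double ai)
-        {
-            *cr = fma(ri, ai, -*cr);  // msdb: cr = ri*ai - cr
-            *ci = fma(rr, ai,  *ci);  // madb: ci = rr*ai + ci
-            *cr = fma(rr, ar, -*cr);  // msdb: cr = rr*ar - cr => cr_old + rr*ar - ri*ai
-            *ci = fma(ri, ar,  *ci);  // madb: ci = ri*ar + ci => ci_old + rr*ai + ri*ar
-        }
-
-    On the TRMMKERNEL path the vector variants start with a plain multiply (vfmdb)
-    instead, and the scalar 1x1 path zeroes %f1/%f4 beforehand (see ZSTORE_1x1). */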
-
- .macro ZSTORE_4x4 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL ,LC1,LC2
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v4 , 16(\CIJ_REG)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- la \LC1,0(\LDC_BYTE_ORIGINAL, \LDC_BYTE_ORIGINAL)
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v16,%v17,%v18,%v19,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- la \LC2,0(\LC1,\LDC_BYTE_ORIGINAL )
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,16(\CIJ_REG)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,32(\CIJ_REG)
- vst %v19,48(\CIJ_REG)
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vl %v4 , 16(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v20,%v21,%v22,%v23,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3 ,%v4,0b0101
- vst %v16,0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,16(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,32(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vst %v19,48(\CIJ_REG,\LDC_BYTE_ORIGINAL)
-
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG,\LC1)
- vl %v4 , 16(\CIJ_REG,\LC1)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG,\LC1)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG,\LC1)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v24,%v25,%v26,%v27,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3 ,%v4,0b0101
- vst %v16,0(\CIJ_REG,\LC1)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,16(\CIJ_REG,\LC1)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,32(\CIJ_REG,\LC1)
- vst %v19,48(\CIJ_REG,\LC1)
-
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG,\LC2)
- vl %v4 , 16(\CIJ_REG,\LC2)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG,\LC2)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG,\LC2)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v28,%v29,%v30,%v31,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3 ,%v4,0b0101
- vst %v16,0(\CIJ_REG,\LC2)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,16(\CIJ_REG,\LC2)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,32(\CIJ_REG,\LC2)
- vst %v19,48(\CIJ_REG,\LC2)
- la \CIJ_REG,64(\CIJ_REG)
- .endm
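-
- /* ZSTORE_4x4 walks four columns of C: LC1 = 2*ldc and LC2 = 3*ldc are formed with la so
-    the columns can be addressed as base+index without extra multiplies, and the vpdi
-    pairs after each CalcMultAlpha_4x1 re-interleave the real/imaginary lanes back into
-    (re,im) memory order before the vst stores. */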
-
- .macro ZSTORE_4x2 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v4 , 16(\CIJ_REG)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v16,%v17,%v18,%v19,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,16(\CIJ_REG)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,32(\CIJ_REG)
- vst %v19,48(\CIJ_REG)
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vl %v4 , 16(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v20,%v21,%v22,%v23,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v20, %v3 ,%v4,0
- vpdi %v21, %v3 ,%v4,0b0101
- vst %v20,0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v22, %v1 ,%v6,0
- vst %v21,16(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v23, %v1 ,%v6,0b0101
- vst %v22,32(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vst %v23,48(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- la \CIJ_REG,64(\CIJ_REG)
- .endm
- .macro ZSTORE_4x1 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v4 , 16(\CIJ_REG)
- vpdi %v3,%v1,%v4,0
- vl %v7 , 32(\CIJ_REG)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 48 (\CIJ_REG)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v16,%v17,%v18,%v19,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,16(\CIJ_REG)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,32(\CIJ_REG)
- vst %v19,48(\CIJ_REG)
- la \CIJ_REG,64(\CIJ_REG)
- .endm
- .macro ZSTORE_1x4 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL,LC1,LC2
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- la \LC1,0(\LDC_BYTE_ORIGINAL, \LDC_BYTE_ORIGINAL)
- vl %v4 , 0(\CIJ_REG, \LDC_BYTE_ORIGINAL)
- vpdi %v3,%v1,%v4,0
- la \LC2,0(\LC1,\LDC_BYTE_ORIGINAL )
- vl %v7 , 0(\CIJ_REG, \LC1)
- vpdi %v4,%v1,%v4,0b101
- vl %v6 , 0 (\CIJ_REG,\LC2)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- #else
- la \LC1,0(\LDC_BYTE_ORIGINAL, \LDC_BYTE_ORIGINAL)
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v16,%v17,%v18,%v19,\ALPHA_VECREG,\ALPHA_VECI
- #if defined(TRMMKERNEL)
- la \LC2,0(\LC1,\LDC_BYTE_ORIGINAL )
- #endif
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,0(\CIJ_REG, \LDC_BYTE_ORIGINAL)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,0(\CIJ_REG, \LC1)
- vst %v19,0(\CIJ_REG,\LC2)
- la \CIJ_REG,16(\CIJ_REG)
- .endm
- .macro ZSTORE_2x4 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL,LC1,LC2
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v26 , 16(\CIJ_REG)
- la \LC1,0(\LDC_BYTE_ORIGINAL, \LDC_BYTE_ORIGINAL)
- vl %v4 , 0(\CIJ_REG, \LDC_BYTE_ORIGINAL)
- vl %v25 , 16(\CIJ_REG, \LDC_BYTE_ORIGINAL)
- vpdi %v3,%v1,%v4,0
- vpdi %v24,%v26,%v25,0
- la \LC2,0(\LC1,\LDC_BYTE_ORIGINAL )
- vl %v7 , 0(\CIJ_REG, \LC1)
- vl %v28 , 16(\CIJ_REG, \LC1)
- vpdi %v4,%v1,%v4,0b101
- vpdi %v25,%v26,%v25,0b101
- vl %v6 , 0 (\CIJ_REG,\LC2)
- vl %v27 , 16 (\CIJ_REG,\LC2)
- vpdi %v1,%v7,%v6,0
- vpdi %v6,%v7,%v6,0b101
- vpdi %v26,%v28,%v27,0
- vpdi %v27,%v28,%v27,0b101
- #else
- la \LC1,0(\LDC_BYTE_ORIGINAL, \LDC_BYTE_ORIGINAL)
- #endif
- CalcMultAlpha_4x1 %v3,%v4,%v1,%v6,%v16,%v17,%v18,%v19,\ALPHA_VECREG,\ALPHA_VECI
- CalcMultAlpha_4x1 %v24,%v25,%v26,%v27,%v20,%v21,%v22,%v23,\ALPHA_VECREG,\ALPHA_VECI
- #if defined(TRMMKERNEL)
- la \LC2,0(\LC1,\LDC_BYTE_ORIGINAL )
- #endif
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vpdi %v20, %v24 ,%v25,0
- vpdi %v21, %v24,%v25,0b0101
- vpdi %v22, %v26 ,%v27,0
- vpdi %v23, %v26 ,%v27,0b0101
- vst %v16,0(\CIJ_REG)
- vst %v20,16(\CIJ_REG)
- vpdi %v18, %v1 ,%v6,0
- vst %v17,0(\CIJ_REG, \LDC_BYTE_ORIGINAL)
- vst %v21,16(\CIJ_REG, \LDC_BYTE_ORIGINAL)
- vpdi %v19, %v1 ,%v6,0b0101
- vst %v18,0(\CIJ_REG, \LC1)
- vst %v22,16(\CIJ_REG, \LC1)
- vst %v19,0(\CIJ_REG,\LC2)
- vst %v23,16(\CIJ_REG,\LC2)
- la \CIJ_REG,32(\CIJ_REG)
-
- .endm
-
- .macro ZSTORE_2x2 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v4 , 16(\CIJ_REG)
- vpdi %v3,%v1,%v4,0
- vpdi %v4,%v1,%v4,0b101
- vl %v5 , 0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vl %v7 , 16(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v6,%v5,%v7,0
- vpdi %v7,%v5,%v7,0b101
- #endif
- CalcMultAlpha_2x1 %v3,%v4, %v16,%v17,\ALPHA_VECREG,\ALPHA_VECI
- CalcMultAlpha_2x1 %v6,%v7, %v20,%v21 ,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vst %v17,16(\CIJ_REG)
- vpdi %v20, %v6 ,%v7,0
- vpdi %v21, %v6 ,%v7,0b0101
- vst %v20,0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vst %v21,16(\CIJ_REG,\LDC_BYTE_ORIGINAL)
-
- la \CIJ_REG,32(\CIJ_REG)
- .endm
-
- .macro ZSTORE_2x1 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v4 , 16(\CIJ_REG)
- vpdi %v3,%v1,%v4,0
- vpdi %v4,%v1,%v4,0b101
- #endif
- CalcMultAlpha_2x1 %v3,%v4, %v16,%v17,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vst %v17,16(\CIJ_REG)
- la \CIJ_REG,32(\CIJ_REG)
- .endm
-
- .macro ZSTORE_1x2 ALPHA_VECREG,ALPHA_VECI,CIJ_REG , LDC_BYTE_ORIGINAL
- #if !defined(TRMMKERNEL)
- vl %v1 , 0(\CIJ_REG)
- vl %v4 , 0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- vpdi %v3,%v1,%v4,0
- vpdi %v4,%v1,%v4,0b101
- #endif
- CalcMultAlpha_2x1 %v3,%v4, %v16,%v17,\ALPHA_VECREG,\ALPHA_VECI
- vpdi %v16, %v3 ,%v4,0
- vpdi %v17, %v3,%v4,0b0101
- vst %v16,0(\CIJ_REG)
- vst %v17,0(\CIJ_REG,\LDC_BYTE_ORIGINAL)
- la \CIJ_REG,16(\CIJ_REG)
- .endm
-
- .macro ZSTORE_1x1 ALPHA_RR,ALPHA_RI ,CIJ_REG
- #if defined (TRMMKERNEL)
- lzdr %f1
- lzdr %f4
- #else
- ld %f1 , 0(\CIJ_REG)
- ld %f4 , 8(\CIJ_REG )
- #endif
- CalcMultAlpha_1x1 %f1,%f4, %f6,%f7,\ALPHA_RR,\ALPHA_RI
- std %f1,0(\CIJ_REG)
- std %f4,8(\CIJ_REG)
- la \CIJ_REG,16(\CIJ_REG)
- .endm
-
- /****************************TRMM POINTER REFRESH MACROS*************************/
-
- .macro RefreshPointers PTR_A,PTR_B,OFF_VAL,B_VAL,C_A,C_B
- #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- /* ptrbb = bb;*/
- lgr \PTR_B,\B_VAL /*refresh BPOINT*/
-
- #else
- /* ptrba =ptrba+ off*C_A;
- ptrbb = bb + off*C_B;*/
- .if \C_B==4
- .if \C_A==4
- sllg \PTR_B, \OFF_VAL,6
- agr \PTR_A,\PTR_B /*ptrba+off*4**/
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .elseif \C_A==2
- sllg \PTR_B, \OFF_VAL,5
- la \PTR_A,0(\PTR_A,\PTR_B) /*ptrba+off*2**/
- agr \PTR_B, \PTR_B
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
-
- .elseif \C_A==1
- sllg \PTR_B, \OFF_VAL,4
- agr \PTR_A,\PTR_B /*ptrba+off*1*/
- sllg \PTR_B, \OFF_VAL,6
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .endif
-
- .elseif \C_B==2
- .if \C_A==4
- sllg \PTR_B, \OFF_VAL,5
- la \PTR_A,0(\PTR_A,\PTR_B) /*ptrba+off*2**/
- agr \PTR_A,\PTR_B /*ptrba+off*4 in total*/
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .elseif \C_A==2
- sllg \PTR_B, \OFF_VAL,5
- agr \PTR_A,\PTR_B /*ptrba+off*2**/
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .elseif \C_A==1
- sllg \PTR_B, \OFF_VAL,4
- la \PTR_A,0(\PTR_A,\PTR_B) /*ptrba+off*1**/
- agr \PTR_B,\PTR_B /* off+off**/
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .endif
-
- .elseif \C_B==1
- .if \C_A==4
- sllg \PTR_B, \OFF_VAL,6
- agr \PTR_A,\PTR_B /*ptrba+off*4**/
- sllg \PTR_B, \OFF_VAL,4
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .elseif \C_A==2
- sllg \PTR_B, \OFF_VAL,4
- la \PTR_A,0(\PTR_A,\PTR_B) /*ptrba+off*1**/
- agr \PTR_A,\PTR_B /*ptrba+off*2 in total*/
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
-
- .elseif \C_A==1
- sllg \PTR_B, \OFF_VAL,4
- agr \PTR_A,\PTR_B /*ptrba+off*1**/
- la \PTR_B,0(\B_VAL,\PTR_B) /*refresh BPOINT*/
- .endif
- .endif
-
- #endif
- .endm
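-
- /* RefreshPointers mirrors the generic TRMM C logic; offsets are in double-complex
-    elements of 16 bytes each, hence the sllg shifts by 6/5/4 for 4/2/1 elements:
-
-        // #if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
-        //     ptrbb = bb;
-        // #else
-        //     ptrba += off*C_A;        // C_A = rows of the micro-tile
-        //     ptrbb  = bb + off*C_B;   // C_B = columns of the micro-tile
-        // #endif
- */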
-
- .macro RefreshTempBk TEMP_VAL,BK_VAL,OFF_VAL,INCR_A,INCR_B
- #if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
- /* temp = bk-off;*/
- sgrk \TEMP_VAL,\BK_VAL,\OFF_VAL
-
- #elif defined(LEFT)
- /* temp = off+INCR_A; // number of values in A */
- la \TEMP_VAL,\INCR_A(\OFF_VAL)
- #else
- /* temp = off+INCR_B // number of values in B*/
- la \TEMP_VAL,\INCR_B(\OFF_VAL)
- #endif
-
- .endm
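-
- /* The la with an immediate displacement implements temp = off + constant; for example
-    (arguments illustrative) RefreshTempBk TMP,BK,OFF,4,2 yields temp = bk-off,
-    temp = off+4 (LEFT) or temp = off+2, matching a 4x2 tile. */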
-
- .macro RefreshPointersAndOFF TEMP_VAL,BK_VAL,OFF_VAL,PTR_A,C_A,C_B
-
- #if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
- /*temp = bk - off;*/
- sgrk \TEMP_VAL,\BK_VAL,\OFF_VAL
- #ifdef LEFT
- /*temp -= C_A; // number of values in A*/
- lay \TEMP_VAL,-\C_A(\TEMP_VAL)
- #else
- /*temp -= C_B; // number of values in B*/
- lay \TEMP_VAL,-\C_B(\TEMP_VAL)
- #endif
- /*ptrba += temp*C_A;
- ptrbb += temp*C_B;*/
-
- .if \C_A==4
- sllg \TEMP_VAL, \TEMP_VAL,6 /*temp*4*/
- .elseif \C_A==2
- sllg \TEMP_VAL, \TEMP_VAL,5 /*temp*2*/
- .elseif \C_A==1
- sllg \TEMP_VAL, \TEMP_VAL,4 /*temp*1*/
- .endif
- la \PTR_A,0(\PTR_A,\TEMP_VAL) /*ptrba+temp*C_A*/
- #endif
-
- #ifdef LEFT
- /*off += C_A; // number of values in A*/
- aghi \OFF_VAL,\C_A
- #endif
- .endm
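-
- /* Condensed C equivalent of RefreshPointersAndOFF (the temp/ptrba part sits under the
-    same #if guard as in the macro; ptrbb is re-derived separately, and the byte scaling
-    is again done with sllg):
-
-        // temp   = bk - off;
-        // temp  -= LEFT ? C_A : C_B;   // values already consumed from A or B
-        // ptrba += temp*C_A;
-        // #ifdef LEFT
-        // off   += C_A;
-        // #endif
- */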
-