- /***************************************************************************
- Copyright (c) 2013-2016, The OpenBLAS Project
- All rights reserved.
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in
- the documentation and/or other materials provided with the
- distribution.
- 3. Neither the name of the OpenBLAS project nor the names of
- its contributors may be used to endorse or promote products
- derived from this software without specific prior written permission.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *****************************************************************************/
-
- /**************************************************************************************
- * 2016/04/22 Werner Saar (wernsaar@googlemail.com)
- * BLASTEST : OK
- * CTEST : OK
- * TEST : OK
- * LAPACK-TEST : OK
- **************************************************************************************/
-
- #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
-
- #define XSFADD_R1 xsadddp
- #define XSFADD_R2 xssubdp
- #define XSFADD_I1 xsadddp
- #define XSFADD_I2 xsadddp
-
- #elif defined(CN) || defined(CT) || defined(RN) || defined(RT)
-
- #define XSFADD_R1 xsadddp
- #define XSFADD_R2 xsadddp
- #define XSFADD_I1 xssubdp
- #define XSFADD_I2 xsadddp
-
- #elif defined(NC) || defined(TC) || defined(NR) || defined(TR)
-
- #define XSFADD_R1 xsadddp
- #define XSFADD_R2 xsadddp
- #define XSFADD_I1 xsadddp
- #define XSFADD_I2 xssubdp
-
- #else // CC || CR || RC || RR
-
- #define XSFADD_R1 xsadddp
- #define XSFADD_R2 xssubdp
- #define XSFADD_I1 xssubdp
- #define XSFADD_I2 xssubdp
-
- #endif
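- 
- /* Editor's note (added sketch, not from the original author): the XSFADD_*
-  * aliases choose add vs. subtract so that one kernel body serves all
-  * conjugation variants. In the SAVE macros below, with a = ar + i*ai from A
-  * and b = br + i*bi from B, the scalar reduction is effectively
-  *
-  *    re = 0;  re = XSFADD_R1(re, ar*br);  re = XSFADD_R2(re, ai*bi);
-  *    im = 0;  im = XSFADD_I1(im, ai*br);  im = XSFADD_I2(im, ar*bi);
-  *
-  * NN/NT/TN/TT gives the plain product re = ar*br - ai*bi, im = ai*br + ar*bi.
-  * Conjugating A (CN/CT/RN/RT) makes ai*bi add and negates ai*br; conjugating
-  * B (NC/TC/NR/TR) makes ai*bi add and negates ar*bi; conjugating both
-  * (CC/CR/RC/RR) keeps re = ar*br - ai*bi and negates both imaginary terms,
-  * matching conj(a)*conj(b) = conj(a*b).
-  */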
-
- /**********************************************************************************************
- * Macros for N=2 and M=8
- **********************************************************************************************/
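- 
- /* Editor's note (hypothetical usage, inferred from the macro names and the
-  * usual OpenBLAS software pipelining; the real loop lives in the driver that
-  * includes this file):
-  *
-  *    LOAD2x8_1                  // preload first A/B set (vs0..vs7, vs16..vs19)
-  *    KERNEL2x8_I1               // first step: xvmuldp initializes vs32..vs63
-  *    loop:  KERNEL2x8_1         // FMA on set 0 while loading set 1
-  *           KERNEL2x8_2         // FMA on set 1 while reloading set 0
-  *    KERNEL2x8_E2               // drain the last prefetched set, no loads
-  *    KERNEL2x8_SUBI1, KERNEL2x8_SUB1   // unpipelined first/next step for
-  *                                      // leftover K iterations
-  *    SAVE2x8                    // reduce, scale by alpha, update C
-  */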
-
- #if defined(_AIX)
- define(`LOAD2x8_1', `
- #else
- .macro LOAD2x8_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
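- 
- /* Editor's note (assumed operand layout, consistent with the load comments
-  * and the "real*real, imag*real" products in the kernels): each lxvd2x from
-  * A holds one complex element as [real, imag], while B is packed with each
-  * scalar duplicated, so vs16/vs18 = [br, br] and vs17/vs19 = [bi, bi] for
-  * the two N columns. A single xvmuldp then forms [ar*br, ai*br] or
-  * [ar*bi, ai*bi] per element, and the SAVE macros untangle the cross terms.
-  */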
-
- #if defined(_AIX)
- define(`KERNEL2x8_I1', `
- #else
- .macro KERNEL2x8_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs12, o0, AO // load real,imag from A
- lxvd2x vs13, o16, AO // load real,imag from A
- lxvd2x vs14, o32, AO // load real,imag from A
- lxvd2x vs15, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
- xvmuldp vs40, vs4, vs16 // real*real, imag*real
- xvmuldp vs41, vs4, vs17 // real*imag, imag*imag
- xvmuldp vs42, vs5, vs16 // real*real, imag*real
- xvmuldp vs43, vs5, vs17 // real*imag, imag*imag
- xvmuldp vs44, vs6, vs16 // real*real, imag*real
- xvmuldp vs45, vs6, vs17 // real*imag, imag*imag
- xvmuldp vs46, vs7, vs16 // real*real, imag*real
- xvmuldp vs47, vs7, vs17 // real*imag, imag*imag
-
- xvmuldp vs48, vs0, vs18 // real*real, imag*real
- xvmuldp vs49, vs0, vs19 // real*imag, imag*imag
- xvmuldp vs50, vs1, vs18 // real*real, imag*real
- xvmuldp vs51, vs1, vs19 // real*imag, imag*imag
- xvmuldp vs52, vs2, vs18 // real*real, imag*real
- xvmuldp vs53, vs2, vs19 // real*imag, imag*imag
- xvmuldp vs54, vs3, vs18 // real*real, imag*real
- xvmuldp vs55, vs3, vs19 // real*imag, imag*imag
- xvmuldp vs56, vs4, vs18 // real*real, imag*real
- xvmuldp vs57, vs4, vs19 // real*imag, imag*imag
- xvmuldp vs58, vs5, vs18 // real*real, imag*real
- xvmuldp vs59, vs5, vs19 // real*imag, imag*imag
- xvmuldp vs60, vs6, vs18 // real*real, imag*real
- xvmuldp vs61, vs6, vs19 // real*imag, imag*imag
- xvmuldp vs62, vs7, vs18 // real*real, imag*real
- xvmuldp vs63, vs7, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x8_1', `
- #else
- .macro KERNEL2x8_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs12, o0, AO // load real,imag from A
- lxvd2x vs13, o16, AO // load real,imag from A
- lxvd2x vs14, o32, AO // load real,imag from A
- lxvd2x vs15, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
- xvmaddadp vs40, vs4, vs16 // real*real, imag*real
- xvmaddadp vs41, vs4, vs17 // real*imag, imag*imag
- xvmaddadp vs42, vs5, vs16 // real*real, imag*real
- xvmaddadp vs43, vs5, vs17 // real*imag, imag*imag
- xvmaddadp vs44, vs6, vs16 // real*real, imag*real
- xvmaddadp vs45, vs6, vs17 // real*imag, imag*imag
- xvmaddadp vs46, vs7, vs16 // real*real, imag*real
- xvmaddadp vs47, vs7, vs17 // real*imag, imag*imag
-
- xvmaddadp vs48, vs0, vs18 // real*real, imag*real
- xvmaddadp vs49, vs0, vs19 // real*imag, imag*imag
- xvmaddadp vs50, vs1, vs18 // real*real, imag*real
- xvmaddadp vs51, vs1, vs19 // real*imag, imag*imag
- xvmaddadp vs52, vs2, vs18 // real*real, imag*real
- xvmaddadp vs53, vs2, vs19 // real*imag, imag*imag
- xvmaddadp vs54, vs3, vs18 // real*real, imag*real
- xvmaddadp vs55, vs3, vs19 // real*imag, imag*imag
- xvmaddadp vs56, vs4, vs18 // real*real, imag*real
- xvmaddadp vs57, vs4, vs19 // real*imag, imag*imag
- xvmaddadp vs58, vs5, vs18 // real*real, imag*real
- xvmaddadp vs59, vs5, vs19 // real*imag, imag*imag
- xvmaddadp vs60, vs6, vs18 // real*real, imag*real
- xvmaddadp vs61, vs6, vs19 // real*imag, imag*imag
- xvmaddadp vs62, vs7, vs18 // real*real, imag*real
- xvmaddadp vs63, vs7, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x8_2', `
- #else
- .macro KERNEL2x8_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
- xvmaddadp vs40, vs12, vs20 // real*real, imag*real
- xvmaddadp vs41, vs12, vs21 // real*imag, imag*imag
- xvmaddadp vs42, vs13, vs20 // real*real, imag*real
- xvmaddadp vs43, vs13, vs21 // real*imag, imag*imag
- xvmaddadp vs44, vs14, vs20 // real*real, imag*real
- xvmaddadp vs45, vs14, vs21 // real*imag, imag*imag
- xvmaddadp vs46, vs15, vs20 // real*real, imag*real
- xvmaddadp vs47, vs15, vs21 // real*imag, imag*imag
-
- xvmaddadp vs48, vs8, vs22 // real*real, imag*real
- xvmaddadp vs49, vs8, vs23 // real*imag, imag*imag
- xvmaddadp vs50, vs9, vs22 // real*real, imag*real
- xvmaddadp vs51, vs9, vs23 // real*imag, imag*imag
- xvmaddadp vs52, vs10, vs22 // real*real, imag*real
- xvmaddadp vs53, vs10, vs23 // real*imag, imag*imag
- xvmaddadp vs54, vs11, vs22 // real*real, imag*real
- xvmaddadp vs55, vs11, vs23 // real*imag, imag*imag
- xvmaddadp vs56, vs12, vs22 // real*real, imag*real
- xvmaddadp vs57, vs12, vs23 // real*imag, imag*imag
- xvmaddadp vs58, vs13, vs22 // real*real, imag*real
- xvmaddadp vs59, vs13, vs23 // real*imag, imag*imag
- xvmaddadp vs60, vs14, vs22 // real*real, imag*real
- xvmaddadp vs61, vs14, vs23 // real*imag, imag*imag
- xvmaddadp vs62, vs15, vs22 // real*real, imag*real
- xvmaddadp vs63, vs15, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x8_E2', `
- #else
- .macro KERNEL2x8_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
- xvmaddadp vs40, vs12, vs20 // real*real, imag*real
- xvmaddadp vs41, vs12, vs21 // real*imag, imag*imag
- xvmaddadp vs42, vs13, vs20 // real*real, imag*real
- xvmaddadp vs43, vs13, vs21 // real*imag, imag*imag
- xvmaddadp vs44, vs14, vs20 // real*real, imag*real
- xvmaddadp vs45, vs14, vs21 // real*imag, imag*imag
- xvmaddadp vs46, vs15, vs20 // real*real, imag*real
- xvmaddadp vs47, vs15, vs21 // real*imag, imag*imag
-
- xvmaddadp vs48, vs8, vs22 // real*real, imag*real
- xvmaddadp vs49, vs8, vs23 // real*imag, imag*imag
- xvmaddadp vs50, vs9, vs22 // real*real, imag*real
- xvmaddadp vs51, vs9, vs23 // real*imag, imag*imag
- xvmaddadp vs52, vs10, vs22 // real*real, imag*real
- xvmaddadp vs53, vs10, vs23 // real*imag, imag*imag
- xvmaddadp vs54, vs11, vs22 // real*real, imag*real
- xvmaddadp vs55, vs11, vs23 // real*imag, imag*imag
- xvmaddadp vs56, vs12, vs22 // real*real, imag*real
- xvmaddadp vs57, vs12, vs23 // real*imag, imag*imag
- xvmaddadp vs58, vs13, vs22 // real*real, imag*real
- xvmaddadp vs59, vs13, vs23 // real*imag, imag*imag
- xvmaddadp vs60, vs14, vs22 // real*real, imag*real
- xvmaddadp vs61, vs14, vs23 // real*imag, imag*imag
- xvmaddadp vs62, vs15, vs22 // real*real, imag*real
- xvmaddadp vs63, vs15, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x8_SUBI1', `
- #else
- .macro KERNEL2x8_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
- xvmuldp vs40, vs4, vs16 // real*real, imag*real
- xvmuldp vs41, vs4, vs17 // real*imag, imag*imag
- xvmuldp vs42, vs5, vs16 // real*real, imag*real
- xvmuldp vs43, vs5, vs17 // real*imag, imag*imag
- xvmuldp vs44, vs6, vs16 // real*real, imag*real
- xvmuldp vs45, vs6, vs17 // real*imag, imag*imag
- xvmuldp vs46, vs7, vs16 // real*real, imag*real
- xvmuldp vs47, vs7, vs17 // real*imag, imag*imag
-
- xvmuldp vs48, vs0, vs18 // real*real, imag*real
- xvmuldp vs49, vs0, vs19 // real*imag, imag*imag
- xvmuldp vs50, vs1, vs18 // real*real, imag*real
- xvmuldp vs51, vs1, vs19 // real*imag, imag*imag
- xvmuldp vs52, vs2, vs18 // real*real, imag*real
- xvmuldp vs53, vs2, vs19 // real*imag, imag*imag
- xvmuldp vs54, vs3, vs18 // real*real, imag*real
- xvmuldp vs55, vs3, vs19 // real*imag, imag*imag
- xvmuldp vs56, vs4, vs18 // real*real, imag*real
- xvmuldp vs57, vs4, vs19 // real*imag, imag*imag
- xvmuldp vs58, vs5, vs18 // real*real, imag*real
- xvmuldp vs59, vs5, vs19 // real*imag, imag*imag
- xvmuldp vs60, vs6, vs18 // real*real, imag*real
- xvmuldp vs61, vs6, vs19 // real*imag, imag*imag
- xvmuldp vs62, vs7, vs18 // real*real, imag*real
- xvmuldp vs63, vs7, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x8_SUB1', `
- #else
- .macro KERNEL2x8_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
- xvmaddadp vs40, vs4, vs16 // real*real, imag*real
- xvmaddadp vs41, vs4, vs17 // real*imag, imag*imag
- xvmaddadp vs42, vs5, vs16 // real*real, imag*real
- xvmaddadp vs43, vs5, vs17 // real*imag, imag*imag
- xvmaddadp vs44, vs6, vs16 // real*real, imag*real
- xvmaddadp vs45, vs6, vs17 // real*imag, imag*imag
- xvmaddadp vs46, vs7, vs16 // real*real, imag*real
- xvmaddadp vs47, vs7, vs17 // real*imag, imag*imag
-
- xvmaddadp vs48, vs0, vs18 // real*real, imag*real
- xvmaddadp vs49, vs0, vs19 // real*imag, imag*imag
- xvmaddadp vs50, vs1, vs18 // real*real, imag*real
- xvmaddadp vs51, vs1, vs19 // real*imag, imag*imag
- xvmaddadp vs52, vs2, vs18 // real*real, imag*real
- xvmaddadp vs53, vs2, vs19 // real*imag, imag*imag
- xvmaddadp vs54, vs3, vs18 // real*real, imag*real
- xvmaddadp vs55, vs3, vs19 // real*imag, imag*imag
- xvmaddadp vs56, vs4, vs18 // real*real, imag*real
- xvmaddadp vs57, vs4, vs19 // real*imag, imag*imag
- xvmaddadp vs58, vs5, vs18 // real*real, imag*real
- xvmaddadp vs59, vs5, vs19 // real*imag, imag*imag
- xvmaddadp vs60, vs6, vs18 // real*real, imag*real
- xvmaddadp vs61, vs6, vs19 // real*imag, imag*imag
- xvmaddadp vs62, vs7, vs18 // real*real, imag*real
- xvmaddadp vs63, vs7, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE2x8', `
- #else
- .macro SAVE2x8
- #endif
-
-
- mr T1, CO
- addi T2, T1, 64
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
- lxvd2x vs18, o32, T1
- lxvd2x vs19, o48, T1
- lxvd2x vs20, o0, T2
- lxvd2x vs21, o16, T2
- lxvd2x vs22, o32, T2
- lxvd2x vs23, o48, T2
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
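- 
- // Editor's note: the block above is the standard complex alpha scaling,
- //    c_re = alpha_r*re - alpha_i*im
- //    c_im = alpha_i*re + alpha_r*im
- // i.e. c = alpha * (re + i*im); xxpermdi packs [c_re, c_im] into one vector.
- // The same eight-instruction pattern repeats for every accumulator pair.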
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs37,vs37) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs36 // realA*realB
- XSFADD_R2 vs0, vs0, vs37 // imagA*imagB
-
- XXSWAPD(vs36,vs36) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs37,vs37) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs36 // imagA*realB
- XSFADD_I2 vs1, vs1, vs37 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs10, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs39,vs39) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs38 // realA*realB
- XSFADD_R2 vs0, vs0, vs39 // imagA*imagB
-
- XXSWAPD(vs38,vs38) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs39,vs39) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs38 // imagA*realB
- XSFADD_I2 vs1, vs1, vs39 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs11, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs41,vs41) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs40 // realA*realB
- XSFADD_R2 vs0, vs0, vs41 // imagA*imagB
-
- XXSWAPD(vs40,vs40) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs41,vs41) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs40 // imagA*realB
- XSFADD_I2 vs1, vs1, vs41 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs12, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs43,vs43) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs42 // realA*realB
- XSFADD_R2 vs0, vs0, vs43 // imagA*imagB
-
- XXSWAPD(vs42,vs42) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs43,vs43) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs42 // imagA*realB
- XSFADD_I2 vs1, vs1, vs43 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs13, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs45,vs45) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs44 // realA*realB
- XSFADD_R2 vs0, vs0, vs45 // imagA*imagB
-
- XXSWAPD(vs44,vs44) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs45,vs45) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs44 // imagA*realB
- XSFADD_I2 vs1, vs1, vs45 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs14, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs47,vs47) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs46 // realA*realB
- XSFADD_R2 vs0, vs0, vs47 // imagA*imagB
-
- XXSWAPD(vs46,vs46) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs47,vs47) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs46 // imagA*realB
- XSFADD_I2 vs1, vs1, vs47 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs15, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
- xvadddp vs10, vs10, vs18
- xvadddp vs11, vs11, vs19
- xvadddp vs12, vs12, vs20
- xvadddp vs13, vs13, vs21
- xvadddp vs14, vs14, vs22
- xvadddp vs15, vs15, vs23
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
- stxvd2x vs10, o32, T1
- stxvd2x vs11, o48, T1
- stxvd2x vs12, o0, T2
- stxvd2x vs13, o16, T2
- stxvd2x vs14, o32, T2
- stxvd2x vs15, o48, T2
-
- add T1, T1, LDC
- add T2, T2, LDC
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
- lxvd2x vs18, o32, T1
- lxvd2x vs19, o48, T1
- lxvd2x vs20, o0, T2
- lxvd2x vs21, o16, T2
- lxvd2x vs22, o32, T2
- lxvd2x vs23, o48, T2
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs49,vs49) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs48 // realA*realB
- XSFADD_R2 vs0, vs0, vs49 // imagA*imagB
-
- XXSWAPD(vs48,vs48) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs49,vs49) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs48 // imagA*realB
- XSFADD_I2 vs1, vs1, vs49 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs51,vs51) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs50 // realA*realB
- XSFADD_R2 vs0, vs0, vs51 // imagA*imagB
-
- XXSWAPD(vs50,vs50) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs51,vs51) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs50 // imagA*realB
- XSFADD_I2 vs1, vs1, vs51 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs53,vs53) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs52 // realA*realB
- XSFADD_R2 vs0, vs0, vs53 // imagA*imagB
-
- XXSWAPD(vs52,vs52) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs53,vs53) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs52 // imagA*realB
- XSFADD_I2 vs1, vs1, vs53 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs10, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs55,vs55) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs54 // realA*realB
- XSFADD_R2 vs0, vs0, vs55 // imagA*imagB
-
- XXSWAPD(vs54,vs54) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs55,vs55) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs54 // imagA*realB
- XSFADD_I2 vs1, vs1, vs55 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs11, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs57,vs57) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs56 // realA*realB
- XSFADD_R2 vs0, vs0, vs57 // imagA*imagB
-
- XXSWAPD(vs56,vs56) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs57,vs57) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs56 // imagA*realB
- XSFADD_I2 vs1, vs1, vs57 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs12, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs59,vs59) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs58 // realA*realB
- XSFADD_R2 vs0, vs0, vs59 // imagA*imagB
-
- XXSWAPD(vs58,vs58) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs59,vs59) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs58 // imagA*realB
- XSFADD_I2 vs1, vs1, vs59 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs13, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs61,vs61) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs60 // realA*realB
- XSFADD_R2 vs0, vs0, vs61 // imagA*imagB
-
- XXSWAPD(vs60,vs60) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs61,vs61) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs60 // imagA*realB
- XSFADD_I2 vs1, vs1, vs61 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs14, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs63,vs63) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs62 // realA*realB
- XSFADD_R2 vs0, vs0, vs63 // imagA*imagB
-
- XXSWAPD(vs62,vs62) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs63,vs63) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs62 // imagA*realB
- XSFADD_I2 vs1, vs1, vs63 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs15, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
- xvadddp vs10, vs10, vs18
- xvadddp vs11, vs11, vs19
- xvadddp vs12, vs12, vs20
- xvadddp vs13, vs13, vs21
- xvadddp vs14, vs14, vs22
- xvadddp vs15, vs15, vs23
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
- stxvd2x vs10, o32, T1
- stxvd2x vs11, o48, T1
- stxvd2x vs12, o0, T2
- stxvd2x vs13, o16, T2
- stxvd2x vs14, o32, T2
- stxvd2x vs15, o48, T2
-
- add T1, T1, LDC
- add T2, T2, LDC
- addi CO, CO, 128
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
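- 
- // Editor's note: in the plain GEMM build the scaled tile is added to the
- // values loaded from C; under TRMMKERNEL the C loads and xvadddp updates are
- // assembled out and the tile is stored directly. CO advances by 128 bytes,
- // i.e. one row of the M=8 double-complex tile.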
-
-
- /**********************************************************************************************
- * Macros for N=2 and M=4
- **********************************************************************************************/
-
- #if defined(_AIX)
- define(`LOAD2x4_1', `
- #else
- .macro LOAD2x4_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x4_I1', `
- #else
- .macro KERNEL2x4_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
-
- xvmuldp vs40, vs0, vs18 // real*real, imag*real
- xvmuldp vs41, vs0, vs19 // real*imag, imag*imag
- xvmuldp vs42, vs1, vs18 // real*real, imag*real
- xvmuldp vs43, vs1, vs19 // real*imag, imag*imag
- xvmuldp vs44, vs2, vs18 // real*real, imag*real
- xvmuldp vs45, vs2, vs19 // real*imag, imag*imag
- xvmuldp vs46, vs3, vs18 // real*real, imag*real
- xvmuldp vs47, vs3, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x4_1', `
- #else
- .macro KERNEL2x4_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
-
- xvmaddadp vs40, vs0, vs18 // real*real, imag*real
- xvmaddadp vs41, vs0, vs19 // real*imag, imag*imag
- xvmaddadp vs42, vs1, vs18 // real*real, imag*real
- xvmaddadp vs43, vs1, vs19 // real*imag, imag*imag
- xvmaddadp vs44, vs2, vs18 // real*real, imag*real
- xvmaddadp vs45, vs2, vs19 // real*imag, imag*imag
- xvmaddadp vs46, vs3, vs18 // real*real, imag*real
- xvmaddadp vs47, vs3, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x4_2', `
- #else
- .macro KERNEL2x4_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
-
- xvmaddadp vs40, vs8, vs22 // real*real, imag*real
- xvmaddadp vs41, vs8, vs23 // real*imag, imag*imag
- xvmaddadp vs42, vs9, vs22 // real*real, imag*real
- xvmaddadp vs43, vs9, vs23 // real*imag, imag*imag
- xvmaddadp vs44, vs10, vs22 // real*real, imag*real
- xvmaddadp vs45, vs10, vs23 // real*imag, imag*imag
- xvmaddadp vs46, vs11, vs22 // real*real, imag*real
- xvmaddadp vs47, vs11, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x4_E2', `
- #else
- .macro KERNEL2x4_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
-
- xvmaddadp vs40, vs8, vs22 // real*real, imag*real
- xvmaddadp vs41, vs8, vs23 // real*imag, imag*imag
- xvmaddadp vs42, vs9, vs22 // real*real, imag*real
- xvmaddadp vs43, vs9, vs23 // real*imag, imag*imag
- xvmaddadp vs44, vs10, vs22 // real*real, imag*real
- xvmaddadp vs45, vs10, vs23 // real*imag, imag*imag
- xvmaddadp vs46, vs11, vs22 // real*real, imag*real
- xvmaddadp vs47, vs11, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x4_SUBI1', `
- #else
- .macro KERNEL2x4_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
-
- xvmuldp vs40, vs0, vs18 // real*real, imag*real
- xvmuldp vs41, vs0, vs19 // real*imag, imag*imag
- xvmuldp vs42, vs1, vs18 // real*real, imag*real
- xvmuldp vs43, vs1, vs19 // real*imag, imag*imag
- xvmuldp vs44, vs2, vs18 // real*real, imag*real
- xvmuldp vs45, vs2, vs19 // real*imag, imag*imag
- xvmuldp vs46, vs3, vs18 // real*real, imag*real
- xvmuldp vs47, vs3, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x4_SUB1', `
- #else
- .macro KERNEL2x4_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
-
- xvmaddadp vs40, vs0, vs18 // real*real, imag*real
- xvmaddadp vs41, vs0, vs19 // real*imag, imag*imag
- xvmaddadp vs42, vs1, vs18 // real*real, imag*real
- xvmaddadp vs43, vs1, vs19 // real*imag, imag*imag
- xvmaddadp vs44, vs2, vs18 // real*real, imag*real
- xvmaddadp vs45, vs2, vs19 // real*imag, imag*imag
- xvmaddadp vs46, vs3, vs18 // real*real, imag*real
- xvmaddadp vs47, vs3, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE2x4', `
- #else
- .macro SAVE2x4
- #endif
-
-
- mr T1, CO
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
- lxvd2x vs18, o32, T1
- lxvd2x vs19, o48, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs37,vs37) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs36 // realA*realB
- XSFADD_R2 vs0, vs0, vs37 // imagA*imagB
-
- XXSWAPD(vs36,vs36) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs37,vs37) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs36 // imagA*realB
- XSFADD_I2 vs1, vs1, vs37 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs10, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs39,vs39) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs38 // realA*realB
- XSFADD_R2 vs0, vs0, vs39 // imagA*imagB
-
- XXSWAPD(vs38,vs38) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs39,vs39) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs38 // imagA*realB
- XSFADD_I2 vs1, vs1, vs39 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs11, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
- xvadddp vs10, vs10, vs18
- xvadddp vs11, vs11, vs19
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
- stxvd2x vs10, o32, T1
- stxvd2x vs11, o48, T1
-
- add T1, T1, LDC
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
- lxvd2x vs18, o32, T1
- lxvd2x vs19, o48, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs41,vs41) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs40 // realA*realB
- XSFADD_R2 vs0, vs0, vs41 // imagA*imagB
-
- XXSWAPD(vs40,vs40) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs41,vs41) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs40 // imagA*realB
- XSFADD_I2 vs1, vs1, vs41 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs43,vs43) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs42 // realA*realB
- XSFADD_R2 vs0, vs0, vs43 // imagA*imagB
-
- XXSWAPD(vs42,vs42) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs43,vs43) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs42 // imagA*realB
- XSFADD_I2 vs1, vs1, vs43 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs45,vs45) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs44 // realA*realB
- XSFADD_R2 vs0, vs0, vs45 // imagA*imagB
-
- XXSWAPD(vs44,vs44) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs45,vs45) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs44 // imagA*realB
- XSFADD_I2 vs1, vs1, vs45 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs10, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs47,vs47) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs46 // realA*realB
- XSFADD_R2 vs0, vs0, vs47 // imagA*imagB
-
- XXSWAPD(vs46,vs46) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs47,vs47) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs46 // imagA*realB
- XSFADD_I2 vs1, vs1, vs47 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs11, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
- xvadddp vs10, vs10, vs18
- xvadddp vs11, vs11, vs19
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
- stxvd2x vs10, o32, T1
- stxvd2x vs11, o48, T1
-
- add T1, T1, LDC
- addi CO, CO, 64
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- /**********************************************************************************************
- * Macros for N=2 and M=2
- **********************************************************************************************/
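- 
- // Every block size below follows the same pipelined pattern: LOADmxn_1
- // preloads the first A and B vectors, KERNELmxn_I1 starts the accumulators
- // with xvmuldp while fetching the next inputs into the shadow set
- // (vs8.../vs20...), KERNELmxn_1 and KERNELmxn_2 ping-pong between the two
- // register sets so the loads overlap the xvmaddadp chains, KERNELmxn_E2
- // drains the last prefetched set, and KERNELmxn_SUBI1/_SUB1 are the
- // unpipelined one-step variants for the k remainder loop.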
-
- #if defined(_AIX)
- define(`LOAD2x2_1', `
- #else
- .macro LOAD2x2_1
- #endif
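- 
- // BO points into the buffer prepared by the ZCOPYB packing macros at the end
- // of this file, so vs16/vs18 hold duplicated real parts and vs17/vs19 hold
- // duplicated imaginary parts of the two B values for this k step.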
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x2_I1', `
- #else
- .macro KERNEL2x2_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
-
- xvmuldp vs36, vs0, vs18 // real*real, imag*real
- xvmuldp vs37, vs0, vs19 // real*imag, imag*imag
- xvmuldp vs38, vs1, vs18 // real*real, imag*real
- xvmuldp vs39, vs1, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x2_1', `
- #else
- .macro KERNEL2x2_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
-
- xvmaddadp vs36, vs0, vs18 // real*real, imag*real
- xvmaddadp vs37, vs0, vs19 // real*imag, imag*imag
- xvmaddadp vs38, vs1, vs18 // real*real, imag*real
- xvmaddadp vs39, vs1, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x2_2', `
- #else
- .macro KERNEL2x2_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
-
- xvmaddadp vs36, vs8, vs22 // real*real, imag*real
- xvmaddadp vs37, vs8, vs23 // real*imag, imag*imag
- xvmaddadp vs38, vs9, vs22 // real*real, imag*real
- xvmaddadp vs39, vs9, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x2_E2', `
- #else
- .macro KERNEL2x2_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
-
- xvmaddadp vs36, vs8, vs22 // real*real, imag*real
- xvmaddadp vs37, vs8, vs23 // real*imag, imag*imag
- xvmaddadp vs38, vs9, vs22 // real*real, imag*real
- xvmaddadp vs39, vs9, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x2_SUBI1', `
- #else
- .macro KERNEL2x2_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
-
- xvmuldp vs36, vs0, vs18 // real*real, imag*real
- xvmuldp vs37, vs0, vs19 // real*imag, imag*imag
- xvmuldp vs38, vs1, vs18 // real*real, imag*real
- xvmuldp vs39, vs1, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x2_SUB1', `
- #else
- .macro KERNEL2x2_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
-
- xvmaddadp vs36, vs0, vs18 // real*real, imag*real
- xvmaddadp vs37, vs0, vs19 // real*imag, imag*imag
- xvmaddadp vs38, vs1, vs18 // real*real, imag*real
- xvmaddadp vs39, vs1, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE2x2', `
- #else
- .macro SAVE2x2
- #endif
-
-
- mr T1, CO
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
-
- add T1, T1, LDC
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs37,vs37) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs36 // realA*realB
- XSFADD_R2 vs0, vs0, vs37 // imagA*imagB
-
- XXSWAPD(vs36,vs36) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs37,vs37) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs36 // imagA*realB
- XSFADD_I2 vs1, vs1, vs37 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs39,vs39) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs38 // realA*realB
- XSFADD_R2 vs0, vs0, vs39 // imagA*imagB
-
- XXSWAPD(vs38,vs38) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs39,vs39) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs38 // imagA*realB
- XSFADD_I2 vs1, vs1, vs39 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
-
- add T1, T1, LDC
- addi CO, CO, 32
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- /**********************************************************************************************
- * Macros for N=2 and M=1
- **********************************************************************************************/
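- 
- // M=1 still works on full vectors: the single complex element of A fills one
- // 16 byte lxvd2x load, and vs32/vs33 and vs34/vs35 carry the partial sums
- // for the two columns of B.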
-
- #if defined(_AIX)
- define(`LOAD2x1_1', `
- #else
- .macro LOAD2x1_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x1_I1', `
- #else
- .macro KERNEL2x1_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
-
- xvmuldp vs34, vs0, vs18 // real*real, imag*real
- xvmuldp vs35, vs0, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x1_1', `
- #else
- .macro KERNEL2x1_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
- lxvd2x vs22, o32, BO // load real part from B
- lxvd2x vs23, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
-
- xvmaddadp vs34, vs0, vs18 // real*real, imag*real
- xvmaddadp vs35, vs0, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x1_2', `
- #else
- .macro KERNEL2x1_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
-
- xvmaddadp vs34, vs8, vs22 // real*real, imag*real
- xvmaddadp vs35, vs8, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x1_E2', `
- #else
- .macro KERNEL2x1_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
-
- xvmaddadp vs34, vs8, vs22 // real*real, imag*real
- xvmaddadp vs35, vs8, vs23 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x1_SUBI1', `
- #else
- .macro KERNEL2x1_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
-
- xvmuldp vs34, vs0, vs18 // real*real, imag*real
- xvmuldp vs35, vs0, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL2x1_SUB1', `
- #else
- .macro KERNEL2x1_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
- lxvd2x vs18, o32, BO // load real part from B
- lxvd2x vs19, o48, BO // load imag part from B
-
- addi BO, BO, 64
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
-
- xvmaddadp vs34, vs0, vs18 // real*real, imag*real
- xvmaddadp vs35, vs0, vs19 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE2x1', `
- #else
- .macro SAVE2x1
- #endif
-
-
- mr T1, CO
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
-
- #endif
-
- stxvd2x vs8, o0, T1
-
- add T1, T1, LDC
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
-
- #endif
-
- stxvd2x vs8, o0, T1
-
- add T1, T1, LDC
- addi CO, CO, 16
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- /**********************************************************************************************
- * Macros for N=1 and M=8
- **********************************************************************************************/
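- 
- // With a single column of B only vs16/vs17 are live per k step, while eight
- // complex elements of A (128 bytes) feed the accumulators vs32..vs47; the
- // save path stores through both T1 and T2 = CO + 64 to cover the full row.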
-
- #if defined(_AIX)
- define(`LOAD1x8_1', `
- #else
- .macro LOAD1x8_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x8_I1', `
- #else
- .macro KERNEL1x8_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs12, o0, AO // load real,imag from A
- lxvd2x vs13, o16, AO // load real,imag from A
- lxvd2x vs14, o32, AO // load real,imag from A
- lxvd2x vs15, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
- xvmuldp vs40, vs4, vs16 // real*real, imag*real
- xvmuldp vs41, vs4, vs17 // real*imag, imag*imag
- xvmuldp vs42, vs5, vs16 // real*real, imag*real
- xvmuldp vs43, vs5, vs17 // real*imag, imag*imag
- xvmuldp vs44, vs6, vs16 // real*real, imag*real
- xvmuldp vs45, vs6, vs17 // real*imag, imag*imag
- xvmuldp vs46, vs7, vs16 // real*real, imag*real
- xvmuldp vs47, vs7, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x8_1', `
- #else
- .macro KERNEL1x8_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs12, o0, AO // load real,imag from A
- lxvd2x vs13, o16, AO // load real,imag from A
- lxvd2x vs14, o32, AO // load real,imag from A
- lxvd2x vs15, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
- xvmaddadp vs40, vs4, vs16 // real*real, imag*real
- xvmaddadp vs41, vs4, vs17 // real*imag, imag*imag
- xvmaddadp vs42, vs5, vs16 // real*real, imag*real
- xvmaddadp vs43, vs5, vs17 // real*imag, imag*imag
- xvmaddadp vs44, vs6, vs16 // real*real, imag*real
- xvmaddadp vs45, vs6, vs17 // real*imag, imag*imag
- xvmaddadp vs46, vs7, vs16 // real*real, imag*real
- xvmaddadp vs47, vs7, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x8_2', `
- #else
- .macro KERNEL1x8_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
- xvmaddadp vs40, vs12, vs20 // real*real, imag*real
- xvmaddadp vs41, vs12, vs21 // real*imag, imag*imag
- xvmaddadp vs42, vs13, vs20 // real*real, imag*real
- xvmaddadp vs43, vs13, vs21 // real*imag, imag*imag
- xvmaddadp vs44, vs14, vs20 // real*real, imag*real
- xvmaddadp vs45, vs14, vs21 // real*imag, imag*imag
- xvmaddadp vs46, vs15, vs20 // real*real, imag*real
- xvmaddadp vs47, vs15, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x8_E2', `
- #else
- .macro KERNEL1x8_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
- xvmaddadp vs40, vs12, vs20 // real*real, imag*real
- xvmaddadp vs41, vs12, vs21 // real*imag, imag*imag
- xvmaddadp vs42, vs13, vs20 // real*real, imag*real
- xvmaddadp vs43, vs13, vs21 // real*imag, imag*imag
- xvmaddadp vs44, vs14, vs20 // real*real, imag*real
- xvmaddadp vs45, vs14, vs21 // real*imag, imag*imag
- xvmaddadp vs46, vs15, vs20 // real*real, imag*real
- xvmaddadp vs47, vs15, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x8_SUBI1', `
- #else
- .macro KERNEL1x8_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
- xvmuldp vs40, vs4, vs16 // real*real, imag*real
- xvmuldp vs41, vs4, vs17 // real*imag, imag*imag
- xvmuldp vs42, vs5, vs16 // real*real, imag*real
- xvmuldp vs43, vs5, vs17 // real*imag, imag*imag
- xvmuldp vs44, vs6, vs16 // real*real, imag*real
- xvmuldp vs45, vs6, vs17 // real*imag, imag*imag
- xvmuldp vs46, vs7, vs16 // real*real, imag*real
- xvmuldp vs47, vs7, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x8_SUB1', `
- #else
- .macro KERNEL1x8_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs4, o0, AO // load real,imag from A
- lxvd2x vs5, o16, AO // load real,imag from A
- lxvd2x vs6, o32, AO // load real,imag from A
- lxvd2x vs7, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
- xvmaddadp vs40, vs4, vs16 // real*real, imag*real
- xvmaddadp vs41, vs4, vs17 // real*imag, imag*imag
- xvmaddadp vs42, vs5, vs16 // real*real, imag*real
- xvmaddadp vs43, vs5, vs17 // real*imag, imag*imag
- xvmaddadp vs44, vs6, vs16 // real*real, imag*real
- xvmaddadp vs45, vs6, vs17 // real*imag, imag*imag
- xvmaddadp vs46, vs7, vs16 // real*real, imag*real
- xvmaddadp vs47, vs7, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE1x8', `
- #else
- .macro SAVE1x8
- #endif
-
-
- mr T1, CO
- addi T2, T1, 64
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
- lxvd2x vs18, o32, T1
- lxvd2x vs19, o48, T1
- lxvd2x vs20, o0, T2
- lxvd2x vs21, o16, T2
- lxvd2x vs22, o32, T2
- lxvd2x vs23, o48, T2
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs37,vs37) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs36 // realA*realB
- XSFADD_R2 vs0, vs0, vs37 // imagA*imagB
-
- XXSWAPD(vs36,vs36) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs37,vs37) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs36 // imagA*realB
- XSFADD_I2 vs1, vs1, vs37 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs10, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs39,vs39) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs38 // realA*realB
- XSFADD_R2 vs0, vs0, vs39 // imagA*imagB
-
- XXSWAPD(vs38,vs38) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs39,vs39) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs38 // imagA*realB
- XSFADD_I2 vs1, vs1, vs39 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs11, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs41,vs41) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs40 // realA*realB
- XSFADD_R2 vs0, vs0, vs41 // imagA*imagB
-
- XXSWAPD(vs40,vs40) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs41,vs41) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs40 // imagA*realB
- XSFADD_I2 vs1, vs1, vs41 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs12, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs43,vs43) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs42 // realA*realB
- XSFADD_R2 vs0, vs0, vs43 // imagA*imagB
-
- XXSWAPD(vs42,vs42) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs43,vs43) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs42 // imagA*realB
- XSFADD_I2 vs1, vs1, vs43 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs13, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs45,vs45) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs44 // realA*realB
- XSFADD_R2 vs0, vs0, vs45 // imagA*imagB
-
- XXSWAPD(vs44,vs44) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs45,vs45) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs44 // imagA*realB
- XSFADD_I2 vs1, vs1, vs45 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs14, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs47,vs47) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs46 // realA*realB
- XSFADD_R2 vs0, vs0, vs47 // imagA*imagB
-
- XXSWAPD(vs46,vs46) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs47,vs47) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs46 // imagA*realB
- XSFADD_I2 vs1, vs1, vs47 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs15, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
- xvadddp vs10, vs10, vs18
- xvadddp vs11, vs11, vs19
- xvadddp vs12, vs12, vs20
- xvadddp vs13, vs13, vs21
- xvadddp vs14, vs14, vs22
- xvadddp vs15, vs15, vs23
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
- stxvd2x vs10, o32, T1
- stxvd2x vs11, o48, T1
- stxvd2x vs12, o0, T2
- stxvd2x vs13, o16, T2
- stxvd2x vs14, o32, T2
- stxvd2x vs15, o48, T2
-
- add T1, T1, LDC
- add T2, T2, LDC
- addi CO, CO, 128
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- /**********************************************************************************************
- * Macros for N=1 and M=4
- **********************************************************************************************/
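- 
- // Same flow as the 1x8 case at half the width: only vs32..vs39 are live and
- // CO advances by 64 bytes per tile.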
-
- #if defined(_AIX)
- define(`LOAD1x4_1', `
- #else
- .macro LOAD1x4_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x4_I1', `
- #else
- .macro KERNEL1x4_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x4_1', `
- #else
- .macro KERNEL1x4_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
- lxvd2x vs10, o32, AO // load real,imag from A
- lxvd2x vs11, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x4_2', `
- #else
- .macro KERNEL1x4_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x4_E2', `
- #else
- .macro KERNEL1x4_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
- xvmaddadp vs36, vs10, vs20 // real*real, imag*real
- xvmaddadp vs37, vs10, vs21 // real*imag, imag*imag
- xvmaddadp vs38, vs11, vs20 // real*real, imag*real
- xvmaddadp vs39, vs11, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x4_SUBI1', `
- #else
- .macro KERNEL1x4_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
- xvmuldp vs36, vs2, vs16 // real*real, imag*real
- xvmuldp vs37, vs2, vs17 // real*imag, imag*imag
- xvmuldp vs38, vs3, vs16 // real*real, imag*real
- xvmuldp vs39, vs3, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x4_SUB1', `
- #else
- .macro KERNEL1x4_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
- lxvd2x vs2, o32, AO // load real,imag from A
- lxvd2x vs3, o48, AO // load real,imag from A
-
- addi AO, AO, 64
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
- xvmaddadp vs36, vs2, vs16 // real*real, imag*real
- xvmaddadp vs37, vs2, vs17 // real*imag, imag*imag
- xvmaddadp vs38, vs3, vs16 // real*real, imag*real
- xvmaddadp vs39, vs3, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE1x4', `
- #else
- .macro SAVE1x4
- #endif
-
-
- mr T1, CO
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
- lxvd2x vs18, o32, T1
- lxvd2x vs19, o48, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs37,vs37) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs36 // realA*realB
- XSFADD_R2 vs0, vs0, vs37 // imagA*imagB
-
- XXSWAPD(vs36,vs36) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs37,vs37) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs36 // imagA*realB
- XSFADD_I2 vs1, vs1, vs37 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs10, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs39,vs39) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs38 // realA*realB
- XSFADD_R2 vs0, vs0, vs39 // imagA*imagB
-
- XXSWAPD(vs38,vs38) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs39,vs39) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs38 // imagA*realB
- XSFADD_I2 vs1, vs1, vs39 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs11, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
- xvadddp vs10, vs10, vs18
- xvadddp vs11, vs11, vs19
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
- stxvd2x vs10, o32, T1
- stxvd2x vs11, o48, T1
-
- add T1, T1, LDC
- addi CO, CO, 64
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- /**********************************************************************************************
- * Macros for N=1 and M=2
- **********************************************************************************************/
-
- #if defined(_AIX)
- define(`LOAD1x2_1', `
- #else
- .macro LOAD1x2_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x2_I1', `
- #else
- .macro KERNEL1x2_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x2_1', `
- #else
- .macro KERNEL1x2_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
- lxvd2x vs9, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x2_2', `
- #else
- .macro KERNEL1x2_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x2_E2', `
- #else
- .macro KERNEL1x2_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
- xvmaddadp vs34, vs9, vs20 // real*real, imag*real
- xvmaddadp vs35, vs9, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x2_SUBI1', `
- #else
- .macro KERNEL1x2_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
- xvmuldp vs34, vs1, vs16 // real*real, imag*real
- xvmuldp vs35, vs1, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x2_SUB1', `
- #else
- .macro KERNEL1x2_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
- lxvd2x vs1, o16, AO // load real,imag from A
-
- addi AO, AO, 32
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
- xvmaddadp vs34, vs1, vs16 // real*real, imag*real
- xvmaddadp vs35, vs1, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE1x2', `
- #else
- .macro SAVE1x2
- #endif
-
-
- mr T1, CO
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
- lxvd2x vs17, o16, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs35,vs35) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs34 // realA*realB
- XSFADD_R2 vs0, vs0, vs35 // imagA*imagB
-
- XXSWAPD(vs34,vs34) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs35,vs35) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs34 // imagA*realB
- XSFADD_I2 vs1, vs1, vs35 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs9, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
- xvadddp vs9, vs9, vs17
-
- #endif
-
- stxvd2x vs8, o0, T1
- stxvd2x vs9, o16, T1
-
- add T1, T1, LDC
- addi CO, CO, 32
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- /**********************************************************************************************
- * Macros for N=1 and M=1
- **********************************************************************************************/
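- 
- // The scalar tail: one accumulator pair vs32/vs33, a single reduction in
- // SAVE1x1 and a 16 byte store, with CO advancing by one complex element.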
-
- #if defined(_AIX)
- define(`LOAD1x1_1', `
- #else
- .macro LOAD1x1_1
- #endif
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x1_I1', `
- #else
- .macro KERNEL1x1_I1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x1_1', `
- #else
- .macro KERNEL1x1_1
- #endif
-
- lxvd2x vs8, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs20, o0, BO // load real part from B
- lxvd2x vs21, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x1_2', `
- #else
- .macro KERNEL1x1_2
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x1_E2', `
- #else
- .macro KERNEL1x1_E2
- #endif
-
-
- xvmaddadp vs32, vs8, vs20 // real*real, imag*real
- xvmaddadp vs33, vs8, vs21 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x1_SUBI1', `
- #else
- .macro KERNEL1x1_SUBI1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmuldp vs32, vs0, vs16 // real*real, imag*real
- xvmuldp vs33, vs0, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`KERNEL1x1_SUB1', `
- #else
- .macro KERNEL1x1_SUB1
- #endif
-
- lxvd2x vs0, o0, AO // load real,imag from A
-
- addi AO, AO, 16
-
- lxvd2x vs16, o0, BO // load real part from B
- lxvd2x vs17, o16, BO // load imag part from B
-
- addi BO, BO, 32
-
- xvmaddadp vs32, vs0, vs16 // real*real, imag*real
- xvmaddadp vs33, vs0, vs17 // real*imag, imag*imag
-
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
- #if defined(_AIX)
- define(`SAVE1x1', `
- #else
- .macro SAVE1x1
- #endif
-
-
- mr T1, CO
-
- #ifndef TRMMKERNEL
-
- lxvd2x vs16, o0, T1
-
- #endif
-
-
- xxlxor vs0, vs0, vs0
- xxlxor vs1, vs1, vs1
- XXSWAPD(vs33,vs33) // realA*imagB, imagA*imagB -> imagA*imagB, realA*imagB
-
- XSFADD_R1 vs0, vs0, vs32 // realA*realB
- XSFADD_R2 vs0, vs0, vs33 // imagA*imagB
-
- XXSWAPD(vs32,vs32) // realA*realB, imagA*realB -> imagA*realB, realA*realB
- XXSWAPD(vs33,vs33) // imagA*imagB, realA*imagB -> realA*imagB, imagA*imagB
-
- XSFADD_I1 vs1, vs1, vs32 // imagA*realB
- XSFADD_I2 vs1, vs1, vs33 // realA*imagB
-
- xsmuldp vs4, vs0, alpha_r // real*alpha_r
- xsmuldp vs5, vs1, alpha_i // imag*alpha_i
- xsmuldp vs6, vs0, alpha_i // real*alpha_i
- xsmuldp vs7, vs1, alpha_r // imag*alpha_r
-
- xssubdp vs2, vs4, vs5 // real*alpha_r - imag*alpha_i
- xsadddp vs3, vs6, vs7 // real*alpha_i + imag*alpha_r
- xxpermdi vs8, vs2, vs3, 0 // merge real and imag part
-
-
- #ifndef TRMMKERNEL
-
- xvadddp vs8, vs8, vs16
-
- #endif
-
- stxvd2x vs8, o0, T1
-
- add T1, T1, LDC
- addi CO, CO, 16
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
-
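- /**********************************************************************************************
- * Macros for packing B: each complex value is split into a splatted real and
- * a splatted imaginary vector in the BBO buffer consumed by the kernels above
- **********************************************************************************************/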
- #if defined(_AIX)
- define(`ZCOPYB_1x1', `
- #else
- .macro ZCOPYB_1x1
- #endif
-
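- // lxvdsx loads one double and splats it into both halves of the vector, so
- // vs4 = [br,br] and vs5 = [bi,bi] come straight from memory.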
- lxvdsx vs4, o0, BO // b0_r
- lxvdsx vs5, o8, BO // b0_i
- addi BO, BO, 16
- stxvd2x vs4, o0, BBO
- stxvd2x vs5, o16, BBO
- addi BBO, BBO, 32
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
- #if defined(_AIX)
- define(`ZCOPYB_8x1', `
- #else
- .macro ZCOPYB_8x1
- #endif
-
- lxvd2x vs32, o0, BO
- lxvd2x vs33, o16, BO
- lxvd2x vs34, o32, BO
- lxvd2x vs35, o48, BO
- addi BO, BO, 64
-
- lxvd2x vs36, o0, BO
- lxvd2x vs37, o16, BO
- lxvd2x vs38, o32, BO
- lxvd2x vs39, o48, BO
- addi BO, BO, 64
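- 
- // Selector 0 duplicates the even doubleword (the real part) into both halves
- // of the target, selector 1 duplicates the odd doubleword (the imag part).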
-
- XXSPLTD(vs40,vs32,0)
- XXSPLTD(vs41,vs32,1)
- XXSPLTD(vs42,vs33,0)
- XXSPLTD(vs43,vs33,1)
- XXSPLTD(vs44,vs34,0)
- XXSPLTD(vs45,vs34,1)
- XXSPLTD(vs46,vs35,0)
- XXSPLTD(vs47,vs35,1)
-
- XXSPLTD(vs48,vs36,0)
- XXSPLTD(vs49,vs36,1)
- XXSPLTD(vs50,vs37,0)
- XXSPLTD(vs51,vs37,1)
- XXSPLTD(vs52,vs38,0)
- XXSPLTD(vs53,vs38,1)
- XXSPLTD(vs54,vs39,0)
- XXSPLTD(vs55,vs39,1)
-
- stxvd2x vs40, o0, BBO
- stxvd2x vs41, o16, BBO
- stxvd2x vs42, o32, BBO
- stxvd2x vs43, o48, BBO
- addi BBO, BBO, 64
-
- stxvd2x vs44, o0, BBO
- stxvd2x vs45, o16, BBO
- stxvd2x vs46, o32, BBO
- stxvd2x vs47, o48, BBO
- addi BBO, BBO, 64
-
- stxvd2x vs48, o0, BBO
- stxvd2x vs49, o16, BBO
- stxvd2x vs50, o32, BBO
- stxvd2x vs51, o48, BBO
- addi BBO, BBO, 64
-
- stxvd2x vs52, o0, BBO
- stxvd2x vs53, o16, BBO
- stxvd2x vs54, o32, BBO
- stxvd2x vs55, o48, BBO
- addi BBO, BBO, 64
-
- #if defined(_AIX)
- ')
- #else
- .endm
- #endif
-
-
|