/* strmm_macros_16x8_power8.S  (95 kB in the original repository)
 * NOTE(review): this span of the scrape contained web-UI banner text and a
 * collapsed line-number gutter, not source code; replaced with this comment.
 */
/***************************************************************************
Copyright (c) 2013-2016, The OpenBLAS Project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
3. Neither the name of the OpenBLAS project nor the names of
its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/

/**************************************************************************************
* 2016/04/02 Werner Saar (wernsaar@googlemail.com)
* BLASTEST : OK
* CTEST : OK
* TEST : OK
* LAPACK-TEST : OK
**************************************************************************************/

/**********************************************************************************************
* Macros for N=8 and M=16
**********************************************************************************************/
  37. #if defined(_AIX)
  38. define(`LOAD8x16_1', `
  39. #else
  40. .macro LOAD8x16_1
  41. #endif
  42. lxvw4x vs0, o0, AO
  43. lxvw4x vs1, o16, AO
  44. lxvw4x vs2, o32, AO
  45. lxvw4x vs3, o48, AO
  46. addi AO, AO, 64
  47. lxvw4x vs28, o0, BO
  48. xxspltw vs8, vs28, 0
  49. xxspltw vs9, vs28, 1
  50. xxspltw vs10, vs28, 2
  51. xxspltw vs11, vs28, 3
  52. lxvw4x vs29, o16, BO
  53. xxspltw vs12, vs29, 0
  54. xxspltw vs13, vs29, 1
  55. xxspltw vs14, vs29, 2
  56. xxspltw vs15, vs29, 3
  57. addi BO, BO, 32
  58. #if defined(_AIX)
  59. ')
  60. #else
  61. .endm
  62. #endif
/* KERNEL8x16_I1 -- 16x8 SGEMM/STRMM kernel, pipelined INIT step.
 * Loads the NEXT iteration's operands (A panel -> vs4-vs7, B row
 * splats -> vs16-vs23) while INITIALIZING the 32 accumulators
 * vs32-vs63 with xvmulsp from the CURRENT operands vs0-vs3 /
 * vs8-vs15 (set up by LOAD8x16_1).  Advances AO by 64 bytes
 * (16 floats) and BO by 32 bytes (8 floats). */
#if defined(_AIX)
define(`KERNEL8x16_I1', `
#else
.macro KERNEL8x16_I1
#endif
/* prefetch-load next 16 A elements */
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
lxvw4x vs6, o32, AO
lxvw4x vs7, o48, AO
addi AO, AO, 64
/* load next 8 B values; splat each word across a whole vector */
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs20, vs29, 0
xxspltw vs21, vs29, 1
xxspltw vs22, vs29, 2
xxspltw vs23, vs29, 3
addi BO, BO, 32
/* initialize accumulators: vs(32+4j+i) = A_i * B_j (i=0..3, j=0..7) */
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs2, vs8
xvmulsp vs35, vs3, vs8
xvmulsp vs36, vs0, vs9
xvmulsp vs37, vs1, vs9
xvmulsp vs38, vs2, vs9
xvmulsp vs39, vs3, vs9
xvmulsp vs40, vs0, vs10
xvmulsp vs41, vs1, vs10
xvmulsp vs42, vs2, vs10
xvmulsp vs43, vs3, vs10
xvmulsp vs44, vs0, vs11
xvmulsp vs45, vs1, vs11
xvmulsp vs46, vs2, vs11
xvmulsp vs47, vs3, vs11
xvmulsp vs48, vs0, vs12
xvmulsp vs49, vs1, vs12
xvmulsp vs50, vs2, vs12
xvmulsp vs51, vs3, vs12
xvmulsp vs52, vs0, vs13
xvmulsp vs53, vs1, vs13
xvmulsp vs54, vs2, vs13
xvmulsp vs55, vs3, vs13
xvmulsp vs56, vs0, vs14
xvmulsp vs57, vs1, vs14
xvmulsp vs58, vs2, vs14
xvmulsp vs59, vs3, vs14
xvmulsp vs60, vs0, vs15
xvmulsp vs61, vs1, vs15
xvmulsp vs62, vs2, vs15
xvmulsp vs63, vs3, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x16_1 -- 16x8 kernel, pipelined EVEN step.
 * Same load pattern as KERNEL8x16_I1 (next operands -> vs4-vs7 /
 * vs16-vs23) but ACCUMULATES with xvmaddasp into vs32-vs63 using
 * the current operands vs0-vs3 / vs8-vs15.  Pairs with
 * KERNEL8x16_2, which consumes the registers loaded here. */
#if defined(_AIX)
define(`KERNEL8x16_1', `
#else
.macro KERNEL8x16_1
#endif
/* prefetch-load next 16 A elements */
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
lxvw4x vs6, o32, AO
lxvw4x vs7, o48, AO
addi AO, AO, 64
/* load next 8 B values; splat each word across a whole vector */
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs20, vs29, 0
xxspltw vs21, vs29, 1
xxspltw vs22, vs29, 2
xxspltw vs23, vs29, 3
addi BO, BO, 32
/* accumulate: vs(32+4j+i) += A_i * B_j */
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs2, vs8
xvmaddasp vs35, vs3, vs8
xvmaddasp vs36, vs0, vs9
xvmaddasp vs37, vs1, vs9
xvmaddasp vs38, vs2, vs9
xvmaddasp vs39, vs3, vs9
xvmaddasp vs40, vs0, vs10
xvmaddasp vs41, vs1, vs10
xvmaddasp vs42, vs2, vs10
xvmaddasp vs43, vs3, vs10
xvmaddasp vs44, vs0, vs11
xvmaddasp vs45, vs1, vs11
xvmaddasp vs46, vs2, vs11
xvmaddasp vs47, vs3, vs11
xvmaddasp vs48, vs0, vs12
xvmaddasp vs49, vs1, vs12
xvmaddasp vs50, vs2, vs12
xvmaddasp vs51, vs3, vs12
xvmaddasp vs52, vs0, vs13
xvmaddasp vs53, vs1, vs13
xvmaddasp vs54, vs2, vs13
xvmaddasp vs55, vs3, vs13
xvmaddasp vs56, vs0, vs14
xvmaddasp vs57, vs1, vs14
xvmaddasp vs58, vs2, vs14
xvmaddasp vs59, vs3, vs14
xvmaddasp vs60, vs0, vs15
xvmaddasp vs61, vs1, vs15
xvmaddasp vs62, vs2, vs15
xvmaddasp vs63, vs3, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x16_2 -- 16x8 kernel, pipelined ODD step.
 * Mirror image of KERNEL8x16_1: loads the next operands into the
 * PRIMARY register set (vs0-vs3 / vs8-vs15) while accumulating
 * from the SECONDARY set (vs4-vs7 / vs16-vs23) loaded by the
 * preceding _I1/_1 step. */
#if defined(_AIX)
define(`KERNEL8x16_2', `
#else
.macro KERNEL8x16_2
#endif
/* prefetch-load next 16 A elements */
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
/* load next 8 B values; splat each word across a whole vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* accumulate from the operands loaded by the previous step */
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs6, vs16
xvmaddasp vs35, vs7, vs16
xvmaddasp vs36, vs4, vs17
xvmaddasp vs37, vs5, vs17
xvmaddasp vs38, vs6, vs17
xvmaddasp vs39, vs7, vs17
xvmaddasp vs40, vs4, vs18
xvmaddasp vs41, vs5, vs18
xvmaddasp vs42, vs6, vs18
xvmaddasp vs43, vs7, vs18
xvmaddasp vs44, vs4, vs19
xvmaddasp vs45, vs5, vs19
xvmaddasp vs46, vs6, vs19
xvmaddasp vs47, vs7, vs19
xvmaddasp vs48, vs4, vs20
xvmaddasp vs49, vs5, vs20
xvmaddasp vs50, vs6, vs20
xvmaddasp vs51, vs7, vs20
xvmaddasp vs52, vs4, vs21
xvmaddasp vs53, vs5, vs21
xvmaddasp vs54, vs6, vs21
xvmaddasp vs55, vs7, vs21
xvmaddasp vs56, vs4, vs22
xvmaddasp vs57, vs5, vs22
xvmaddasp vs58, vs6, vs22
xvmaddasp vs59, vs7, vs22
xvmaddasp vs60, vs4, vs23
xvmaddasp vs61, vs5, vs23
xvmaddasp vs62, vs6, vs23
xvmaddasp vs63, vs7, vs23
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x16_E2 -- 16x8 kernel, pipeline DRAIN step.
 * Final accumulation from the secondary register set
 * (vs4-vs7 / vs16-vs23); performs NO loads and does not advance
 * AO/BO, so it terminates the _1/_2 pipelined sequence. */
#if defined(_AIX)
define(`KERNEL8x16_E2', `
#else
.macro KERNEL8x16_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs6, vs16
xvmaddasp vs35, vs7, vs16
xvmaddasp vs36, vs4, vs17
xvmaddasp vs37, vs5, vs17
xvmaddasp vs38, vs6, vs17
xvmaddasp vs39, vs7, vs17
xvmaddasp vs40, vs4, vs18
xvmaddasp vs41, vs5, vs18
xvmaddasp vs42, vs6, vs18
xvmaddasp vs43, vs7, vs18
xvmaddasp vs44, vs4, vs19
xvmaddasp vs45, vs5, vs19
xvmaddasp vs46, vs6, vs19
xvmaddasp vs47, vs7, vs19
xvmaddasp vs48, vs4, vs20
xvmaddasp vs49, vs5, vs20
xvmaddasp vs50, vs6, vs20
xvmaddasp vs51, vs7, vs20
xvmaddasp vs52, vs4, vs21
xvmaddasp vs53, vs5, vs21
xvmaddasp vs54, vs6, vs21
xvmaddasp vs55, vs7, vs21
xvmaddasp vs56, vs4, vs22
xvmaddasp vs57, vs5, vs22
xvmaddasp vs58, vs6, vs22
xvmaddasp vs59, vs7, vs22
xvmaddasp vs60, vs4, vs23
xvmaddasp vs61, vs5, vs23
xvmaddasp vs62, vs6, vs23
xvmaddasp vs63, vs7, vs23
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x16_SUBI1 -- 16x8 kernel, stand-alone INIT step (no
 * software pipelining; used for the K-remainder loop).  Loads the
 * current A panel / B splats and initializes vs32-vs63 with
 * xvmulsp in one shot. */
#if defined(_AIX)
define(`KERNEL8x16_SUBI1', `
#else
.macro KERNEL8x16_SUBI1
#endif
/* load 16 A elements */
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
/* load 8 B values; splat each word across a whole vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* initialize accumulators: vs(32+4j+i) = A_i * B_j */
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs2, vs8
xvmulsp vs35, vs3, vs8
xvmulsp vs36, vs0, vs9
xvmulsp vs37, vs1, vs9
xvmulsp vs38, vs2, vs9
xvmulsp vs39, vs3, vs9
xvmulsp vs40, vs0, vs10
xvmulsp vs41, vs1, vs10
xvmulsp vs42, vs2, vs10
xvmulsp vs43, vs3, vs10
xvmulsp vs44, vs0, vs11
xvmulsp vs45, vs1, vs11
xvmulsp vs46, vs2, vs11
xvmulsp vs47, vs3, vs11
xvmulsp vs48, vs0, vs12
xvmulsp vs49, vs1, vs12
xvmulsp vs50, vs2, vs12
xvmulsp vs51, vs3, vs12
xvmulsp vs52, vs0, vs13
xvmulsp vs53, vs1, vs13
xvmulsp vs54, vs2, vs13
xvmulsp vs55, vs3, vs13
xvmulsp vs56, vs0, vs14
xvmulsp vs57, vs1, vs14
xvmulsp vs58, vs2, vs14
xvmulsp vs59, vs3, vs14
xvmulsp vs60, vs0, vs15
xvmulsp vs61, vs1, vs15
xvmulsp vs62, vs2, vs15
xvmulsp vs63, vs3, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x16_SUB1 -- 16x8 kernel, stand-alone ACCUMULATE step
 * (K-remainder loop).  Same loads as KERNEL8x16_SUBI1 but uses
 * xvmaddasp so existing accumulator contents are preserved. */
#if defined(_AIX)
define(`KERNEL8x16_SUB1', `
#else
.macro KERNEL8x16_SUB1
#endif
/* load 16 A elements */
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
/* load 8 B values; splat each word across a whole vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* accumulate: vs(32+4j+i) += A_i * B_j */
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs2, vs8
xvmaddasp vs35, vs3, vs8
xvmaddasp vs36, vs0, vs9
xvmaddasp vs37, vs1, vs9
xvmaddasp vs38, vs2, vs9
xvmaddasp vs39, vs3, vs9
xvmaddasp vs40, vs0, vs10
xvmaddasp vs41, vs1, vs10
xvmaddasp vs42, vs2, vs10
xvmaddasp vs43, vs3, vs10
xvmaddasp vs44, vs0, vs11
xvmaddasp vs45, vs1, vs11
xvmaddasp vs46, vs2, vs11
xvmaddasp vs47, vs3, vs11
xvmaddasp vs48, vs0, vs12
xvmaddasp vs49, vs1, vs12
xvmaddasp vs50, vs2, vs12
xvmaddasp vs51, vs3, vs12
xvmaddasp vs52, vs0, vs13
xvmaddasp vs53, vs1, vs13
xvmaddasp vs54, vs2, vs13
xvmaddasp vs55, vs3, vs13
xvmaddasp vs56, vs0, vs14
xvmaddasp vs57, vs1, vs14
xvmaddasp vs58, vs2, vs14
xvmaddasp vs59, vs3, vs14
xvmaddasp vs60, vs0, vs15
xvmaddasp vs61, vs1, vs15
xvmaddasp vs62, vs2, vs15
xvmaddasp vs63, vs3, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE8x16 -- write back the 16x8 result tile.
 * Walks 8 C columns via T1 (stride LDC bytes; assumes column-major
 * packing -- confirm against caller).  Per column (16 floats):
 *   TRMMKERNEL:  C = alpha * acc          (C is not read)
 *   otherwise:   C = C + alpha * acc      (load / madd / store)
 * alpha_vr is presumably alpha splatted across all 4 lanes --
 * defined outside this chunk.  Finally advances CO by 64 bytes. */
#if defined(_AIX)
define(`SAVE8x16', `
#else
.macro SAVE8x16
#endif
mr T1, CO
/* ---- column 0: accumulators vs32-vs35 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
xvmulsp vs1, vs33, alpha_vr
xvmulsp vs2, vs34, alpha_vr
xvmulsp vs3, vs35, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
xvmaddasp vs1, vs33, alpha_vr
xvmaddasp vs2, vs34, alpha_vr
xvmaddasp vs3, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 1: vs36-vs39 (pattern repeats for each column) ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs36, alpha_vr
xvmulsp vs1, vs37, alpha_vr
xvmulsp vs2, vs38, alpha_vr
xvmulsp vs3, vs39, alpha_vr
#else
xvmaddasp vs0, vs36, alpha_vr
xvmaddasp vs1, vs37, alpha_vr
xvmaddasp vs2, vs38, alpha_vr
xvmaddasp vs3, vs39, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 2: vs40-vs43 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs40, alpha_vr
xvmulsp vs1, vs41, alpha_vr
xvmulsp vs2, vs42, alpha_vr
xvmulsp vs3, vs43, alpha_vr
#else
xvmaddasp vs0, vs40, alpha_vr
xvmaddasp vs1, vs41, alpha_vr
xvmaddasp vs2, vs42, alpha_vr
xvmaddasp vs3, vs43, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 3: vs44-vs47 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs44, alpha_vr
xvmulsp vs1, vs45, alpha_vr
xvmulsp vs2, vs46, alpha_vr
xvmulsp vs3, vs47, alpha_vr
#else
xvmaddasp vs0, vs44, alpha_vr
xvmaddasp vs1, vs45, alpha_vr
xvmaddasp vs2, vs46, alpha_vr
xvmaddasp vs3, vs47, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 4: vs48-vs51 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs48, alpha_vr
xvmulsp vs1, vs49, alpha_vr
xvmulsp vs2, vs50, alpha_vr
xvmulsp vs3, vs51, alpha_vr
#else
xvmaddasp vs0, vs48, alpha_vr
xvmaddasp vs1, vs49, alpha_vr
xvmaddasp vs2, vs50, alpha_vr
xvmaddasp vs3, vs51, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 5: vs52-vs55 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs52, alpha_vr
xvmulsp vs1, vs53, alpha_vr
xvmulsp vs2, vs54, alpha_vr
xvmulsp vs3, vs55, alpha_vr
#else
xvmaddasp vs0, vs52, alpha_vr
xvmaddasp vs1, vs53, alpha_vr
xvmaddasp vs2, vs54, alpha_vr
xvmaddasp vs3, vs55, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 6: vs56-vs59 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs56, alpha_vr
xvmulsp vs1, vs57, alpha_vr
xvmulsp vs2, vs58, alpha_vr
xvmulsp vs3, vs59, alpha_vr
#else
xvmaddasp vs0, vs56, alpha_vr
xvmaddasp vs1, vs57, alpha_vr
xvmaddasp vs2, vs58, alpha_vr
xvmaddasp vs3, vs59, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* ---- column 7: vs60-vs63 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs60, alpha_vr
xvmulsp vs1, vs61, alpha_vr
xvmulsp vs2, vs62, alpha_vr
xvmulsp vs3, vs63, alpha_vr
#else
xvmaddasp vs0, vs60, alpha_vr
xvmaddasp vs1, vs61, alpha_vr
xvmaddasp vs2, vs62, alpha_vr
xvmaddasp vs3, vs63, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
/* advance C pointer past this 16-wide tile (16 floats = 64 bytes) */
addi CO, CO, 64
#if defined(_AIX)
')
#else
.endm
#endif
  583. /**********************************************************************************************
  584. * Macros for N=8 and M=8
  585. **********************************************************************************************/
/* LOAD8x8_1 -- prologue load for the 8x8 pipelined kernel.
 * Loads 8 A elements into vs0-vs1 and splats the 8 current B
 * values into vs8-vs15.  Advances AO by 32 bytes and BO by 32. */
#if defined(_AIX)
define(`LOAD8x8_1', `
#else
.macro LOAD8x8_1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
/* splat each of the 8 B words across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x8_I1 -- 8x8 kernel, pipelined INIT step.
 * Loads the next operands (vs4-vs5 / vs16-vs23) while initializing
 * the 16 accumulators vs32-vs47 from vs0-vs1 / vs8-vs15.
 * Layout: vs(32+2j+i) = A_i * B_j (i=0..1, j=0..7). */
#if defined(_AIX)
define(`KERNEL8x8_I1', `
#else
.macro KERNEL8x8_I1
#endif
/* prefetch-load next 8 A elements */
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
addi AO, AO, 32
/* load next 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs20, vs29, 0
xxspltw vs21, vs29, 1
xxspltw vs22, vs29, 2
xxspltw vs23, vs29, 3
addi BO, BO, 32
/* initialize accumulators */
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs0, vs9
xvmulsp vs35, vs1, vs9
xvmulsp vs36, vs0, vs10
xvmulsp vs37, vs1, vs10
xvmulsp vs38, vs0, vs11
xvmulsp vs39, vs1, vs11
xvmulsp vs40, vs0, vs12
xvmulsp vs41, vs1, vs12
xvmulsp vs42, vs0, vs13
xvmulsp vs43, vs1, vs13
xvmulsp vs44, vs0, vs14
xvmulsp vs45, vs1, vs14
xvmulsp vs46, vs0, vs15
xvmulsp vs47, vs1, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x8_1 -- 8x8 kernel, pipelined EVEN step.
 * Same loads as KERNEL8x8_I1 but accumulates (xvmaddasp) into
 * vs32-vs47 from vs0-vs1 / vs8-vs15.  Pairs with KERNEL8x8_2. */
#if defined(_AIX)
define(`KERNEL8x8_1', `
#else
.macro KERNEL8x8_1
#endif
/* prefetch-load next 8 A elements */
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
addi AO, AO, 32
/* load next 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs20, vs29, 0
xxspltw vs21, vs29, 1
xxspltw vs22, vs29, 2
xxspltw vs23, vs29, 3
addi BO, BO, 32
/* accumulate from current operands */
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs0, vs9
xvmaddasp vs35, vs1, vs9
xvmaddasp vs36, vs0, vs10
xvmaddasp vs37, vs1, vs10
xvmaddasp vs38, vs0, vs11
xvmaddasp vs39, vs1, vs11
xvmaddasp vs40, vs0, vs12
xvmaddasp vs41, vs1, vs12
xvmaddasp vs42, vs0, vs13
xvmaddasp vs43, vs1, vs13
xvmaddasp vs44, vs0, vs14
xvmaddasp vs45, vs1, vs14
xvmaddasp vs46, vs0, vs15
xvmaddasp vs47, vs1, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x8_2 -- 8x8 kernel, pipelined ODD step.
 * Loads the next operands into the primary set (vs0-vs1 /
 * vs8-vs15) while accumulating from the secondary set
 * (vs4-vs5 / vs16-vs23) loaded by the preceding step. */
#if defined(_AIX)
define(`KERNEL8x8_2', `
#else
.macro KERNEL8x8_2
#endif
/* prefetch-load next 8 A elements */
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
/* load next 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* accumulate from the operands loaded by the previous step */
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs4, vs17
xvmaddasp vs35, vs5, vs17
xvmaddasp vs36, vs4, vs18
xvmaddasp vs37, vs5, vs18
xvmaddasp vs38, vs4, vs19
xvmaddasp vs39, vs5, vs19
xvmaddasp vs40, vs4, vs20
xvmaddasp vs41, vs5, vs20
xvmaddasp vs42, vs4, vs21
xvmaddasp vs43, vs5, vs21
xvmaddasp vs44, vs4, vs22
xvmaddasp vs45, vs5, vs22
xvmaddasp vs46, vs4, vs23
xvmaddasp vs47, vs5, vs23
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x8_E2 -- 8x8 kernel, pipeline DRAIN step.
 * Final accumulation from vs4-vs5 / vs16-vs23; no loads, AO/BO
 * are not advanced. */
#if defined(_AIX)
define(`KERNEL8x8_E2', `
#else
.macro KERNEL8x8_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs4, vs17
xvmaddasp vs35, vs5, vs17
xvmaddasp vs36, vs4, vs18
xvmaddasp vs37, vs5, vs18
xvmaddasp vs38, vs4, vs19
xvmaddasp vs39, vs5, vs19
xvmaddasp vs40, vs4, vs20
xvmaddasp vs41, vs5, vs20
xvmaddasp vs42, vs4, vs21
xvmaddasp vs43, vs5, vs21
xvmaddasp vs44, vs4, vs22
xvmaddasp vs45, vs5, vs22
xvmaddasp vs46, vs4, vs23
xvmaddasp vs47, vs5, vs23
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x8_SUBI1 -- 8x8 kernel, stand-alone INIT step
 * (K-remainder loop).  Loads current operands and initializes
 * vs32-vs47 with xvmulsp. */
#if defined(_AIX)
define(`KERNEL8x8_SUBI1', `
#else
.macro KERNEL8x8_SUBI1
#endif
/* load 8 A elements */
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
/* load 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* initialize accumulators */
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs0, vs9
xvmulsp vs35, vs1, vs9
xvmulsp vs36, vs0, vs10
xvmulsp vs37, vs1, vs10
xvmulsp vs38, vs0, vs11
xvmulsp vs39, vs1, vs11
xvmulsp vs40, vs0, vs12
xvmulsp vs41, vs1, vs12
xvmulsp vs42, vs0, vs13
xvmulsp vs43, vs1, vs13
xvmulsp vs44, vs0, vs14
xvmulsp vs45, vs1, vs14
xvmulsp vs46, vs0, vs15
xvmulsp vs47, vs1, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x8_SUB1 -- 8x8 kernel, stand-alone ACCUMULATE step
 * (K-remainder loop).  Same loads as KERNEL8x8_SUBI1 but uses
 * xvmaddasp to preserve existing accumulator contents. */
#if defined(_AIX)
define(`KERNEL8x8_SUB1', `
#else
.macro KERNEL8x8_SUB1
#endif
/* load 8 A elements */
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
/* load 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* accumulate */
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs0, vs9
xvmaddasp vs35, vs1, vs9
xvmaddasp vs36, vs0, vs10
xvmaddasp vs37, vs1, vs10
xvmaddasp vs38, vs0, vs11
xvmaddasp vs39, vs1, vs11
xvmaddasp vs40, vs0, vs12
xvmaddasp vs41, vs1, vs12
xvmaddasp vs42, vs0, vs13
xvmaddasp vs43, vs1, vs13
xvmaddasp vs44, vs0, vs14
xvmaddasp vs45, vs1, vs14
xvmaddasp vs46, vs0, vs15
xvmaddasp vs47, vs1, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE8x8 -- write back the 8x8 result tile.
 * Walks 8 C columns via T1 (stride LDC bytes).  Per column
 * (8 floats, accumulator pair vs(32+2j)/vs(33+2j)):
 *   TRMMKERNEL:  C = alpha * acc        (C is not read)
 *   otherwise:   C = C + alpha * acc
 * alpha_vr is presumably alpha splatted across lanes -- defined
 * outside this chunk.  Finally advances CO by 32 bytes. */
#if defined(_AIX)
define(`SAVE8x8', `
#else
.macro SAVE8x8
#endif
mr T1, CO
/* ---- column 0: vs32/vs33 (pattern repeats per column) ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
xvmulsp vs1, vs33, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
xvmaddasp vs1, vs33, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 1: vs34/vs35 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs34, alpha_vr
xvmulsp vs1, vs35, alpha_vr
#else
xvmaddasp vs0, vs34, alpha_vr
xvmaddasp vs1, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 2: vs36/vs37 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs36, alpha_vr
xvmulsp vs1, vs37, alpha_vr
#else
xvmaddasp vs0, vs36, alpha_vr
xvmaddasp vs1, vs37, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 3: vs38/vs39 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs38, alpha_vr
xvmulsp vs1, vs39, alpha_vr
#else
xvmaddasp vs0, vs38, alpha_vr
xvmaddasp vs1, vs39, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 4: vs40/vs41 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs40, alpha_vr
xvmulsp vs1, vs41, alpha_vr
#else
xvmaddasp vs0, vs40, alpha_vr
xvmaddasp vs1, vs41, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 5: vs42/vs43 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs42, alpha_vr
xvmulsp vs1, vs43, alpha_vr
#else
xvmaddasp vs0, vs42, alpha_vr
xvmaddasp vs1, vs43, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 6: vs44/vs45 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs44, alpha_vr
xvmulsp vs1, vs45, alpha_vr
#else
xvmaddasp vs0, vs44, alpha_vr
xvmaddasp vs1, vs45, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* ---- column 7: vs46/vs47 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs46, alpha_vr
xvmulsp vs1, vs47, alpha_vr
#else
xvmaddasp vs0, vs46, alpha_vr
xvmaddasp vs1, vs47, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* advance C pointer past this 8-wide tile (8 floats = 32 bytes) */
addi CO, CO, 32
#if defined(_AIX)
')
#else
.endm
#endif
  960. /**********************************************************************************************
  961. * Macros for N=8 and M=4
  962. **********************************************************************************************/
/* LOAD8x4_1 -- prologue load for the 4x8 pipelined kernel.
 * Loads 4 A elements into vs0 and splats the 8 current B values
 * into vs8-vs15.  Advances AO by 16 bytes and BO by 32. */
#if defined(_AIX)
define(`LOAD8x4_1', `
#else
.macro LOAD8x4_1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
/* splat each of the 8 B words across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x4_I1 -- 4x8 kernel, pipelined INIT step.
 * Loads the next operands (vs4 / vs16-vs23) while initializing
 * the 8 accumulators vs32-vs39 = A * B_j from vs0 / vs8-vs15. */
#if defined(_AIX)
define(`KERNEL8x4_I1', `
#else
.macro KERNEL8x4_I1
#endif
/* prefetch-load next 4 A elements */
lxvw4x vs4, o0, AO
addi AO, AO, 16
/* load next 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs20, vs29, 0
xxspltw vs21, vs29, 1
xxspltw vs22, vs29, 2
xxspltw vs23, vs29, 3
addi BO, BO, 32
/* initialize accumulators */
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs0, vs9
xvmulsp vs34, vs0, vs10
xvmulsp vs35, vs0, vs11
xvmulsp vs36, vs0, vs12
xvmulsp vs37, vs0, vs13
xvmulsp vs38, vs0, vs14
xvmulsp vs39, vs0, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x4_1 -- 4x8 kernel, pipelined EVEN step.
 * Same loads as KERNEL8x4_I1 but accumulates (xvmaddasp) into
 * vs32-vs39 from vs0 / vs8-vs15.  Pairs with KERNEL8x4_2. */
#if defined(_AIX)
define(`KERNEL8x4_1', `
#else
.macro KERNEL8x4_1
#endif
/* prefetch-load next 4 A elements */
lxvw4x vs4, o0, AO
addi AO, AO, 16
/* load next 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs20, vs29, 0
xxspltw vs21, vs29, 1
xxspltw vs22, vs29, 2
xxspltw vs23, vs29, 3
addi BO, BO, 32
/* accumulate from current operands */
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs0, vs9
xvmaddasp vs34, vs0, vs10
xvmaddasp vs35, vs0, vs11
xvmaddasp vs36, vs0, vs12
xvmaddasp vs37, vs0, vs13
xvmaddasp vs38, vs0, vs14
xvmaddasp vs39, vs0, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x4_2 -- 4x8 kernel, pipelined ODD step.
 * Loads the next operands into the primary set (vs0 / vs8-vs15)
 * while accumulating from the secondary set (vs4 / vs16-vs23)
 * loaded by the preceding step. */
#if defined(_AIX)
define(`KERNEL8x4_2', `
#else
.macro KERNEL8x4_2
#endif
/* prefetch-load next 4 A elements */
lxvw4x vs0, o0, AO
addi AO, AO, 16
/* load next 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* accumulate from the operands loaded by the previous step */
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs4, vs17
xvmaddasp vs34, vs4, vs18
xvmaddasp vs35, vs4, vs19
xvmaddasp vs36, vs4, vs20
xvmaddasp vs37, vs4, vs21
xvmaddasp vs38, vs4, vs22
xvmaddasp vs39, vs4, vs23
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x4_E2 -- 4x8 kernel, pipeline DRAIN step.
 * Final accumulation from vs4 / vs16-vs23; no loads, AO/BO are
 * not advanced. */
#if defined(_AIX)
define(`KERNEL8x4_E2', `
#else
.macro KERNEL8x4_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs4, vs17
xvmaddasp vs34, vs4, vs18
xvmaddasp vs35, vs4, vs19
xvmaddasp vs36, vs4, vs20
xvmaddasp vs37, vs4, vs21
xvmaddasp vs38, vs4, vs22
xvmaddasp vs39, vs4, vs23
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x4_SUBI1 -- 4x8 kernel, stand-alone INIT step
 * (K-remainder loop).  Loads current operands and initializes
 * vs32-vs39 with xvmulsp. */
#if defined(_AIX)
define(`KERNEL8x4_SUBI1', `
#else
.macro KERNEL8x4_SUBI1
#endif
/* load 4 A elements */
lxvw4x vs0, o0, AO
addi AO, AO, 16
/* load 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* initialize accumulators */
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs0, vs9
xvmulsp vs34, vs0, vs10
xvmulsp vs35, vs0, vs11
xvmulsp vs36, vs0, vs12
xvmulsp vs37, vs0, vs13
xvmulsp vs38, vs0, vs14
xvmulsp vs39, vs0, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x4_SUB1 -- 4x8 kernel, stand-alone ACCUMULATE step
 * (K-remainder loop).  Same loads as KERNEL8x4_SUBI1 but uses
 * xvmaddasp to preserve existing accumulator contents. */
#if defined(_AIX)
define(`KERNEL8x4_SUB1', `
#else
.macro KERNEL8x4_SUB1
#endif
/* load 4 A elements */
lxvw4x vs0, o0, AO
addi AO, AO, 16
/* load 8 B values; splat each across a vector */
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
lxvw4x vs29, o16, BO
xxspltw vs12, vs29, 0
xxspltw vs13, vs29, 1
xxspltw vs14, vs29, 2
xxspltw vs15, vs29, 3
addi BO, BO, 32
/* accumulate */
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs0, vs9
xvmaddasp vs34, vs0, vs10
xvmaddasp vs35, vs0, vs11
xvmaddasp vs36, vs0, vs12
xvmaddasp vs37, vs0, vs13
xvmaddasp vs38, vs0, vs14
xvmaddasp vs39, vs0, vs15
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE8x4 -- write back the 4x8 result tile.
 * Walks 8 C columns via T1 (stride LDC bytes).  Per column
 * (4 floats, accumulator vs(32+j)):
 *   TRMMKERNEL:  C = alpha * acc        (C is not read)
 *   otherwise:   C = C + alpha * acc
 * Finally advances CO by 16 bytes. */
#if defined(_AIX)
define(`SAVE8x4', `
#else
.macro SAVE8x4
#endif
mr T1, CO
/* ---- column 0: vs32 (pattern repeats per column) ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 1: vs33 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs33, alpha_vr
#else
xvmaddasp vs0, vs33, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 2: vs34 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs34, alpha_vr
#else
xvmaddasp vs0, vs34, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 3: vs35 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs35, alpha_vr
#else
xvmaddasp vs0, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 4: vs36 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs36, alpha_vr
#else
xvmaddasp vs0, vs36, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 5: vs37 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs37, alpha_vr
#else
xvmaddasp vs0, vs37, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 6: vs38 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs38, alpha_vr
#else
xvmaddasp vs0, vs38, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* ---- column 7: vs39 ---- */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs39, alpha_vr
#else
xvmaddasp vs0, vs39, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
/* advance C pointer past this 4-wide tile (4 floats = 16 bytes) */
addi CO, CO, 16
#if defined(_AIX)
')
#else
.endm
#endif
  1251. /**********************************************************************************************
  1252. * Macros for N=8 and M=2
  1253. **********************************************************************************************/
/* LOAD8x2_1 -- prologue load for the 2x8 scalar kernel.
 * Loads 2 A elements into vs0/vs1 and the 8 current B values into
 * vs8-vs15 using scalar single-precision loads (lxsspx converts
 * SP memory values to DP register format).  Advances AO by 8
 * bytes and BO by 32; T1 is used as a scratch cursor over BO. */
#if defined(_AIX)
define(`LOAD8x2_1', `
#else
.macro LOAD8x2_1
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
/* scalar-load the 8 B values, 4 at a time via T1 */
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi T1, T1, 16
lxsspx vs12, o0, T1
lxsspx vs13, o4, T1
lxsspx vs14, o8, T1
lxsspx vs15, o12, T1
addi BO, BO, 32
#if defined(_AIX)
')
#else
.endm
#endif
/* KERNEL8x2_I1 -- 2x8 scalar kernel, pipelined INIT step.
 * Loads the next operands (vs4-vs5 / vs16-vs23) while initializing
 * the 16 scalar accumulators vs32-vs47 from vs0-vs1 / vs8-vs15.
 * NOTE(review): arithmetic uses the DP scalar forms (xsmuldp) on
 * values that lxsspx loaded in DP register format; the SP rounding
 * presumably happens at the final store -- confirm in SAVE8x2. */
#if defined(_AIX)
define(`KERNEL8x2_I1', `
#else
.macro KERNEL8x2_I1
#endif
/* prefetch-load next 2 A elements */
lxsspx vs4, o0, AO
lxsspx vs5, o4, AO
addi AO, AO, 8
/* scalar-load next 8 B values via T1 */
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
lxsspx vs18, o8, T1
lxsspx vs19, o12, T1
addi T1, T1, 16
lxsspx vs20, o0, T1
lxsspx vs21, o4, T1
lxsspx vs22, o8, T1
lxsspx vs23, o12, T1
addi BO, BO, 32
/* initialize accumulators: vs(32+2j+i) = A_i * B_j (i=0..1, j=0..7) */
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs1, vs8
xsmuldp vs34, vs0, vs9
xsmuldp vs35, vs1, vs9
xsmuldp vs36, vs0, vs10
xsmuldp vs37, vs1, vs10
xsmuldp vs38, vs0, vs11
xsmuldp vs39, vs1, vs11
xsmuldp vs40, vs0, vs12
xsmuldp vs41, vs1, vs12
xsmuldp vs42, vs0, vs13
xsmuldp vs43, vs1, vs13
xsmuldp vs44, vs0, vs14
xsmuldp vs45, vs1, vs14
xsmuldp vs46, vs0, vs15
xsmuldp vs47, vs1, vs15
#if defined(_AIX)
')
#else
.endm
#endif
  1318. #if defined(_AIX)
  1319. define(`KERNEL8x2_1', `
  1320. #else
  1321. .macro KERNEL8x2_1
  1322. #endif
  1323. lxsspx vs4, o0, AO
  1324. lxsspx vs5, o4, AO
  1325. addi AO, AO, 8
  1326. mr T1, BO
  1327. lxsspx vs16, o0, T1
  1328. lxsspx vs17, o4, T1
  1329. lxsspx vs18, o8, T1
  1330. lxsspx vs19, o12, T1
  1331. addi T1, T1, 16
  1332. lxsspx vs20, o0, T1
  1333. lxsspx vs21, o4, T1
  1334. lxsspx vs22, o8, T1
  1335. lxsspx vs23, o12, T1
  1336. addi BO, BO, 32
  1337. xsmaddadp vs32, vs0, vs8
  1338. xsmaddadp vs33, vs1, vs8
  1339. xsmaddadp vs34, vs0, vs9
  1340. xsmaddadp vs35, vs1, vs9
  1341. xsmaddadp vs36, vs0, vs10
  1342. xsmaddadp vs37, vs1, vs10
  1343. xsmaddadp vs38, vs0, vs11
  1344. xsmaddadp vs39, vs1, vs11
  1345. xsmaddadp vs40, vs0, vs12
  1346. xsmaddadp vs41, vs1, vs12
  1347. xsmaddadp vs42, vs0, vs13
  1348. xsmaddadp vs43, vs1, vs13
  1349. xsmaddadp vs44, vs0, vs14
  1350. xsmaddadp vs45, vs1, vs14
  1351. xsmaddadp vs46, vs0, vs15
  1352. xsmaddadp vs47, vs1, vs15
  1353. #if defined(_AIX)
  1354. ')
  1355. #else
  1356. .endm
  1357. #endif
  1358. #if defined(_AIX)
  1359. define(`KERNEL8x2_2', `
  1360. #else
  1361. .macro KERNEL8x2_2
  1362. #endif
  1363. lxsspx vs0, o0, AO
  1364. lxsspx vs1, o4, AO
  1365. addi AO, AO, 8
  1366. mr T1, BO
  1367. lxsspx vs8, o0, T1
  1368. lxsspx vs9, o4, T1
  1369. lxsspx vs10, o8, T1
  1370. lxsspx vs11, o12, T1
  1371. addi T1, T1, 16
  1372. lxsspx vs12, o0, T1
  1373. lxsspx vs13, o4, T1
  1374. lxsspx vs14, o8, T1
  1375. lxsspx vs15, o12, T1
  1376. addi BO, BO, 32
  1377. xsmaddadp vs32, vs4, vs16
  1378. xsmaddadp vs33, vs5, vs16
  1379. xsmaddadp vs34, vs4, vs17
  1380. xsmaddadp vs35, vs5, vs17
  1381. xsmaddadp vs36, vs4, vs18
  1382. xsmaddadp vs37, vs5, vs18
  1383. xsmaddadp vs38, vs4, vs19
  1384. xsmaddadp vs39, vs5, vs19
  1385. xsmaddadp vs40, vs4, vs20
  1386. xsmaddadp vs41, vs5, vs20
  1387. xsmaddadp vs42, vs4, vs21
  1388. xsmaddadp vs43, vs5, vs21
  1389. xsmaddadp vs44, vs4, vs22
  1390. xsmaddadp vs45, vs5, vs22
  1391. xsmaddadp vs46, vs4, vs23
  1392. xsmaddadp vs47, vs5, vs23
  1393. #if defined(_AIX)
  1394. ')
  1395. #else
  1396. .endm
  1397. #endif
  1398. #if defined(_AIX)
  1399. define(`KERNEL8x2_E2', `
  1400. #else
  1401. .macro KERNEL8x2_E2
  1402. #endif
  1403. xsmaddadp vs32, vs4, vs16
  1404. xsmaddadp vs33, vs5, vs16
  1405. xsmaddadp vs34, vs4, vs17
  1406. xsmaddadp vs35, vs5, vs17
  1407. xsmaddadp vs36, vs4, vs18
  1408. xsmaddadp vs37, vs5, vs18
  1409. xsmaddadp vs38, vs4, vs19
  1410. xsmaddadp vs39, vs5, vs19
  1411. xsmaddadp vs40, vs4, vs20
  1412. xsmaddadp vs41, vs5, vs20
  1413. xsmaddadp vs42, vs4, vs21
  1414. xsmaddadp vs43, vs5, vs21
  1415. xsmaddadp vs44, vs4, vs22
  1416. xsmaddadp vs45, vs5, vs22
  1417. xsmaddadp vs46, vs4, vs23
  1418. xsmaddadp vs47, vs5, vs23
  1419. #if defined(_AIX)
  1420. ')
  1421. #else
  1422. .endm
  1423. #endif
  1424. #if defined(_AIX)
  1425. define(`KERNEL8x2_SUBI1', `
  1426. #else
  1427. .macro KERNEL8x2_SUBI1
  1428. #endif
  1429. lxsspx vs0, o0, AO
  1430. lxsspx vs1, o4, AO
  1431. addi AO, AO, 8
  1432. mr T1, BO
  1433. lxsspx vs8, o0, T1
  1434. lxsspx vs9, o4, T1
  1435. lxsspx vs10, o8, T1
  1436. lxsspx vs11, o12, T1
  1437. addi T1, T1, 16
  1438. lxsspx vs12, o0, T1
  1439. lxsspx vs13, o4, T1
  1440. lxsspx vs14, o8, T1
  1441. lxsspx vs15, o12, T1
  1442. addi BO, BO, 32
  1443. xsmuldp vs32, vs0, vs8
  1444. xsmuldp vs33, vs1, vs8
  1445. xsmuldp vs34, vs0, vs9
  1446. xsmuldp vs35, vs1, vs9
  1447. xsmuldp vs36, vs0, vs10
  1448. xsmuldp vs37, vs1, vs10
  1449. xsmuldp vs38, vs0, vs11
  1450. xsmuldp vs39, vs1, vs11
  1451. xsmuldp vs40, vs0, vs12
  1452. xsmuldp vs41, vs1, vs12
  1453. xsmuldp vs42, vs0, vs13
  1454. xsmuldp vs43, vs1, vs13
  1455. xsmuldp vs44, vs0, vs14
  1456. xsmuldp vs45, vs1, vs14
  1457. xsmuldp vs46, vs0, vs15
  1458. xsmuldp vs47, vs1, vs15
  1459. #if defined(_AIX)
  1460. ')
  1461. #else
  1462. .endm
  1463. #endif
  1464. #if defined(_AIX)
  1465. define(`KERNEL8x2_SUB1', `
  1466. #else
  1467. .macro KERNEL8x2_SUB1
  1468. #endif
  1469. lxsspx vs0, o0, AO
  1470. lxsspx vs1, o4, AO
  1471. addi AO, AO, 8
  1472. mr T1, BO
  1473. lxsspx vs8, o0, T1
  1474. lxsspx vs9, o4, T1
  1475. lxsspx vs10, o8, T1
  1476. lxsspx vs11, o12, T1
  1477. addi T1, T1, 16
  1478. lxsspx vs12, o0, T1
  1479. lxsspx vs13, o4, T1
  1480. lxsspx vs14, o8, T1
  1481. lxsspx vs15, o12, T1
  1482. addi BO, BO, 32
  1483. xsmaddadp vs32, vs0, vs8
  1484. xsmaddadp vs33, vs1, vs8
  1485. xsmaddadp vs34, vs0, vs9
  1486. xsmaddadp vs35, vs1, vs9
  1487. xsmaddadp vs36, vs0, vs10
  1488. xsmaddadp vs37, vs1, vs10
  1489. xsmaddadp vs38, vs0, vs11
  1490. xsmaddadp vs39, vs1, vs11
  1491. xsmaddadp vs40, vs0, vs12
  1492. xsmaddadp vs41, vs1, vs12
  1493. xsmaddadp vs42, vs0, vs13
  1494. xsmaddadp vs43, vs1, vs13
  1495. xsmaddadp vs44, vs0, vs14
  1496. xsmaddadp vs45, vs1, vs14
  1497. xsmaddadp vs46, vs0, vs15
  1498. xsmaddadp vs47, vs1, vs15
  1499. #if defined(_AIX)
  1500. ')
  1501. #else
  1502. .endm
  1503. #endif
  1504. #if defined(_AIX)
  1505. define(`SAVE8x2', `
  1506. #else
  1507. .macro SAVE8x2
  1508. #endif
  1509. mr T1, CO
  1510. #ifndef TRMMKERNEL
  1511. lxsspx vs0, o0, T1
  1512. lxsspx vs1, o4, T1
  1513. #endif
  1514. #ifdef TRMMKERNEL
  1515. xsmuldp vs0, vs32, alpha_r
  1516. xsmuldp vs1, vs33, alpha_r
  1517. #else
  1518. xsmaddadp vs0, vs32, alpha_r
  1519. xsmaddadp vs1, vs33, alpha_r
  1520. #endif
  1521. stxsspx vs0, o0, T1
  1522. stxsspx vs1, o4, T1
  1523. add T1, T1, LDC
  1524. #ifndef TRMMKERNEL
  1525. lxsspx vs0, o0, T1
  1526. lxsspx vs1, o4, T1
  1527. #endif
  1528. #ifdef TRMMKERNEL
  1529. xsmuldp vs0, vs34, alpha_r
  1530. xsmuldp vs1, vs35, alpha_r
  1531. #else
  1532. xsmaddadp vs0, vs34, alpha_r
  1533. xsmaddadp vs1, vs35, alpha_r
  1534. #endif
  1535. stxsspx vs0, o0, T1
  1536. stxsspx vs1, o4, T1
  1537. add T1, T1, LDC
  1538. #ifndef TRMMKERNEL
  1539. lxsspx vs0, o0, T1
  1540. lxsspx vs1, o4, T1
  1541. #endif
  1542. #ifdef TRMMKERNEL
  1543. xsmuldp vs0, vs36, alpha_r
  1544. xsmuldp vs1, vs37, alpha_r
  1545. #else
  1546. xsmaddadp vs0, vs36, alpha_r
  1547. xsmaddadp vs1, vs37, alpha_r
  1548. #endif
  1549. stxsspx vs0, o0, T1
  1550. stxsspx vs1, o4, T1
  1551. add T1, T1, LDC
  1552. #ifndef TRMMKERNEL
  1553. lxsspx vs0, o0, T1
  1554. lxsspx vs1, o4, T1
  1555. #endif
  1556. #ifdef TRMMKERNEL
  1557. xsmuldp vs0, vs38, alpha_r
  1558. xsmuldp vs1, vs39, alpha_r
  1559. #else
  1560. xsmaddadp vs0, vs38, alpha_r
  1561. xsmaddadp vs1, vs39, alpha_r
  1562. #endif
  1563. stxsspx vs0, o0, T1
  1564. stxsspx vs1, o4, T1
  1565. add T1, T1, LDC
  1566. #ifndef TRMMKERNEL
  1567. lxsspx vs0, o0, T1
  1568. lxsspx vs1, o4, T1
  1569. #endif
  1570. #ifdef TRMMKERNEL
  1571. xsmuldp vs0, vs40, alpha_r
  1572. xsmuldp vs1, vs41, alpha_r
  1573. #else
  1574. xsmaddadp vs0, vs40, alpha_r
  1575. xsmaddadp vs1, vs41, alpha_r
  1576. #endif
  1577. stxsspx vs0, o0, T1
  1578. stxsspx vs1, o4, T1
  1579. add T1, T1, LDC
  1580. #ifndef TRMMKERNEL
  1581. lxsspx vs0, o0, T1
  1582. lxsspx vs1, o4, T1
  1583. #endif
  1584. #ifdef TRMMKERNEL
  1585. xsmuldp vs0, vs42, alpha_r
  1586. xsmuldp vs1, vs43, alpha_r
  1587. #else
  1588. xsmaddadp vs0, vs42, alpha_r
  1589. xsmaddadp vs1, vs43, alpha_r
  1590. #endif
  1591. stxsspx vs0, o0, T1
  1592. stxsspx vs1, o4, T1
  1593. add T1, T1, LDC
  1594. #ifndef TRMMKERNEL
  1595. lxsspx vs0, o0, T1
  1596. lxsspx vs1, o4, T1
  1597. #endif
  1598. #ifdef TRMMKERNEL
  1599. xsmuldp vs0, vs44, alpha_r
  1600. xsmuldp vs1, vs45, alpha_r
  1601. #else
  1602. xsmaddadp vs0, vs44, alpha_r
  1603. xsmaddadp vs1, vs45, alpha_r
  1604. #endif
  1605. stxsspx vs0, o0, T1
  1606. stxsspx vs1, o4, T1
  1607. add T1, T1, LDC
  1608. #ifndef TRMMKERNEL
  1609. lxsspx vs0, o0, T1
  1610. lxsspx vs1, o4, T1
  1611. #endif
  1612. #ifdef TRMMKERNEL
  1613. xsmuldp vs0, vs46, alpha_r
  1614. xsmuldp vs1, vs47, alpha_r
  1615. #else
  1616. xsmaddadp vs0, vs46, alpha_r
  1617. xsmaddadp vs1, vs47, alpha_r
  1618. #endif
  1619. stxsspx vs0, o0, T1
  1620. stxsspx vs1, o4, T1
  1621. add T1, T1, LDC
  1622. addi CO, CO, 8
  1623. #if defined(_AIX)
  1624. ')
  1625. #else
  1626. .endm
  1627. #endif
/**********************************************************************************************
* Macros for N=8 and M=1
**********************************************************************************************/
  1631. #if defined(_AIX)
  1632. define(`LOAD8x1_1', `
  1633. #else
  1634. .macro LOAD8x1_1
  1635. #endif
  1636. lxsspx vs0, o0, AO
  1637. addi AO, AO, 4
  1638. mr T1, BO
  1639. lxsspx vs8, o0, T1
  1640. lxsspx vs9, o4, T1
  1641. lxsspx vs10, o8, T1
  1642. lxsspx vs11, o12, T1
  1643. addi T1, T1, 16
  1644. lxsspx vs12, o0, T1
  1645. lxsspx vs13, o4, T1
  1646. lxsspx vs14, o8, T1
  1647. lxsspx vs15, o12, T1
  1648. addi BO, BO, 32
  1649. #if defined(_AIX)
  1650. ')
  1651. #else
  1652. .endm
  1653. #endif
  1654. #if defined(_AIX)
  1655. define(`KERNEL8x1_I1', `
  1656. #else
  1657. .macro KERNEL8x1_I1
  1658. #endif
  1659. lxsspx vs4, o0, AO
  1660. addi AO, AO, 4
  1661. mr T1, BO
  1662. lxsspx vs16, o0, T1
  1663. lxsspx vs17, o4, T1
  1664. lxsspx vs18, o8, T1
  1665. lxsspx vs19, o12, T1
  1666. addi T1, T1, 16
  1667. lxsspx vs20, o0, T1
  1668. lxsspx vs21, o4, T1
  1669. lxsspx vs22, o8, T1
  1670. lxsspx vs23, o12, T1
  1671. addi BO, BO, 32
  1672. xsmuldp vs32, vs0, vs8
  1673. xsmuldp vs33, vs0, vs9
  1674. xsmuldp vs34, vs0, vs10
  1675. xsmuldp vs35, vs0, vs11
  1676. xsmuldp vs36, vs0, vs12
  1677. xsmuldp vs37, vs0, vs13
  1678. xsmuldp vs38, vs0, vs14
  1679. xsmuldp vs39, vs0, vs15
  1680. #if defined(_AIX)
  1681. ')
  1682. #else
  1683. .endm
  1684. #endif
  1685. #if defined(_AIX)
  1686. define(`KERNEL8x1_1', `
  1687. #else
  1688. .macro KERNEL8x1_1
  1689. #endif
  1690. lxsspx vs4, o0, AO
  1691. addi AO, AO, 4
  1692. mr T1, BO
  1693. lxsspx vs16, o0, T1
  1694. lxsspx vs17, o4, T1
  1695. lxsspx vs18, o8, T1
  1696. lxsspx vs19, o12, T1
  1697. addi T1, T1, 16
  1698. lxsspx vs20, o0, T1
  1699. lxsspx vs21, o4, T1
  1700. lxsspx vs22, o8, T1
  1701. lxsspx vs23, o12, T1
  1702. addi BO, BO, 32
  1703. xsmaddadp vs32, vs0, vs8
  1704. xsmaddadp vs33, vs0, vs9
  1705. xsmaddadp vs34, vs0, vs10
  1706. xsmaddadp vs35, vs0, vs11
  1707. xsmaddadp vs36, vs0, vs12
  1708. xsmaddadp vs37, vs0, vs13
  1709. xsmaddadp vs38, vs0, vs14
  1710. xsmaddadp vs39, vs0, vs15
  1711. #if defined(_AIX)
  1712. ')
  1713. #else
  1714. .endm
  1715. #endif
  1716. #if defined(_AIX)
  1717. define(`KERNEL8x1_2', `
  1718. #else
  1719. .macro KERNEL8x1_2
  1720. #endif
  1721. lxsspx vs0, o0, AO
  1722. addi AO, AO, 4
  1723. mr T1, BO
  1724. lxsspx vs8, o0, T1
  1725. lxsspx vs9, o4, T1
  1726. lxsspx vs10, o8, T1
  1727. lxsspx vs11, o12, T1
  1728. addi T1, T1, 16
  1729. lxsspx vs12, o0, T1
  1730. lxsspx vs13, o4, T1
  1731. lxsspx vs14, o8, T1
  1732. lxsspx vs15, o12, T1
  1733. addi BO, BO, 32
  1734. xsmaddadp vs32, vs4, vs16
  1735. xsmaddadp vs33, vs4, vs17
  1736. xsmaddadp vs34, vs4, vs18
  1737. xsmaddadp vs35, vs4, vs19
  1738. xsmaddadp vs36, vs4, vs20
  1739. xsmaddadp vs37, vs4, vs21
  1740. xsmaddadp vs38, vs4, vs22
  1741. xsmaddadp vs39, vs4, vs23
  1742. #if defined(_AIX)
  1743. ')
  1744. #else
  1745. .endm
  1746. #endif
  1747. #if defined(_AIX)
  1748. define(`KERNEL8x1_E2', `
  1749. #else
  1750. .macro KERNEL8x1_E2
  1751. #endif
  1752. xsmaddadp vs32, vs4, vs16
  1753. xsmaddadp vs33, vs4, vs17
  1754. xsmaddadp vs34, vs4, vs18
  1755. xsmaddadp vs35, vs4, vs19
  1756. xsmaddadp vs36, vs4, vs20
  1757. xsmaddadp vs37, vs4, vs21
  1758. xsmaddadp vs38, vs4, vs22
  1759. xsmaddadp vs39, vs4, vs23
  1760. #if defined(_AIX)
  1761. ')
  1762. #else
  1763. .endm
  1764. #endif
  1765. #if defined(_AIX)
  1766. define(`KERNEL8x1_SUBI1', `
  1767. #else
  1768. .macro KERNEL8x1_SUBI1
  1769. #endif
  1770. lxsspx vs0, o0, AO
  1771. addi AO, AO, 4
  1772. mr T1, BO
  1773. lxsspx vs8, o0, T1
  1774. lxsspx vs9, o4, T1
  1775. lxsspx vs10, o8, T1
  1776. lxsspx vs11, o12, T1
  1777. addi T1, T1, 16
  1778. lxsspx vs12, o0, T1
  1779. lxsspx vs13, o4, T1
  1780. lxsspx vs14, o8, T1
  1781. lxsspx vs15, o12, T1
  1782. addi BO, BO, 32
  1783. xsmuldp vs32, vs0, vs8
  1784. xsmuldp vs33, vs0, vs9
  1785. xsmuldp vs34, vs0, vs10
  1786. xsmuldp vs35, vs0, vs11
  1787. xsmuldp vs36, vs0, vs12
  1788. xsmuldp vs37, vs0, vs13
  1789. xsmuldp vs38, vs0, vs14
  1790. xsmuldp vs39, vs0, vs15
  1791. #if defined(_AIX)
  1792. ')
  1793. #else
  1794. .endm
  1795. #endif
  1796. #if defined(_AIX)
  1797. define(`KERNEL8x1_SUB1', `
  1798. #else
  1799. .macro KERNEL8x1_SUB1
  1800. #endif
  1801. lxsspx vs0, o0, AO
  1802. addi AO, AO, 4
  1803. mr T1, BO
  1804. lxsspx vs8, o0, T1
  1805. lxsspx vs9, o4, T1
  1806. lxsspx vs10, o8, T1
  1807. lxsspx vs11, o12, T1
  1808. addi T1, T1, 16
  1809. lxsspx vs12, o0, T1
  1810. lxsspx vs13, o4, T1
  1811. lxsspx vs14, o8, T1
  1812. lxsspx vs15, o12, T1
  1813. addi BO, BO, 32
  1814. xsmaddadp vs32, vs0, vs8
  1815. xsmaddadp vs33, vs0, vs9
  1816. xsmaddadp vs34, vs0, vs10
  1817. xsmaddadp vs35, vs0, vs11
  1818. xsmaddadp vs36, vs0, vs12
  1819. xsmaddadp vs37, vs0, vs13
  1820. xsmaddadp vs38, vs0, vs14
  1821. xsmaddadp vs39, vs0, vs15
  1822. #if defined(_AIX)
  1823. ')
  1824. #else
  1825. .endm
  1826. #endif
  1827. #if defined(_AIX)
  1828. define(`SAVE8x1', `
  1829. #else
  1830. .macro SAVE8x1
  1831. #endif
  1832. mr T1, CO
  1833. #ifndef TRMMKERNEL
  1834. lxsspx vs0, o0, T1
  1835. #endif
  1836. #ifdef TRMMKERNEL
  1837. xsmuldp vs0, vs32, alpha_r
  1838. #else
  1839. xsmaddadp vs0, vs32, alpha_r
  1840. #endif
  1841. stxsspx vs0, o0, T1
  1842. add T1, T1, LDC
  1843. #ifndef TRMMKERNEL
  1844. lxsspx vs0, o0, T1
  1845. #endif
  1846. #ifdef TRMMKERNEL
  1847. xsmuldp vs0, vs33, alpha_r
  1848. #else
  1849. xsmaddadp vs0, vs33, alpha_r
  1850. #endif
  1851. stxsspx vs0, o0, T1
  1852. add T1, T1, LDC
  1853. #ifndef TRMMKERNEL
  1854. lxsspx vs0, o0, T1
  1855. #endif
  1856. #ifdef TRMMKERNEL
  1857. xsmuldp vs0, vs34, alpha_r
  1858. #else
  1859. xsmaddadp vs0, vs34, alpha_r
  1860. #endif
  1861. stxsspx vs0, o0, T1
  1862. add T1, T1, LDC
  1863. #ifndef TRMMKERNEL
  1864. lxsspx vs0, o0, T1
  1865. #endif
  1866. #ifdef TRMMKERNEL
  1867. xsmuldp vs0, vs35, alpha_r
  1868. #else
  1869. xsmaddadp vs0, vs35, alpha_r
  1870. #endif
  1871. stxsspx vs0, o0, T1
  1872. add T1, T1, LDC
  1873. #ifndef TRMMKERNEL
  1874. lxsspx vs0, o0, T1
  1875. #endif
  1876. #ifdef TRMMKERNEL
  1877. xsmuldp vs0, vs36, alpha_r
  1878. #else
  1879. xsmaddadp vs0, vs36, alpha_r
  1880. #endif
  1881. stxsspx vs0, o0, T1
  1882. add T1, T1, LDC
  1883. #ifndef TRMMKERNEL
  1884. lxsspx vs0, o0, T1
  1885. #endif
  1886. #ifdef TRMMKERNEL
  1887. xsmuldp vs0, vs37, alpha_r
  1888. #else
  1889. xsmaddadp vs0, vs37, alpha_r
  1890. #endif
  1891. stxsspx vs0, o0, T1
  1892. add T1, T1, LDC
  1893. #ifndef TRMMKERNEL
  1894. lxsspx vs0, o0, T1
  1895. #endif
  1896. #ifdef TRMMKERNEL
  1897. xsmuldp vs0, vs38, alpha_r
  1898. #else
  1899. xsmaddadp vs0, vs38, alpha_r
  1900. #endif
  1901. stxsspx vs0, o0, T1
  1902. add T1, T1, LDC
  1903. #ifndef TRMMKERNEL
  1904. lxsspx vs0, o0, T1
  1905. #endif
  1906. #ifdef TRMMKERNEL
  1907. xsmuldp vs0, vs39, alpha_r
  1908. #else
  1909. xsmaddadp vs0, vs39, alpha_r
  1910. #endif
  1911. stxsspx vs0, o0, T1
  1912. add T1, T1, LDC
  1913. addi CO, CO, 4
  1914. #if defined(_AIX)
  1915. ')
  1916. #else
  1917. .endm
  1918. #endif
/**********************************************************************************************
* Macros for N=4 and M=16
**********************************************************************************************/
  1922. #if defined(_AIX)
  1923. define(`LOAD4x16_1', `
  1924. #else
  1925. .macro LOAD4x16_1
  1926. #endif
  1927. lxvw4x vs0, o0, AO
  1928. lxvw4x vs1, o16, AO
  1929. lxvw4x vs2, o32, AO
  1930. lxvw4x vs3, o48, AO
  1931. addi AO, AO, 64
  1932. lxvw4x vs28, o0, BO
  1933. xxspltw vs8, vs28, 0
  1934. xxspltw vs9, vs28, 1
  1935. xxspltw vs10, vs28, 2
  1936. xxspltw vs11, vs28, 3
  1937. addi BO, BO, 16
  1938. #if defined(_AIX)
  1939. ')
  1940. #else
  1941. .endm
  1942. #endif
  1943. #if defined(_AIX)
  1944. define(`KERNEL4x16_I1', `
  1945. #else
  1946. .macro KERNEL4x16_I1
  1947. #endif
  1948. lxvw4x vs4, o0, AO
  1949. lxvw4x vs5, o16, AO
  1950. lxvw4x vs6, o32, AO
  1951. lxvw4x vs7, o48, AO
  1952. addi AO, AO, 64
  1953. lxvw4x vs28, o0, BO
  1954. xxspltw vs16, vs28, 0
  1955. xxspltw vs17, vs28, 1
  1956. xxspltw vs18, vs28, 2
  1957. xxspltw vs19, vs28, 3
  1958. addi BO, BO, 16
  1959. xvmulsp vs32, vs0, vs8
  1960. xvmulsp vs33, vs1, vs8
  1961. xvmulsp vs34, vs2, vs8
  1962. xvmulsp vs35, vs3, vs8
  1963. xvmulsp vs36, vs0, vs9
  1964. xvmulsp vs37, vs1, vs9
  1965. xvmulsp vs38, vs2, vs9
  1966. xvmulsp vs39, vs3, vs9
  1967. xvmulsp vs40, vs0, vs10
  1968. xvmulsp vs41, vs1, vs10
  1969. xvmulsp vs42, vs2, vs10
  1970. xvmulsp vs43, vs3, vs10
  1971. xvmulsp vs44, vs0, vs11
  1972. xvmulsp vs45, vs1, vs11
  1973. xvmulsp vs46, vs2, vs11
  1974. xvmulsp vs47, vs3, vs11
  1975. #if defined(_AIX)
  1976. ')
  1977. #else
  1978. .endm
  1979. #endif
  1980. #if defined(_AIX)
  1981. define(`KERNEL4x16_1', `
  1982. #else
  1983. .macro KERNEL4x16_1
  1984. #endif
  1985. lxvw4x vs4, o0, AO
  1986. lxvw4x vs5, o16, AO
  1987. lxvw4x vs6, o32, AO
  1988. lxvw4x vs7, o48, AO
  1989. addi AO, AO, 64
  1990. lxvw4x vs28, o0, BO
  1991. xxspltw vs16, vs28, 0
  1992. xxspltw vs17, vs28, 1
  1993. xxspltw vs18, vs28, 2
  1994. xxspltw vs19, vs28, 3
  1995. addi BO, BO, 16
  1996. xvmaddasp vs32, vs0, vs8
  1997. xvmaddasp vs33, vs1, vs8
  1998. xvmaddasp vs34, vs2, vs8
  1999. xvmaddasp vs35, vs3, vs8
  2000. xvmaddasp vs36, vs0, vs9
  2001. xvmaddasp vs37, vs1, vs9
  2002. xvmaddasp vs38, vs2, vs9
  2003. xvmaddasp vs39, vs3, vs9
  2004. xvmaddasp vs40, vs0, vs10
  2005. xvmaddasp vs41, vs1, vs10
  2006. xvmaddasp vs42, vs2, vs10
  2007. xvmaddasp vs43, vs3, vs10
  2008. xvmaddasp vs44, vs0, vs11
  2009. xvmaddasp vs45, vs1, vs11
  2010. xvmaddasp vs46, vs2, vs11
  2011. xvmaddasp vs47, vs3, vs11
  2012. #if defined(_AIX)
  2013. ')
  2014. #else
  2015. .endm
  2016. #endif
  2017. #if defined(_AIX)
  2018. define(`KERNEL4x16_2', `
  2019. #else
  2020. .macro KERNEL4x16_2
  2021. #endif
  2022. lxvw4x vs0, o0, AO
  2023. lxvw4x vs1, o16, AO
  2024. lxvw4x vs2, o32, AO
  2025. lxvw4x vs3, o48, AO
  2026. addi AO, AO, 64
  2027. lxvw4x vs28, o0, BO
  2028. xxspltw vs8, vs28, 0
  2029. xxspltw vs9, vs28, 1
  2030. xxspltw vs10, vs28, 2
  2031. xxspltw vs11, vs28, 3
  2032. addi BO, BO, 16
  2033. xvmaddasp vs32, vs4, vs16
  2034. xvmaddasp vs33, vs5, vs16
  2035. xvmaddasp vs34, vs6, vs16
  2036. xvmaddasp vs35, vs7, vs16
  2037. xvmaddasp vs36, vs4, vs17
  2038. xvmaddasp vs37, vs5, vs17
  2039. xvmaddasp vs38, vs6, vs17
  2040. xvmaddasp vs39, vs7, vs17
  2041. xvmaddasp vs40, vs4, vs18
  2042. xvmaddasp vs41, vs5, vs18
  2043. xvmaddasp vs42, vs6, vs18
  2044. xvmaddasp vs43, vs7, vs18
  2045. xvmaddasp vs44, vs4, vs19
  2046. xvmaddasp vs45, vs5, vs19
  2047. xvmaddasp vs46, vs6, vs19
  2048. xvmaddasp vs47, vs7, vs19
  2049. #if defined(_AIX)
  2050. ')
  2051. #else
  2052. .endm
  2053. #endif
  2054. #if defined(_AIX)
  2055. define(`KERNEL4x16_E2', `
  2056. #else
  2057. .macro KERNEL4x16_E2
  2058. #endif
  2059. xvmaddasp vs32, vs4, vs16
  2060. xvmaddasp vs33, vs5, vs16
  2061. xvmaddasp vs34, vs6, vs16
  2062. xvmaddasp vs35, vs7, vs16
  2063. xvmaddasp vs36, vs4, vs17
  2064. xvmaddasp vs37, vs5, vs17
  2065. xvmaddasp vs38, vs6, vs17
  2066. xvmaddasp vs39, vs7, vs17
  2067. xvmaddasp vs40, vs4, vs18
  2068. xvmaddasp vs41, vs5, vs18
  2069. xvmaddasp vs42, vs6, vs18
  2070. xvmaddasp vs43, vs7, vs18
  2071. xvmaddasp vs44, vs4, vs19
  2072. xvmaddasp vs45, vs5, vs19
  2073. xvmaddasp vs46, vs6, vs19
  2074. xvmaddasp vs47, vs7, vs19
  2075. #if defined(_AIX)
  2076. ')
  2077. #else
  2078. .endm
  2079. #endif
  2080. #if defined(_AIX)
  2081. define(`KERNEL4x16_SUBI1', `
  2082. #else
  2083. .macro KERNEL4x16_SUBI1
  2084. #endif
  2085. lxvw4x vs0, o0, AO
  2086. lxvw4x vs1, o16, AO
  2087. lxvw4x vs2, o32, AO
  2088. lxvw4x vs3, o48, AO
  2089. addi AO, AO, 64
  2090. lxvw4x vs28, o0, BO
  2091. xxspltw vs8, vs28, 0
  2092. xxspltw vs9, vs28, 1
  2093. xxspltw vs10, vs28, 2
  2094. xxspltw vs11, vs28, 3
  2095. addi BO, BO, 16
  2096. xvmulsp vs32, vs0, vs8
  2097. xvmulsp vs33, vs1, vs8
  2098. xvmulsp vs34, vs2, vs8
  2099. xvmulsp vs35, vs3, vs8
  2100. xvmulsp vs36, vs0, vs9
  2101. xvmulsp vs37, vs1, vs9
  2102. xvmulsp vs38, vs2, vs9
  2103. xvmulsp vs39, vs3, vs9
  2104. xvmulsp vs40, vs0, vs10
  2105. xvmulsp vs41, vs1, vs10
  2106. xvmulsp vs42, vs2, vs10
  2107. xvmulsp vs43, vs3, vs10
  2108. xvmulsp vs44, vs0, vs11
  2109. xvmulsp vs45, vs1, vs11
  2110. xvmulsp vs46, vs2, vs11
  2111. xvmulsp vs47, vs3, vs11
  2112. #if defined(_AIX)
  2113. ')
  2114. #else
  2115. .endm
  2116. #endif
  2117. #if defined(_AIX)
  2118. define(`KERNEL4x16_SUB1', `
  2119. #else
  2120. .macro KERNEL4x16_SUB1
  2121. #endif
  2122. lxvw4x vs0, o0, AO
  2123. lxvw4x vs1, o16, AO
  2124. lxvw4x vs2, o32, AO
  2125. lxvw4x vs3, o48, AO
  2126. addi AO, AO, 64
  2127. lxvw4x vs28, o0, BO
  2128. xxspltw vs8, vs28, 0
  2129. xxspltw vs9, vs28, 1
  2130. xxspltw vs10, vs28, 2
  2131. xxspltw vs11, vs28, 3
  2132. addi BO, BO, 16
  2133. xvmaddasp vs32, vs0, vs8
  2134. xvmaddasp vs33, vs1, vs8
  2135. xvmaddasp vs34, vs2, vs8
  2136. xvmaddasp vs35, vs3, vs8
  2137. xvmaddasp vs36, vs0, vs9
  2138. xvmaddasp vs37, vs1, vs9
  2139. xvmaddasp vs38, vs2, vs9
  2140. xvmaddasp vs39, vs3, vs9
  2141. xvmaddasp vs40, vs0, vs10
  2142. xvmaddasp vs41, vs1, vs10
  2143. xvmaddasp vs42, vs2, vs10
  2144. xvmaddasp vs43, vs3, vs10
  2145. xvmaddasp vs44, vs0, vs11
  2146. xvmaddasp vs45, vs1, vs11
  2147. xvmaddasp vs46, vs2, vs11
  2148. xvmaddasp vs47, vs3, vs11
  2149. #if defined(_AIX)
  2150. ')
  2151. #else
  2152. .endm
  2153. #endif
  2154. #if defined(_AIX)
  2155. define(`SAVE4x16', `
  2156. #else
  2157. .macro SAVE4x16
  2158. #endif
  2159. mr T1, CO
  2160. #ifndef TRMMKERNEL
  2161. lxvw4x vs0, o0, T1
  2162. lxvw4x vs1, o16, T1
  2163. lxvw4x vs2, o32, T1
  2164. lxvw4x vs3, o48, T1
  2165. #endif
  2166. #ifdef TRMMKERNEL
  2167. xvmulsp vs0, vs32, alpha_vr
  2168. xvmulsp vs1, vs33, alpha_vr
  2169. xvmulsp vs2, vs34, alpha_vr
  2170. xvmulsp vs3, vs35, alpha_vr
  2171. #else
  2172. xvmaddasp vs0, vs32, alpha_vr
  2173. xvmaddasp vs1, vs33, alpha_vr
  2174. xvmaddasp vs2, vs34, alpha_vr
  2175. xvmaddasp vs3, vs35, alpha_vr
  2176. #endif
  2177. stxvw4x vs0, o0, T1
  2178. stxvw4x vs1, o16, T1
  2179. stxvw4x vs2, o32, T1
  2180. stxvw4x vs3, o48, T1
  2181. add T1, T1, LDC
  2182. #ifndef TRMMKERNEL
  2183. lxvw4x vs0, o0, T1
  2184. lxvw4x vs1, o16, T1
  2185. lxvw4x vs2, o32, T1
  2186. lxvw4x vs3, o48, T1
  2187. #endif
  2188. #ifdef TRMMKERNEL
  2189. xvmulsp vs0, vs36, alpha_vr
  2190. xvmulsp vs1, vs37, alpha_vr
  2191. xvmulsp vs2, vs38, alpha_vr
  2192. xvmulsp vs3, vs39, alpha_vr
  2193. #else
  2194. xvmaddasp vs0, vs36, alpha_vr
  2195. xvmaddasp vs1, vs37, alpha_vr
  2196. xvmaddasp vs2, vs38, alpha_vr
  2197. xvmaddasp vs3, vs39, alpha_vr
  2198. #endif
  2199. stxvw4x vs0, o0, T1
  2200. stxvw4x vs1, o16, T1
  2201. stxvw4x vs2, o32, T1
  2202. stxvw4x vs3, o48, T1
  2203. add T1, T1, LDC
  2204. #ifndef TRMMKERNEL
  2205. lxvw4x vs0, o0, T1
  2206. lxvw4x vs1, o16, T1
  2207. lxvw4x vs2, o32, T1
  2208. lxvw4x vs3, o48, T1
  2209. #endif
  2210. #ifdef TRMMKERNEL
  2211. xvmulsp vs0, vs40, alpha_vr
  2212. xvmulsp vs1, vs41, alpha_vr
  2213. xvmulsp vs2, vs42, alpha_vr
  2214. xvmulsp vs3, vs43, alpha_vr
  2215. #else
  2216. xvmaddasp vs0, vs40, alpha_vr
  2217. xvmaddasp vs1, vs41, alpha_vr
  2218. xvmaddasp vs2, vs42, alpha_vr
  2219. xvmaddasp vs3, vs43, alpha_vr
  2220. #endif
  2221. stxvw4x vs0, o0, T1
  2222. stxvw4x vs1, o16, T1
  2223. stxvw4x vs2, o32, T1
  2224. stxvw4x vs3, o48, T1
  2225. add T1, T1, LDC
  2226. #ifndef TRMMKERNEL
  2227. lxvw4x vs0, o0, T1
  2228. lxvw4x vs1, o16, T1
  2229. lxvw4x vs2, o32, T1
  2230. lxvw4x vs3, o48, T1
  2231. #endif
  2232. #ifdef TRMMKERNEL
  2233. xvmulsp vs0, vs44, alpha_vr
  2234. xvmulsp vs1, vs45, alpha_vr
  2235. xvmulsp vs2, vs46, alpha_vr
  2236. xvmulsp vs3, vs47, alpha_vr
  2237. #else
  2238. xvmaddasp vs0, vs44, alpha_vr
  2239. xvmaddasp vs1, vs45, alpha_vr
  2240. xvmaddasp vs2, vs46, alpha_vr
  2241. xvmaddasp vs3, vs47, alpha_vr
  2242. #endif
  2243. stxvw4x vs0, o0, T1
  2244. stxvw4x vs1, o16, T1
  2245. stxvw4x vs2, o32, T1
  2246. stxvw4x vs3, o48, T1
  2247. add T1, T1, LDC
  2248. addi CO, CO, 64
  2249. #if defined(_AIX)
  2250. ')
  2251. #else
  2252. .endm
  2253. #endif
/**********************************************************************************************
* Macros for N=4 and M=8
**********************************************************************************************/
  2257. #if defined(_AIX)
  2258. define(`LOAD4x8_1', `
  2259. #else
  2260. .macro LOAD4x8_1
  2261. #endif
  2262. lxvw4x vs0, o0, AO
  2263. lxvw4x vs1, o16, AO
  2264. addi AO, AO, 32
  2265. lxvw4x vs28, o0, BO
  2266. xxspltw vs8, vs28, 0
  2267. xxspltw vs9, vs28, 1
  2268. xxspltw vs10, vs28, 2
  2269. xxspltw vs11, vs28, 3
  2270. addi BO, BO, 16
  2271. #if defined(_AIX)
  2272. ')
  2273. #else
  2274. .endm
  2275. #endif
  2276. #if defined(_AIX)
  2277. define(`KERNEL4x8_I1', `
  2278. #else
  2279. .macro KERNEL4x8_I1
  2280. #endif
  2281. lxvw4x vs4, o0, AO
  2282. lxvw4x vs5, o16, AO
  2283. addi AO, AO, 32
  2284. lxvw4x vs28, o0, BO
  2285. xxspltw vs16, vs28, 0
  2286. xxspltw vs17, vs28, 1
  2287. xxspltw vs18, vs28, 2
  2288. xxspltw vs19, vs28, 3
  2289. addi BO, BO, 16
  2290. xvmulsp vs32, vs0, vs8
  2291. xvmulsp vs33, vs1, vs8
  2292. xvmulsp vs34, vs0, vs9
  2293. xvmulsp vs35, vs1, vs9
  2294. xvmulsp vs36, vs0, vs10
  2295. xvmulsp vs37, vs1, vs10
  2296. xvmulsp vs38, vs0, vs11
  2297. xvmulsp vs39, vs1, vs11
  2298. #if defined(_AIX)
  2299. ')
  2300. #else
  2301. .endm
  2302. #endif
  2303. #if defined(_AIX)
  2304. define(`KERNEL4x8_1', `
  2305. #else
  2306. .macro KERNEL4x8_1
  2307. #endif
  2308. lxvw4x vs4, o0, AO
  2309. lxvw4x vs5, o16, AO
  2310. addi AO, AO, 32
  2311. lxvw4x vs28, o0, BO
  2312. xxspltw vs16, vs28, 0
  2313. xxspltw vs17, vs28, 1
  2314. xxspltw vs18, vs28, 2
  2315. xxspltw vs19, vs28, 3
  2316. addi BO, BO, 16
  2317. xvmaddasp vs32, vs0, vs8
  2318. xvmaddasp vs33, vs1, vs8
  2319. xvmaddasp vs34, vs0, vs9
  2320. xvmaddasp vs35, vs1, vs9
  2321. xvmaddasp vs36, vs0, vs10
  2322. xvmaddasp vs37, vs1, vs10
  2323. xvmaddasp vs38, vs0, vs11
  2324. xvmaddasp vs39, vs1, vs11
  2325. #if defined(_AIX)
  2326. ')
  2327. #else
  2328. .endm
  2329. #endif
  2330. #if defined(_AIX)
  2331. define(`KERNEL4x8_2', `
  2332. #else
  2333. .macro KERNEL4x8_2
  2334. #endif
  2335. lxvw4x vs0, o0, AO
  2336. lxvw4x vs1, o16, AO
  2337. addi AO, AO, 32
  2338. lxvw4x vs28, o0, BO
  2339. xxspltw vs8, vs28, 0
  2340. xxspltw vs9, vs28, 1
  2341. xxspltw vs10, vs28, 2
  2342. xxspltw vs11, vs28, 3
  2343. addi BO, BO, 16
  2344. xvmaddasp vs32, vs4, vs16
  2345. xvmaddasp vs33, vs5, vs16
  2346. xvmaddasp vs34, vs4, vs17
  2347. xvmaddasp vs35, vs5, vs17
  2348. xvmaddasp vs36, vs4, vs18
  2349. xvmaddasp vs37, vs5, vs18
  2350. xvmaddasp vs38, vs4, vs19
  2351. xvmaddasp vs39, vs5, vs19
  2352. #if defined(_AIX)
  2353. ')
  2354. #else
  2355. .endm
  2356. #endif
  2357. #if defined(_AIX)
  2358. define(`KERNEL4x8_E2', `
  2359. #else
  2360. .macro KERNEL4x8_E2
  2361. #endif
  2362. xvmaddasp vs32, vs4, vs16
  2363. xvmaddasp vs33, vs5, vs16
  2364. xvmaddasp vs34, vs4, vs17
  2365. xvmaddasp vs35, vs5, vs17
  2366. xvmaddasp vs36, vs4, vs18
  2367. xvmaddasp vs37, vs5, vs18
  2368. xvmaddasp vs38, vs4, vs19
  2369. xvmaddasp vs39, vs5, vs19
  2370. #if defined(_AIX)
  2371. ')
  2372. #else
  2373. .endm
  2374. #endif
  2375. #if defined(_AIX)
  2376. define(`KERNEL4x8_SUBI1', `
  2377. #else
  2378. .macro KERNEL4x8_SUBI1
  2379. #endif
  2380. lxvw4x vs0, o0, AO
  2381. lxvw4x vs1, o16, AO
  2382. addi AO, AO, 32
  2383. lxvw4x vs28, o0, BO
  2384. xxspltw vs8, vs28, 0
  2385. xxspltw vs9, vs28, 1
  2386. xxspltw vs10, vs28, 2
  2387. xxspltw vs11, vs28, 3
  2388. addi BO, BO, 16
  2389. xvmulsp vs32, vs0, vs8
  2390. xvmulsp vs33, vs1, vs8
  2391. xvmulsp vs34, vs0, vs9
  2392. xvmulsp vs35, vs1, vs9
  2393. xvmulsp vs36, vs0, vs10
  2394. xvmulsp vs37, vs1, vs10
  2395. xvmulsp vs38, vs0, vs11
  2396. xvmulsp vs39, vs1, vs11
  2397. #if defined(_AIX)
  2398. ')
  2399. #else
  2400. .endm
  2401. #endif
  2402. #if defined(_AIX)
  2403. define(`KERNEL4x8_SUB1', `
  2404. #else
  2405. .macro KERNEL4x8_SUB1
  2406. #endif
  2407. lxvw4x vs0, o0, AO
  2408. lxvw4x vs1, o16, AO
  2409. addi AO, AO, 32
  2410. lxvw4x vs28, o0, BO
  2411. xxspltw vs8, vs28, 0
  2412. xxspltw vs9, vs28, 1
  2413. xxspltw vs10, vs28, 2
  2414. xxspltw vs11, vs28, 3
  2415. addi BO, BO, 16
  2416. xvmaddasp vs32, vs0, vs8
  2417. xvmaddasp vs33, vs1, vs8
  2418. xvmaddasp vs34, vs0, vs9
  2419. xvmaddasp vs35, vs1, vs9
  2420. xvmaddasp vs36, vs0, vs10
  2421. xvmaddasp vs37, vs1, vs10
  2422. xvmaddasp vs38, vs0, vs11
  2423. xvmaddasp vs39, vs1, vs11
  2424. #if defined(_AIX)
  2425. ')
  2426. #else
  2427. .endm
  2428. #endif
/* SAVE4x8: store a 4x8 fp32 tile of C from accumulators vs32..vs39.
   Each of the 4 lines of C uses two vectors (8 floats). With
   TRMMKERNEL the result is alpha*acc (C is not read); otherwise C is
   loaded and alpha*acc is added to it. T1 walks the lines of C by
   LDC (presumably the C stride in bytes - TODO confirm); CO advances
   32 bytes to the next tile afterwards. */
#if defined(_AIX)
define(`SAVE4x8', `
#else
.macro SAVE4x8
#endif
mr T1, CO
/* line 0 of C: accumulators vs32, vs33 */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
xvmulsp vs1, vs33, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
xvmaddasp vs1, vs33, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* line 1 of C: accumulators vs34, vs35 */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs34, alpha_vr
xvmulsp vs1, vs35, alpha_vr
#else
xvmaddasp vs0, vs34, alpha_vr
xvmaddasp vs1, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* line 2 of C: accumulators vs36, vs37 */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs36, alpha_vr
xvmulsp vs1, vs37, alpha_vr
#else
xvmaddasp vs0, vs36, alpha_vr
xvmaddasp vs1, vs37, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* line 3 of C: accumulators vs38, vs39 */
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs38, alpha_vr
xvmulsp vs1, vs39, alpha_vr
#else
xvmaddasp vs0, vs38, alpha_vr
xvmaddasp vs1, vs39, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
/* advance C pointer past the 8 floats just written */
addi CO, CO, 32
#if defined(_AIX)
')
#else
.endm
#endif
  2497. /**********************************************************************************************
  2498. * Macros for N=4 and M=4
  2499. **********************************************************************************************/
/* N=4 M=4 micro-kernel step macros. One step multiplies a 4-float
   vector of A (vs0 or vs4) by four splatted B scalars into the
   accumulators vs32..vs35. The _1/_2 pair is software pipelined:
   _1 loads the next A/B set (vs4, vs16..vs19) while accumulating the
   current set (vs0, vs8..vs11); _2 does the converse. _I1 starts a
   pipelined run (xvmulsp writes fresh accumulators), _E2 drains the
   last prefetched set without loading. _SUBI1/_SUB1 are stand-alone
   single steps for the loop tail. AO advances 16 bytes (4 floats of
   A) and BO advances 16 bytes (4 floats of B) per step. */
/* Load the current A vector into vs0 and splat the 4 B values of this
   step into vs8..vs11. */
#if defined(_AIX)
define(`LOAD4x4_1', `
#else
.macro LOAD4x4_1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
addi BO, BO, 16
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch the next A/B set into vs4 and
   vs16..vs19, and initialize the accumulators from the current
   vs0 / vs8..vs11 set with xvmulsp. */
#if defined(_AIX)
define(`KERNEL4x4_I1', `
#else
.macro KERNEL4x4_I1
#endif
lxvw4x vs4, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
addi BO, BO, 16
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs0, vs9
xvmulsp vs34, vs0, vs10
xvmulsp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch the next set into vs4/vs16..vs19 and
   accumulate the current vs0/vs8..vs11 set. */
#if defined(_AIX)
define(`KERNEL4x4_1', `
#else
.macro KERNEL4x4_1
#endif
lxvw4x vs4, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
xxspltw vs18, vs28, 2
xxspltw vs19, vs28, 3
addi BO, BO, 16
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs0, vs9
xvmaddasp vs34, vs0, vs10
xvmaddasp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0/vs8..vs11 and accumulate the
   previously loaded vs4/vs16..vs19 set. */
#if defined(_AIX)
define(`KERNEL4x4_2', `
#else
.macro KERNEL4x4_2
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
addi BO, BO, 16
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs4, vs17
xvmaddasp vs34, vs4, vs18
xvmaddasp vs35, vs4, vs19
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched vs4/vs16..vs19 set;
   no memory access, no pointer updates. */
#if defined(_AIX)
define(`KERNEL4x4_E2', `
#else
.macro KERNEL4x4_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs4, vs17
xvmaddasp vs34, vs4, vs18
xvmaddasp vs35, vs4, vs19
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: load one A/B set and initialize the
   accumulators with xvmulsp (no prior value read). */
#if defined(_AIX)
define(`KERNEL4x4_SUBI1', `
#else
.macro KERNEL4x4_SUBI1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
addi BO, BO, 16
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs0, vs9
xvmulsp vs34, vs0, vs10
xvmulsp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone step for the loop tail: load one A/B set and accumulate
   into vs32..vs35. */
#if defined(_AIX)
define(`KERNEL4x4_SUB1', `
#else
.macro KERNEL4x4_SUB1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
xxspltw vs10, vs28, 2
xxspltw vs11, vs28, 3
addi BO, BO, 16
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs0, vs9
xvmaddasp vs34, vs0, vs10
xvmaddasp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE4x4: store the 4x4 fp32 tile. One vector per line of C; TRMM
   writes alpha*acc, otherwise alpha*acc is added to the loaded C.
   T1 steps by LDC; CO advances 16 bytes. */
#if defined(_AIX)
define(`SAVE4x4', `
#else
.macro SAVE4x4
#endif
mr T1, CO
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs33, alpha_vr
#else
xvmaddasp vs0, vs33, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs34, alpha_vr
#else
xvmaddasp vs0, vs34, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs35, alpha_vr
#else
xvmaddasp vs0, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
addi CO, CO, 16
#if defined(_AIX)
')
#else
.endm
#endif
  2694. /**********************************************************************************************
  2695. * Macros for N=4 and M=2
  2696. **********************************************************************************************/
/* N=4 M=2 scalar micro-kernel macros. Too narrow for vectors: lxsspx
   loads single fp32 values (converted to double in the register) and
   the math is done with double-precision scalar ops (xsmuldp /
   xsmaddadp). A values go in vs0/vs1 (or vs4/vs5 for the pipelined
   alternate set), the four B values in vs8..vs11 (or vs16..vs19).
   Accumulators are vs32..vs39 (2 M x 4 N). AO advances 8 bytes and
   BO advances 16 bytes per step. Same _I1/_1/_2/_E2/_SUBI1/_SUB1
   pipelining scheme as the vector kernels above. */
/* Load the current 2 A values and 4 B values. */
#if defined(_AIX)
define(`LOAD4x2_1', `
#else
.macro LOAD4x2_1
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch the next set into vs4/vs5 and
   vs16..vs19, initialize accumulators from the current set. */
#if defined(_AIX)
define(`KERNEL4x2_I1', `
#else
.macro KERNEL4x2_I1
#endif
lxsspx vs4, o0, AO
lxsspx vs5, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
lxsspx vs18, o8, T1
lxsspx vs19, o12, T1
addi BO, BO, 16
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs1, vs8
xsmuldp vs34, vs0, vs9
xsmuldp vs35, vs1, vs9
xsmuldp vs36, vs0, vs10
xsmuldp vs37, vs1, vs10
xsmuldp vs38, vs0, vs11
xsmuldp vs39, vs1, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch next set, accumulate current set. */
#if defined(_AIX)
define(`KERNEL4x2_1', `
#else
.macro KERNEL4x2_1
#endif
lxsspx vs4, o0, AO
lxsspx vs5, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
lxsspx vs18, o8, T1
lxsspx vs19, o12, T1
addi BO, BO, 16
xsmaddadp vs32, vs0, vs8
xsmaddadp vs33, vs1, vs8
xsmaddadp vs34, vs0, vs9
xsmaddadp vs35, vs1, vs9
xsmaddadp vs36, vs0, vs10
xsmaddadp vs37, vs1, vs10
xsmaddadp vs38, vs0, vs11
xsmaddadp vs39, vs1, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0/vs1 and vs8..vs11, accumulate
   the vs4/vs5 and vs16..vs19 set. */
#if defined(_AIX)
define(`KERNEL4x2_2', `
#else
.macro KERNEL4x2_2
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
xsmaddadp vs32, vs4, vs16
xsmaddadp vs33, vs5, vs16
xsmaddadp vs34, vs4, vs17
xsmaddadp vs35, vs5, vs17
xsmaddadp vs36, vs4, vs18
xsmaddadp vs37, vs5, vs18
xsmaddadp vs38, vs4, vs19
xsmaddadp vs39, vs5, vs19
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched set, no loads. */
#if defined(_AIX)
define(`KERNEL4x2_E2', `
#else
.macro KERNEL4x2_E2
#endif
xsmaddadp vs32, vs4, vs16
xsmaddadp vs33, vs5, vs16
xsmaddadp vs34, vs4, vs17
xsmaddadp vs35, vs5, vs17
xsmaddadp vs36, vs4, vs18
xsmaddadp vs37, vs5, vs18
xsmaddadp vs38, vs4, vs19
xsmaddadp vs39, vs5, vs19
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: initialize accumulators with xsmuldp. */
#if defined(_AIX)
define(`KERNEL4x2_SUBI1', `
#else
.macro KERNEL4x2_SUBI1
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs1, vs8
xsmuldp vs34, vs0, vs9
xsmuldp vs35, vs1, vs9
xsmuldp vs36, vs0, vs10
xsmuldp vs37, vs1, vs10
xsmuldp vs38, vs0, vs11
xsmuldp vs39, vs1, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone tail step: load one set and accumulate. */
#if defined(_AIX)
define(`KERNEL4x2_SUB1', `
#else
.macro KERNEL4x2_SUB1
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
xsmaddadp vs32, vs0, vs8
xsmaddadp vs33, vs1, vs8
xsmaddadp vs34, vs0, vs9
xsmaddadp vs35, vs1, vs9
xsmaddadp vs36, vs0, vs10
xsmaddadp vs37, vs1, vs10
xsmaddadp vs38, vs0, vs11
xsmaddadp vs39, vs1, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE4x2: store the 2x4 tile, two scalars per line of C. Math is in
   double precision with scalar alpha_r; stxsspx rounds back to
   single on store. TRMM writes alpha*acc, GEMM adds to loaded C. */
#if defined(_AIX)
define(`SAVE4x2', `
#else
.macro SAVE4x2
#endif
mr T1, CO
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
lxsspx vs1, o4, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs32, alpha_r
xsmuldp vs1, vs33, alpha_r
#else
xsmaddadp vs0, vs32, alpha_r
xsmaddadp vs1, vs33, alpha_r
#endif
stxsspx vs0, o0, T1
stxsspx vs1, o4, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
lxsspx vs1, o4, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs34, alpha_r
xsmuldp vs1, vs35, alpha_r
#else
xsmaddadp vs0, vs34, alpha_r
xsmaddadp vs1, vs35, alpha_r
#endif
stxsspx vs0, o0, T1
stxsspx vs1, o4, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
lxsspx vs1, o4, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs36, alpha_r
xsmuldp vs1, vs37, alpha_r
#else
xsmaddadp vs0, vs36, alpha_r
xsmaddadp vs1, vs37, alpha_r
#endif
stxsspx vs0, o0, T1
stxsspx vs1, o4, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
lxsspx vs1, o4, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs38, alpha_r
xsmuldp vs1, vs39, alpha_r
#else
xsmaddadp vs0, vs38, alpha_r
xsmaddadp vs1, vs39, alpha_r
#endif
stxsspx vs0, o0, T1
stxsspx vs1, o4, T1
add T1, T1, LDC
addi CO, CO, 8
#if defined(_AIX)
')
#else
.endm
#endif
  2937. /**********************************************************************************************
  2938. * Macros for N=4 and M=1
  2939. **********************************************************************************************/
/* N=4 M=1 scalar micro-kernel macros. One A value (vs0 or vs4) times
   four B values (vs8..vs11 or vs16..vs19) into accumulators
   vs32..vs35, using double-precision scalar ops on values loaded as
   single precision by lxsspx. AO advances 4 bytes, BO 16 bytes per
   step. Same pipelining roles as the other kernel families. */
/* Load the current A value and the 4 B values. */
#if defined(_AIX)
define(`LOAD4x1_1', `
#else
.macro LOAD4x1_1
#endif
lxsspx vs0, o0, AO
addi AO, AO, 4
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch next set, initialize accumulators. */
#if defined(_AIX)
define(`KERNEL4x1_I1', `
#else
.macro KERNEL4x1_I1
#endif
lxsspx vs4, o0, AO
addi AO, AO, 4
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
lxsspx vs18, o8, T1
lxsspx vs19, o12, T1
addi BO, BO, 16
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs0, vs9
xsmuldp vs34, vs0, vs10
xsmuldp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch next set, accumulate current set. */
#if defined(_AIX)
define(`KERNEL4x1_1', `
#else
.macro KERNEL4x1_1
#endif
lxsspx vs4, o0, AO
addi AO, AO, 4
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
lxsspx vs18, o8, T1
lxsspx vs19, o12, T1
addi BO, BO, 16
xsmaddadp vs32, vs0, vs8
xsmaddadp vs33, vs0, vs9
xsmaddadp vs34, vs0, vs10
xsmaddadp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0/vs8..vs11, accumulate the
   vs4/vs16..vs19 set. */
#if defined(_AIX)
define(`KERNEL4x1_2', `
#else
.macro KERNEL4x1_2
#endif
lxsspx vs0, o0, AO
addi AO, AO, 4
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
xsmaddadp vs32, vs4, vs16
xsmaddadp vs33, vs4, vs17
xsmaddadp vs34, vs4, vs18
xsmaddadp vs35, vs4, vs19
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched set, no loads. */
#if defined(_AIX)
define(`KERNEL4x1_E2', `
#else
.macro KERNEL4x1_E2
#endif
xsmaddadp vs32, vs4, vs16
xsmaddadp vs33, vs4, vs17
xsmaddadp vs34, vs4, vs18
xsmaddadp vs35, vs4, vs19
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: initialize accumulators with xsmuldp. */
#if defined(_AIX)
define(`KERNEL4x1_SUBI1', `
#else
.macro KERNEL4x1_SUBI1
#endif
lxsspx vs0, o0, AO
addi AO, AO, 4
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs0, vs9
xsmuldp vs34, vs0, vs10
xsmuldp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone tail step: load one set and accumulate. */
#if defined(_AIX)
define(`KERNEL4x1_SUB1', `
#else
.macro KERNEL4x1_SUB1
#endif
lxsspx vs0, o0, AO
addi AO, AO, 4
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
lxsspx vs10, o8, T1
lxsspx vs11, o12, T1
addi BO, BO, 16
xsmaddadp vs32, vs0, vs8
xsmaddadp vs33, vs0, vs9
xsmaddadp vs34, vs0, vs10
xsmaddadp vs35, vs0, vs11
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE4x1: store one scalar per line of C for 4 lines; TRMM writes
   alpha*acc, GEMM adds to the loaded C value. CO advances 4 bytes. */
#if defined(_AIX)
define(`SAVE4x1', `
#else
.macro SAVE4x1
#endif
mr T1, CO
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs32, alpha_r
#else
xsmaddadp vs0, vs32, alpha_r
#endif
stxsspx vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs33, alpha_r
#else
xsmaddadp vs0, vs33, alpha_r
#endif
stxsspx vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs34, alpha_r
#else
xsmaddadp vs0, vs34, alpha_r
#endif
stxsspx vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxsspx vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xsmuldp vs0, vs35, alpha_r
#else
xsmaddadp vs0, vs35, alpha_r
#endif
stxsspx vs0, o0, T1
add T1, T1, LDC
addi CO, CO, 4
#if defined(_AIX)
')
#else
.endm
#endif
  3134. /**********************************************************************************************
  3135. * Macros for N=2 and M=16
  3136. **********************************************************************************************/
/* N=2 M=16 micro-kernel macros. One step multiplies 16 floats of A
   (vectors vs0..vs3, or vs4..vs7 for the alternate set) by 2 splatted
   B scalars (words 0 and 1 of vs28 splatted to vs8/vs9 or vs16/vs17)
   into accumulators vs32..vs39. AO advances 64 bytes, BO 8 bytes per
   step. NOTE(review): lxvw4x reads 16 bytes of B although only 8 are
   consumed and BO advances by 8 - presumably safe because the packed
   B buffer is padded; TODO confirm. Same _I1/_1/_2/_E2/_SUBI1/_SUB1
   pipelining roles as the kernels above. */
/* Load the current 16 A floats and splat the 2 B values. */
#if defined(_AIX)
define(`LOAD2x16_1', `
#else
.macro LOAD2x16_1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch next set into vs4..vs7 and
   vs16/vs17, initialize accumulators with xvmulsp. */
#if defined(_AIX)
define(`KERNEL2x16_I1', `
#else
.macro KERNEL2x16_I1
#endif
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
lxvw4x vs6, o32, AO
lxvw4x vs7, o48, AO
addi AO, AO, 64
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
addi BO, BO, 8
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs2, vs8
xvmulsp vs35, vs3, vs8
xvmulsp vs36, vs0, vs9
xvmulsp vs37, vs1, vs9
xvmulsp vs38, vs2, vs9
xvmulsp vs39, vs3, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch next set, accumulate current set. */
#if defined(_AIX)
define(`KERNEL2x16_1', `
#else
.macro KERNEL2x16_1
#endif
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
lxvw4x vs6, o32, AO
lxvw4x vs7, o48, AO
addi AO, AO, 64
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs2, vs8
xvmaddasp vs35, vs3, vs8
xvmaddasp vs36, vs0, vs9
xvmaddasp vs37, vs1, vs9
xvmaddasp vs38, vs2, vs9
xvmaddasp vs39, vs3, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0..vs3 and vs8/vs9, accumulate
   the vs4..vs7 and vs16/vs17 set. */
#if defined(_AIX)
define(`KERNEL2x16_2', `
#else
.macro KERNEL2x16_2
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs6, vs16
xvmaddasp vs35, vs7, vs16
xvmaddasp vs36, vs4, vs17
xvmaddasp vs37, vs5, vs17
xvmaddasp vs38, vs6, vs17
xvmaddasp vs39, vs7, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched set, no loads. */
#if defined(_AIX)
define(`KERNEL2x16_E2', `
#else
.macro KERNEL2x16_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs6, vs16
xvmaddasp vs35, vs7, vs16
xvmaddasp vs36, vs4, vs17
xvmaddasp vs37, vs5, vs17
xvmaddasp vs38, vs6, vs17
xvmaddasp vs39, vs7, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: initialize accumulators with xvmulsp. */
#if defined(_AIX)
define(`KERNEL2x16_SUBI1', `
#else
.macro KERNEL2x16_SUBI1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs2, vs8
xvmulsp vs35, vs3, vs8
xvmulsp vs36, vs0, vs9
xvmulsp vs37, vs1, vs9
xvmulsp vs38, vs2, vs9
xvmulsp vs39, vs3, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone tail step: load one set and accumulate. */
#if defined(_AIX)
define(`KERNEL2x16_SUB1', `
#else
.macro KERNEL2x16_SUB1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
lxvw4x vs2, o32, AO
lxvw4x vs3, o48, AO
addi AO, AO, 64
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs2, vs8
xvmaddasp vs35, vs3, vs8
xvmaddasp vs36, vs0, vs9
xvmaddasp vs37, vs1, vs9
xvmaddasp vs38, vs2, vs9
xvmaddasp vs39, vs3, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE2x16: store the 2x16 tile, four vectors per line of C for 2
   lines. TRMM writes alpha*acc, GEMM adds alpha*acc to the loaded C.
   CO advances 64 bytes. */
#if defined(_AIX)
define(`SAVE2x16', `
#else
.macro SAVE2x16
#endif
mr T1, CO
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
xvmulsp vs1, vs33, alpha_vr
xvmulsp vs2, vs34, alpha_vr
xvmulsp vs3, vs35, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
xvmaddasp vs1, vs33, alpha_vr
xvmaddasp vs2, vs34, alpha_vr
xvmaddasp vs3, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
lxvw4x vs2, o32, T1
lxvw4x vs3, o48, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs36, alpha_vr
xvmulsp vs1, vs37, alpha_vr
xvmulsp vs2, vs38, alpha_vr
xvmulsp vs3, vs39, alpha_vr
#else
xvmaddasp vs0, vs36, alpha_vr
xvmaddasp vs1, vs37, alpha_vr
xvmaddasp vs2, vs38, alpha_vr
xvmaddasp vs3, vs39, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
stxvw4x vs2, o32, T1
stxvw4x vs3, o48, T1
add T1, T1, LDC
addi CO, CO, 64
#if defined(_AIX)
')
#else
.endm
#endif
  3365. /**********************************************************************************************
  3366. * Macros for N=2 and M=8
  3367. **********************************************************************************************/
/* N=2 M=8 micro-kernel macros. One step multiplies 8 floats of A
   (vs0/vs1 or vs4/vs5) by 2 splatted B scalars (vs8/vs9 or vs16/vs17)
   into accumulators vs32..vs35. AO advances 32 bytes, BO 8 bytes per
   step. Same pipelining roles as the kernels above; lxvw4x reads 16
   bytes of B although only 8 are consumed per step. */
/* Load the current 8 A floats and splat the 2 B values. */
#if defined(_AIX)
define(`LOAD2x8_1', `
#else
.macro LOAD2x8_1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch next set, initialize accumulators. */
#if defined(_AIX)
define(`KERNEL2x8_I1', `
#else
.macro KERNEL2x8_I1
#endif
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
addi AO, AO, 32
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
addi BO, BO, 8
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs0, vs9
xvmulsp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch next set, accumulate current set. */
#if defined(_AIX)
define(`KERNEL2x8_1', `
#else
.macro KERNEL2x8_1
#endif
lxvw4x vs4, o0, AO
lxvw4x vs5, o16, AO
addi AO, AO, 32
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs0, vs9
xvmaddasp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0/vs1 and vs8/vs9, accumulate
   the vs4/vs5 and vs16/vs17 set. */
#if defined(_AIX)
define(`KERNEL2x8_2', `
#else
.macro KERNEL2x8_2
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs4, vs17
xvmaddasp vs35, vs5, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched set, no loads. */
#if defined(_AIX)
define(`KERNEL2x8_E2', `
#else
.macro KERNEL2x8_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs5, vs16
xvmaddasp vs34, vs4, vs17
xvmaddasp vs35, vs5, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: initialize accumulators with xvmulsp. */
#if defined(_AIX)
define(`KERNEL2x8_SUBI1', `
#else
.macro KERNEL2x8_SUBI1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs1, vs8
xvmulsp vs34, vs0, vs9
xvmulsp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone tail step: load one set and accumulate. */
#if defined(_AIX)
define(`KERNEL2x8_SUB1', `
#else
.macro KERNEL2x8_SUB1
#endif
lxvw4x vs0, o0, AO
lxvw4x vs1, o16, AO
addi AO, AO, 32
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs1, vs8
xvmaddasp vs34, vs0, vs9
xvmaddasp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE2x8: store the 2x8 tile, two vectors per line of C for 2 lines.
   TRMM writes alpha*acc, GEMM adds alpha*acc to the loaded C.
   CO advances 32 bytes. */
#if defined(_AIX)
define(`SAVE2x8', `
#else
.macro SAVE2x8
#endif
mr T1, CO
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
xvmulsp vs1, vs33, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
xvmaddasp vs1, vs33, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
lxvw4x vs1, o16, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs34, alpha_vr
xvmulsp vs1, vs35, alpha_vr
#else
xvmaddasp vs0, vs34, alpha_vr
xvmaddasp vs1, vs35, alpha_vr
#endif
stxvw4x vs0, o0, T1
stxvw4x vs1, o16, T1
add T1, T1, LDC
addi CO, CO, 32
#if defined(_AIX)
')
#else
.endm
#endif
  3544. /**********************************************************************************************
  3545. * Macros for N=2 and M=4
  3546. **********************************************************************************************/
/* N=2 M=4 micro-kernel macros. One step multiplies 4 floats of A
   (vs0 or vs4) by 2 splatted B scalars (vs8/vs9 or vs16/vs17) into
   accumulators vs32/vs33. AO advances 16 bytes, BO 8 bytes per step.
   Same pipelining roles as the kernels above. */
/* Load the current 4 A floats and splat the 2 B values. */
#if defined(_AIX)
define(`LOAD2x4_1', `
#else
.macro LOAD2x4_1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch next set, initialize accumulators. */
#if defined(_AIX)
define(`KERNEL2x4_I1', `
#else
.macro KERNEL2x4_I1
#endif
lxvw4x vs4, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
addi BO, BO, 8
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs0, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch next set, accumulate current set. */
#if defined(_AIX)
define(`KERNEL2x4_1', `
#else
.macro KERNEL2x4_1
#endif
lxvw4x vs4, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs16, vs28, 0
xxspltw vs17, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs0, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0 and vs8/vs9, accumulate the
   vs4 and vs16/vs17 set. */
#if defined(_AIX)
define(`KERNEL2x4_2', `
#else
.macro KERNEL2x4_2
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs4, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched set, no loads. */
#if defined(_AIX)
define(`KERNEL2x4_E2', `
#else
.macro KERNEL2x4_E2
#endif
xvmaddasp vs32, vs4, vs16
xvmaddasp vs33, vs4, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: initialize accumulators with xvmulsp. */
#if defined(_AIX)
define(`KERNEL2x4_SUBI1', `
#else
.macro KERNEL2x4_SUBI1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmulsp vs32, vs0, vs8
xvmulsp vs33, vs0, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone tail step: load one set and accumulate. */
#if defined(_AIX)
define(`KERNEL2x4_SUB1', `
#else
.macro KERNEL2x4_SUB1
#endif
lxvw4x vs0, o0, AO
addi AO, AO, 16
lxvw4x vs28, o0, BO
xxspltw vs8, vs28, 0
xxspltw vs9, vs28, 1
addi BO, BO, 8
xvmaddasp vs32, vs0, vs8
xvmaddasp vs33, vs0, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* SAVE2x4: store the 2x4 tile, one vector per line of C for 2 lines.
   TRMM writes alpha*acc, GEMM adds alpha*acc to the loaded C.
   CO advances 16 bytes. */
#if defined(_AIX)
define(`SAVE2x4', `
#else
.macro SAVE2x4
#endif
mr T1, CO
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs32, alpha_vr
#else
xvmaddasp vs0, vs32, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
#ifndef TRMMKERNEL
lxvw4x vs0, o0, T1
#endif
#ifdef TRMMKERNEL
xvmulsp vs0, vs33, alpha_vr
#else
xvmaddasp vs0, vs33, alpha_vr
#endif
stxvw4x vs0, o0, T1
add T1, T1, LDC
addi CO, CO, 16
#if defined(_AIX)
')
#else
.endm
#endif
  3697. /**********************************************************************************************
  3698. * Macros for N=2 and M=2
  3699. **********************************************************************************************/
/* N=2 M=2 scalar micro-kernel macros. Two A values (vs0/vs1 or
   vs4/vs5) times two B values (vs8/vs9 or vs16/vs17) into
   accumulators vs32..vs35, using double-precision scalar ops on
   values loaded as single precision by lxsspx. AO and BO both
   advance 8 bytes per step. Same pipelining roles as above. */
/* Load the current 2 A values and 2 B values. */
#if defined(_AIX)
define(`LOAD2x2_1', `
#else
.macro LOAD2x2_1
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
addi BO, BO, 8
#if defined(_AIX)
')
#else
.endm
#endif
/* First pipelined step: prefetch next set, initialize accumulators. */
#if defined(_AIX)
define(`KERNEL2x2_I1', `
#else
.macro KERNEL2x2_I1
#endif
lxsspx vs4, o0, AO
lxsspx vs5, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
addi BO, BO, 8
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs1, vs8
xsmuldp vs34, vs0, vs9
xsmuldp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Even pipelined step: prefetch next set, accumulate current set. */
#if defined(_AIX)
define(`KERNEL2x2_1', `
#else
.macro KERNEL2x2_1
#endif
lxsspx vs4, o0, AO
lxsspx vs5, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs16, o0, T1
lxsspx vs17, o4, T1
addi BO, BO, 8
xsmaddadp vs32, vs0, vs8
xsmaddadp vs33, vs1, vs8
xsmaddadp vs34, vs0, vs9
xsmaddadp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
/* Odd pipelined step: prefetch into vs0/vs1 and vs8/vs9, accumulate
   the vs4/vs5 and vs16/vs17 set. */
#if defined(_AIX)
define(`KERNEL2x2_2', `
#else
.macro KERNEL2x2_2
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
addi BO, BO, 8
xsmaddadp vs32, vs4, vs16
xsmaddadp vs33, vs5, vs16
xsmaddadp vs34, vs4, vs17
xsmaddadp vs35, vs5, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Pipeline drain: accumulate the last prefetched set, no loads. */
#if defined(_AIX)
define(`KERNEL2x2_E2', `
#else
.macro KERNEL2x2_E2
#endif
xsmaddadp vs32, vs4, vs16
xsmaddadp vs33, vs5, vs16
xsmaddadp vs34, vs4, vs17
xsmaddadp vs35, vs5, vs17
#if defined(_AIX)
')
#else
.endm
#endif
/* Stand-alone first step: initialize accumulators with xsmuldp. */
#if defined(_AIX)
define(`KERNEL2x2_SUBI1', `
#else
.macro KERNEL2x2_SUBI1
#endif
lxsspx vs0, o0, AO
lxsspx vs1, o4, AO
addi AO, AO, 8
mr T1, BO
lxsspx vs8, o0, T1
lxsspx vs9, o4, T1
addi BO, BO, 8
xsmuldp vs32, vs0, vs8
xsmuldp vs33, vs1, vs8
xsmuldp vs34, vs0, vs9
xsmuldp vs35, vs1, vs9
#if defined(_AIX)
')
#else
.endm
#endif
  3815. #if defined(_AIX)
  3816. define(`KERNEL2x2_SUB1', `
  3817. #else
  3818. .macro KERNEL2x2_SUB1
  3819. #endif
  3820. lxsspx vs0, o0, AO
  3821. lxsspx vs1, o4, AO
  3822. addi AO, AO, 8
  3823. mr T1, BO
  3824. lxsspx vs8, o0, T1
  3825. lxsspx vs9, o4, T1
  3826. addi BO, BO, 8
  3827. xsmaddadp vs32, vs0, vs8
  3828. xsmaddadp vs33, vs1, vs8
  3829. xsmaddadp vs34, vs0, vs9
  3830. xsmaddadp vs35, vs1, vs9
  3831. #if defined(_AIX)
  3832. ')
  3833. #else
  3834. .endm
  3835. #endif
  3836. #if defined(_AIX)
  3837. define(`SAVE2x2', `
  3838. #else
  3839. .macro SAVE2x2
  3840. #endif
  3841. mr T1, CO
  3842. #ifndef TRMMKERNEL
  3843. lxsspx vs0, o0, T1
  3844. lxsspx vs1, o4, T1
  3845. #endif
  3846. #ifdef TRMMKERNEL
  3847. xsmuldp vs0, vs32, alpha_r
  3848. xsmuldp vs1, vs33, alpha_r
  3849. #else
  3850. xsmaddadp vs0, vs32, alpha_r
  3851. xsmaddadp vs1, vs33, alpha_r
  3852. #endif
  3853. stxsspx vs0, o0, T1
  3854. stxsspx vs1, o4, T1
  3855. add T1, T1, LDC
  3856. #ifndef TRMMKERNEL
  3857. lxsspx vs0, o0, T1
  3858. lxsspx vs1, o4, T1
  3859. #endif
  3860. #ifdef TRMMKERNEL
  3861. xsmuldp vs0, vs34, alpha_r
  3862. xsmuldp vs1, vs35, alpha_r
  3863. #else
  3864. xsmaddadp vs0, vs34, alpha_r
  3865. xsmaddadp vs1, vs35, alpha_r
  3866. #endif
  3867. stxsspx vs0, o0, T1
  3868. stxsspx vs1, o4, T1
  3869. add T1, T1, LDC
  3870. addi CO, CO, 8
  3871. #if defined(_AIX)
  3872. ')
  3873. #else
  3874. .endm
  3875. #endif
  3876. /**********************************************************************************************
  3877. * Macros for N=2 and M=1
  3878. **********************************************************************************************/
  3879. #if defined(_AIX)
  3880. define(`LOAD2x1_1', `
  3881. #else
  3882. .macro LOAD2x1_1
  3883. #endif
/* LOAD2x1_1: preload the first K slice for the 2x1 micro kernel.
   On AIX the macro is emitted as an m4 define; elsewhere as a GAS .macro.
   One A float goes to vs0, two B floats to vs8,vs9.
   AO advances 4 bytes, BO advances 8 bytes. */
  3884. lxsspx vs0, o0, AO
  3885. addi AO, AO, 4
  3886. mr T1, BO
  3887. lxsspx vs8, o0, T1
  3888. lxsspx vs9, o4, T1
  3889. addi BO, BO, 8
  3890. #if defined(_AIX)
  3891. ')
  3892. #else
  3893. .endm
  3894. #endif
  3895. #if defined(_AIX)
  3896. define(`KERNEL2x1_I1', `
  3897. #else
  3898. .macro KERNEL2x1_I1
  3899. #endif
/* KERNEL2x1_I1: pipeline start. Fetch the next slice into vs4 (A) and
   vs16,vs17 (B) while initializing vs32 = a0*b0 and vs33 = a0*b1. */
  3900. lxsspx vs4, o0, AO
  3901. addi AO, AO, 4
  3902. mr T1, BO
  3903. lxsspx vs16, o0, T1
  3904. lxsspx vs17, o4, T1
  3905. addi BO, BO, 8
  3906. xsmuldp vs32, vs0, vs8
  3907. xsmuldp vs33, vs0, vs9
  3908. #if defined(_AIX)
  3909. ')
  3910. #else
  3911. .endm
  3912. #endif
  3913. #if defined(_AIX)
  3914. define(`KERNEL2x1_1', `
  3915. #else
  3916. .macro KERNEL2x1_1
  3917. #endif
/* KERNEL2x1_1: even pipeline step. Fetch the next slice into vs4 and
   vs16,vs17 while accumulating the slice held in vs0 and vs8,vs9. */
  3918. lxsspx vs4, o0, AO
  3919. addi AO, AO, 4
  3920. mr T1, BO
  3921. lxsspx vs16, o0, T1
  3922. lxsspx vs17, o4, T1
  3923. addi BO, BO, 8
  3924. xsmaddadp vs32, vs0, vs8
  3925. xsmaddadp vs33, vs0, vs9
  3926. #if defined(_AIX)
  3927. ')
  3928. #else
  3929. .endm
  3930. #endif
  3931. #if defined(_AIX)
  3932. define(`KERNEL2x1_2', `
  3933. #else
  3934. .macro KERNEL2x1_2
  3935. #endif
/* KERNEL2x1_2: odd pipeline step, mirror of KERNEL2x1_1. Fetch into vs0
   and vs8,vs9 while accumulating vs4 with vs16,vs17. */
  3936. lxsspx vs0, o0, AO
  3937. addi AO, AO, 4
  3938. mr T1, BO
  3939. lxsspx vs8, o0, T1
  3940. lxsspx vs9, o4, T1
  3941. addi BO, BO, 8
  3942. xsmaddadp vs32, vs4, vs16
  3943. xsmaddadp vs33, vs4, vs17
  3944. #if defined(_AIX)
  3945. ')
  3946. #else
  3947. .endm
  3948. #endif
  3949. #if defined(_AIX)
  3950. define(`KERNEL2x1_E2', `
  3951. #else
  3952. .macro KERNEL2x1_E2
  3953. #endif
/* KERNEL2x1_E2: pipeline drain. Accumulate the last prefetched slice;
   no memory access, no pointer updates. */
  3954. xsmaddadp vs32, vs4, vs16
  3955. xsmaddadp vs33, vs4, vs17
  3956. #if defined(_AIX)
  3957. ')
  3958. #else
  3959. .endm
  3960. #endif
  3961. #if defined(_AIX)
  3962. define(`KERNEL2x1_SUBI1', `
  3963. #else
  3964. .macro KERNEL2x1_SUBI1
  3965. #endif
/* KERNEL2x1_SUBI1: unpipelined first iteration. Load one slice and start
   the two accumulators with plain multiplies. */
  3966. lxsspx vs0, o0, AO
  3967. addi AO, AO, 4
  3968. mr T1, BO
  3969. lxsspx vs8, o0, T1
  3970. lxsspx vs9, o4, T1
  3971. addi BO, BO, 8
  3972. xsmuldp vs32, vs0, vs8
  3973. xsmuldp vs33, vs0, vs9
  3974. #if defined(_AIX)
  3975. ')
  3976. #else
  3977. .endm
  3978. #endif
  3979. #if defined(_AIX)
  3980. define(`KERNEL2x1_SUB1', `
  3981. #else
  3982. .macro KERNEL2x1_SUB1
  3983. #endif
/* KERNEL2x1_SUB1: unpipelined tail iteration. Load one slice and
   accumulate it into vs32,vs33. */
  3984. lxsspx vs0, o0, AO
  3985. addi AO, AO, 4
  3986. mr T1, BO
  3987. lxsspx vs8, o0, T1
  3988. lxsspx vs9, o4, T1
  3989. addi BO, BO, 8
  3990. xsmaddadp vs32, vs0, vs8
  3991. xsmaddadp vs33, vs0, vs9
  3992. #if defined(_AIX)
  3993. ')
  3994. #else
  3995. .endm
  3996. #endif
  3997. #if defined(_AIX)
  3998. define(`SAVE2x1', `
  3999. #else
  4000. .macro SAVE2x1
  4001. #endif
/* SAVE2x1: write one N=2, M=1 tile of C (one float per row, two rows).
   GEMM path: C = C + alpha_r * acc; TRMM path: C = alpha_r * acc without
   reading C. T1 steps by LDC bytes per row; CO advances 4 bytes. */
  4002. mr T1, CO
  4003. #ifndef TRMMKERNEL
  4004. lxsspx vs0, o0, T1
  4005. #endif
  4006. #ifdef TRMMKERNEL
  4007. xsmuldp vs0, vs32, alpha_r
  4008. #else
  4009. xsmaddadp vs0, vs32, alpha_r
  4010. #endif
  4011. stxsspx vs0, o0, T1
  4012. add T1, T1, LDC
  4013. #ifndef TRMMKERNEL
  4014. lxsspx vs0, o0, T1
  4015. #endif
  4016. #ifdef TRMMKERNEL
  4017. xsmuldp vs0, vs33, alpha_r
  4018. #else
  4019. xsmaddadp vs0, vs33, alpha_r
  4020. #endif
  4021. stxsspx vs0, o0, T1
  4022. add T1, T1, LDC
  4023. addi CO, CO, 4
  4024. #if defined(_AIX)
  4025. ')
  4026. #else
  4027. .endm
  4028. #endif
  4029. /**********************************************************************************************
  4030. * Macros for N=1 and M=16
  4031. **********************************************************************************************/
  4032. #if defined(_AIX)
  4033. define(`LOAD1x16_1', `
  4034. #else
  4035. .macro LOAD1x16_1
  4036. #endif
/* LOAD1x16_1: preload the first K slice for the 1x16 vector kernel.
   On AIX the macro is emitted as an m4 define; elsewhere as a GAS .macro.
   Sixteen A floats load into vs0-vs3 (four 4-float vectors); one B float
   is loaded via vs28 and replicated to all lanes of vs8 with xxspltw.
   AO advances 64 bytes, BO advances 4 bytes. */
  4037. lxvw4x vs0, o0, AO
  4038. lxvw4x vs1, o16, AO
  4039. lxvw4x vs2, o32, AO
  4040. lxvw4x vs3, o48, AO
  4041. addi AO, AO, 64
  4042. lxvw4x vs28, o0, BO
  4043. xxspltw vs8, vs28, 0
  4044. addi BO, BO, 4
  4045. #if defined(_AIX)
  4046. ')
  4047. #else
  4048. .endm
  4049. #endif
  4050. #if defined(_AIX)
  4051. define(`KERNEL1x16_I1', `
  4052. #else
  4053. .macro KERNEL1x16_I1
  4054. #endif
/* KERNEL1x16_I1: pipeline start. Fetch the next slice into vs4-vs7 and
   splat the next B value into vs16 while the current slice initializes
   the accumulators vs32-vs35 with plain vector multiplies. */
  4055. lxvw4x vs4, o0, AO
  4056. lxvw4x vs5, o16, AO
  4057. lxvw4x vs6, o32, AO
  4058. lxvw4x vs7, o48, AO
  4059. addi AO, AO, 64
  4060. lxvw4x vs28, o0, BO
  4061. xxspltw vs16, vs28, 0
  4062. addi BO, BO, 4
  4063. xvmulsp vs32, vs0, vs8
  4064. xvmulsp vs33, vs1, vs8
  4065. xvmulsp vs34, vs2, vs8
  4066. xvmulsp vs35, vs3, vs8
  4067. #if defined(_AIX)
  4068. ')
  4069. #else
  4070. .endm
  4071. #endif
  4072. #if defined(_AIX)
  4073. define(`KERNEL1x16_1', `
  4074. #else
  4075. .macro KERNEL1x16_1
  4076. #endif
/* KERNEL1x16_1: even pipeline step. Fetch the next slice into vs4-vs7
   and vs16 while accumulating the slice held in vs0-vs3 and vs8. */
  4077. lxvw4x vs4, o0, AO
  4078. lxvw4x vs5, o16, AO
  4079. lxvw4x vs6, o32, AO
  4080. lxvw4x vs7, o48, AO
  4081. addi AO, AO, 64
  4082. lxvw4x vs28, o0, BO
  4083. xxspltw vs16, vs28, 0
  4084. addi BO, BO, 4
  4085. xvmaddasp vs32, vs0, vs8
  4086. xvmaddasp vs33, vs1, vs8
  4087. xvmaddasp vs34, vs2, vs8
  4088. xvmaddasp vs35, vs3, vs8
  4089. #if defined(_AIX)
  4090. ')
  4091. #else
  4092. .endm
  4093. #endif
  4094. #if defined(_AIX)
  4095. define(`KERNEL1x16_2', `
  4096. #else
  4097. .macro KERNEL1x16_2
  4098. #endif
/* KERNEL1x16_2: odd pipeline step, mirror of KERNEL1x16_1. Fetch into
   vs0-vs3 and vs8 while accumulating vs4-vs7 with vs16. */
  4099. lxvw4x vs0, o0, AO
  4100. lxvw4x vs1, o16, AO
  4101. lxvw4x vs2, o32, AO
  4102. lxvw4x vs3, o48, AO
  4103. addi AO, AO, 64
  4104. lxvw4x vs28, o0, BO
  4105. xxspltw vs8, vs28, 0
  4106. addi BO, BO, 4
  4107. xvmaddasp vs32, vs4, vs16
  4108. xvmaddasp vs33, vs5, vs16
  4109. xvmaddasp vs34, vs6, vs16
  4110. xvmaddasp vs35, vs7, vs16
  4111. #if defined(_AIX)
  4112. ')
  4113. #else
  4114. .endm
  4115. #endif
  4116. #if defined(_AIX)
  4117. define(`KERNEL1x16_E2', `
  4118. #else
  4119. .macro KERNEL1x16_E2
  4120. #endif
/* KERNEL1x16_E2: pipeline drain. Accumulate the last prefetched slice;
   no memory access, no pointer updates. */
  4121. xvmaddasp vs32, vs4, vs16
  4122. xvmaddasp vs33, vs5, vs16
  4123. xvmaddasp vs34, vs6, vs16
  4124. xvmaddasp vs35, vs7, vs16
  4125. #if defined(_AIX)
  4126. ')
  4127. #else
  4128. .endm
  4129. #endif
  4130. #if defined(_AIX)
  4131. define(`KERNEL1x16_SUBI1', `
  4132. #else
  4133. .macro KERNEL1x16_SUBI1
  4134. #endif
/* KERNEL1x16_SUBI1: unpipelined first iteration. Load one slice and
   start the accumulators with plain vector multiplies. */
  4135. lxvw4x vs0, o0, AO
  4136. lxvw4x vs1, o16, AO
  4137. lxvw4x vs2, o32, AO
  4138. lxvw4x vs3, o48, AO
  4139. addi AO, AO, 64
  4140. lxvw4x vs28, o0, BO
  4141. xxspltw vs8, vs28, 0
  4142. addi BO, BO, 4
  4143. xvmulsp vs32, vs0, vs8
  4144. xvmulsp vs33, vs1, vs8
  4145. xvmulsp vs34, vs2, vs8
  4146. xvmulsp vs35, vs3, vs8
  4147. #if defined(_AIX)
  4148. ')
  4149. #else
  4150. .endm
  4151. #endif
  4152. #if defined(_AIX)
  4153. define(`KERNEL1x16_SUB1', `
  4154. #else
  4155. .macro KERNEL1x16_SUB1
  4156. #endif
/* KERNEL1x16_SUB1: unpipelined tail iteration. Load one slice and
   accumulate it into vs32-vs35. */
  4157. lxvw4x vs0, o0, AO
  4158. lxvw4x vs1, o16, AO
  4159. lxvw4x vs2, o32, AO
  4160. lxvw4x vs3, o48, AO
  4161. addi AO, AO, 64
  4162. lxvw4x vs28, o0, BO
  4163. xxspltw vs8, vs28, 0
  4164. addi BO, BO, 4
  4165. xvmaddasp vs32, vs0, vs8
  4166. xvmaddasp vs33, vs1, vs8
  4167. xvmaddasp vs34, vs2, vs8
  4168. xvmaddasp vs35, vs3, vs8
  4169. #if defined(_AIX)
  4170. ')
  4171. #else
  4172. .endm
  4173. #endif
  4174. #if defined(_AIX)
  4175. define(`SAVE1x16', `
  4176. #else
  4177. .macro SAVE1x16
  4178. #endif
/* SAVE1x16: write one N=1, M=16 row of C (four 4-float vectors).
   GEMM path: load C, C = C + alpha_vr * acc; TRMM path: C = alpha_vr * acc
   without reading C. CO advances 64 bytes. */
  4179. mr T1, CO
  4180. #ifndef TRMMKERNEL
  4181. lxvw4x vs0, o0, T1
  4182. lxvw4x vs1, o16, T1
  4183. lxvw4x vs2, o32, T1
  4184. lxvw4x vs3, o48, T1
  4185. #endif
  4186. #ifdef TRMMKERNEL
  4187. xvmulsp vs0, vs32, alpha_vr
  4188. xvmulsp vs1, vs33, alpha_vr
  4189. xvmulsp vs2, vs34, alpha_vr
  4190. xvmulsp vs3, vs35, alpha_vr
  4191. #else
  4192. xvmaddasp vs0, vs32, alpha_vr
  4193. xvmaddasp vs1, vs33, alpha_vr
  4194. xvmaddasp vs2, vs34, alpha_vr
  4195. xvmaddasp vs3, vs35, alpha_vr
  4196. #endif
  4197. stxvw4x vs0, o0, T1
  4198. stxvw4x vs1, o16, T1
  4199. stxvw4x vs2, o32, T1
  4200. stxvw4x vs3, o48, T1
  4201. add T1, T1, LDC
  4202. addi CO, CO, 64
  4203. #if defined(_AIX)
  4204. ')
  4205. #else
  4206. .endm
  4207. #endif
  4208. /**********************************************************************************************
  4209. * Macros for N=1 and M=8
  4210. **********************************************************************************************/
  4211. #if defined(_AIX)
  4212. define(`LOAD1x8_1', `
  4213. #else
  4214. .macro LOAD1x8_1
  4215. #endif
/* LOAD1x8_1: preload the first K slice for the 1x8 vector kernel.
   On AIX the macro is emitted as an m4 define; elsewhere as a GAS .macro.
   Eight A floats load into vs0,vs1; one B float is splatted into vs8.
   AO advances 32 bytes, BO advances 4 bytes. */
  4216. lxvw4x vs0, o0, AO
  4217. lxvw4x vs1, o16, AO
  4218. addi AO, AO, 32
  4219. lxvw4x vs28, o0, BO
  4220. xxspltw vs8, vs28, 0
  4221. addi BO, BO, 4
  4222. #if defined(_AIX)
  4223. ')
  4224. #else
  4225. .endm
  4226. #endif
  4227. #if defined(_AIX)
  4228. define(`KERNEL1x8_I1', `
  4229. #else
  4230. .macro KERNEL1x8_I1
  4231. #endif
/* KERNEL1x8_I1: pipeline start. Fetch the next slice into vs4,vs5 and
   vs16 while the current slice initializes vs32,vs33 with multiplies. */
  4232. lxvw4x vs4, o0, AO
  4233. lxvw4x vs5, o16, AO
  4234. addi AO, AO, 32
  4235. lxvw4x vs28, o0, BO
  4236. xxspltw vs16, vs28, 0
  4237. addi BO, BO, 4
  4238. xvmulsp vs32, vs0, vs8
  4239. xvmulsp vs33, vs1, vs8
  4240. #if defined(_AIX)
  4241. ')
  4242. #else
  4243. .endm
  4244. #endif
  4245. #if defined(_AIX)
  4246. define(`KERNEL1x8_1', `
  4247. #else
  4248. .macro KERNEL1x8_1
  4249. #endif
/* KERNEL1x8_1: even pipeline step. Fetch the next slice into vs4,vs5 and
   vs16 while accumulating the slice held in vs0,vs1 and vs8. */
  4250. lxvw4x vs4, o0, AO
  4251. lxvw4x vs5, o16, AO
  4252. addi AO, AO, 32
  4253. lxvw4x vs28, o0, BO
  4254. xxspltw vs16, vs28, 0
  4255. addi BO, BO, 4
  4256. xvmaddasp vs32, vs0, vs8
  4257. xvmaddasp vs33, vs1, vs8
  4258. #if defined(_AIX)
  4259. ')
  4260. #else
  4261. .endm
  4262. #endif
  4263. #if defined(_AIX)
  4264. define(`KERNEL1x8_2', `
  4265. #else
  4266. .macro KERNEL1x8_2
  4267. #endif
/* KERNEL1x8_2: odd pipeline step, mirror of KERNEL1x8_1. Fetch into
   vs0,vs1 and vs8 while accumulating vs4,vs5 with vs16. */
  4268. lxvw4x vs0, o0, AO
  4269. lxvw4x vs1, o16, AO
  4270. addi AO, AO, 32
  4271. lxvw4x vs28, o0, BO
  4272. xxspltw vs8, vs28, 0
  4273. addi BO, BO, 4
  4274. xvmaddasp vs32, vs4, vs16
  4275. xvmaddasp vs33, vs5, vs16
  4276. #if defined(_AIX)
  4277. ')
  4278. #else
  4279. .endm
  4280. #endif
  4281. #if defined(_AIX)
  4282. define(`KERNEL1x8_E2', `
  4283. #else
  4284. .macro KERNEL1x8_E2
  4285. #endif
/* KERNEL1x8_E2: pipeline drain. Accumulate the last prefetched slice;
   no memory access, no pointer updates. */
  4286. xvmaddasp vs32, vs4, vs16
  4287. xvmaddasp vs33, vs5, vs16
  4288. #if defined(_AIX)
  4289. ')
  4290. #else
  4291. .endm
  4292. #endif
  4293. #if defined(_AIX)
  4294. define(`KERNEL1x8_SUBI1', `
  4295. #else
  4296. .macro KERNEL1x8_SUBI1
  4297. #endif
/* KERNEL1x8_SUBI1: unpipelined first iteration. Load one slice and start
   the accumulators with plain vector multiplies. */
  4298. lxvw4x vs0, o0, AO
  4299. lxvw4x vs1, o16, AO
  4300. addi AO, AO, 32
  4301. lxvw4x vs28, o0, BO
  4302. xxspltw vs8, vs28, 0
  4303. addi BO, BO, 4
  4304. xvmulsp vs32, vs0, vs8
  4305. xvmulsp vs33, vs1, vs8
  4306. #if defined(_AIX)
  4307. ')
  4308. #else
  4309. .endm
  4310. #endif
  4311. #if defined(_AIX)
  4312. define(`KERNEL1x8_SUB1', `
  4313. #else
  4314. .macro KERNEL1x8_SUB1
  4315. #endif
/* KERNEL1x8_SUB1: unpipelined tail iteration. Load one slice and
   accumulate it into vs32,vs33. */
  4316. lxvw4x vs0, o0, AO
  4317. lxvw4x vs1, o16, AO
  4318. addi AO, AO, 32
  4319. lxvw4x vs28, o0, BO
  4320. xxspltw vs8, vs28, 0
  4321. addi BO, BO, 4
  4322. xvmaddasp vs32, vs0, vs8
  4323. xvmaddasp vs33, vs1, vs8
  4324. #if defined(_AIX)
  4325. ')
  4326. #else
  4327. .endm
  4328. #endif
  4329. #if defined(_AIX)
  4330. define(`SAVE1x8', `
  4331. #else
  4332. .macro SAVE1x8
  4333. #endif
/* SAVE1x8: write one N=1, M=8 row of C (two 4-float vectors).
   GEMM path: load C, C = C + alpha_vr * acc; TRMM path: C = alpha_vr * acc
   without reading C. CO advances 32 bytes. */
  4334. mr T1, CO
  4335. #ifndef TRMMKERNEL
  4336. lxvw4x vs0, o0, T1
  4337. lxvw4x vs1, o16, T1
  4338. #endif
  4339. #ifdef TRMMKERNEL
  4340. xvmulsp vs0, vs32, alpha_vr
  4341. xvmulsp vs1, vs33, alpha_vr
  4342. #else
  4343. xvmaddasp vs0, vs32, alpha_vr
  4344. xvmaddasp vs1, vs33, alpha_vr
  4345. #endif
  4346. stxvw4x vs0, o0, T1
  4347. stxvw4x vs1, o16, T1
  4348. add T1, T1, LDC
  4349. addi CO, CO, 32
  4350. #if defined(_AIX)
  4351. ')
  4352. #else
  4353. .endm
  4354. #endif
  4355. /**********************************************************************************************
  4356. * Macros for N=1 and M=4
  4357. **********************************************************************************************/
  4358. #if defined(_AIX)
  4359. define(`LOAD1x4_1', `
  4360. #else
  4361. .macro LOAD1x4_1
  4362. #endif
/* LOAD1x4_1: preload the first K slice for the 1x4 vector kernel.
   On AIX the macro is emitted as an m4 define; elsewhere as a GAS .macro.
   Four A floats load into vs0; one B float is splatted into vs8.
   AO advances 16 bytes, BO advances 4 bytes. */
  4363. lxvw4x vs0, o0, AO
  4364. addi AO, AO, 16
  4365. lxvw4x vs28, o0, BO
  4366. xxspltw vs8, vs28, 0
  4367. addi BO, BO, 4
  4368. #if defined(_AIX)
  4369. ')
  4370. #else
  4371. .endm
  4372. #endif
  4373. #if defined(_AIX)
  4374. define(`KERNEL1x4_I1', `
  4375. #else
  4376. .macro KERNEL1x4_I1
  4377. #endif
/* KERNEL1x4_I1: pipeline start. Fetch the next slice into vs4 and vs16
   while the current slice initializes vs32 with a plain multiply. */
  4378. lxvw4x vs4, o0, AO
  4379. addi AO, AO, 16
  4380. lxvw4x vs28, o0, BO
  4381. xxspltw vs16, vs28, 0
  4382. addi BO, BO, 4
  4383. xvmulsp vs32, vs0, vs8
  4384. #if defined(_AIX)
  4385. ')
  4386. #else
  4387. .endm
  4388. #endif
  4389. #if defined(_AIX)
  4390. define(`KERNEL1x4_1', `
  4391. #else
  4392. .macro KERNEL1x4_1
  4393. #endif
/* KERNEL1x4_1: even pipeline step. Fetch the next slice into vs4 and
   vs16 while accumulating the slice held in vs0 and vs8. */
  4394. lxvw4x vs4, o0, AO
  4395. addi AO, AO, 16
  4396. lxvw4x vs28, o0, BO
  4397. xxspltw vs16, vs28, 0
  4398. addi BO, BO, 4
  4399. xvmaddasp vs32, vs0, vs8
  4400. #if defined(_AIX)
  4401. ')
  4402. #else
  4403. .endm
  4404. #endif
  4405. #if defined(_AIX)
  4406. define(`KERNEL1x4_2', `
  4407. #else
  4408. .macro KERNEL1x4_2
  4409. #endif
/* KERNEL1x4_2: odd pipeline step, mirror of KERNEL1x4_1. Fetch into vs0
   and vs8 while accumulating vs4 with vs16. */
  4410. lxvw4x vs0, o0, AO
  4411. addi AO, AO, 16
  4412. lxvw4x vs28, o0, BO
  4413. xxspltw vs8, vs28, 0
  4414. addi BO, BO, 4
  4415. xvmaddasp vs32, vs4, vs16
  4416. #if defined(_AIX)
  4417. ')
  4418. #else
  4419. .endm
  4420. #endif
  4421. #if defined(_AIX)
  4422. define(`KERNEL1x4_E2', `
  4423. #else
  4424. .macro KERNEL1x4_E2
  4425. #endif
/* KERNEL1x4_E2: pipeline drain. Accumulate the last prefetched slice;
   no memory access, no pointer updates. */
  4426. xvmaddasp vs32, vs4, vs16
  4427. #if defined(_AIX)
  4428. ')
  4429. #else
  4430. .endm
  4431. #endif
  4432. #if defined(_AIX)
  4433. define(`KERNEL1x4_SUBI1', `
  4434. #else
  4435. .macro KERNEL1x4_SUBI1
  4436. #endif
/* KERNEL1x4_SUBI1: unpipelined first iteration. Load one slice and start
   the accumulator with a plain multiply. */
  4437. lxvw4x vs0, o0, AO
  4438. addi AO, AO, 16
  4439. lxvw4x vs28, o0, BO
  4440. xxspltw vs8, vs28, 0
  4441. addi BO, BO, 4
  4442. xvmulsp vs32, vs0, vs8
  4443. #if defined(_AIX)
  4444. ')
  4445. #else
  4446. .endm
  4447. #endif
  4448. #if defined(_AIX)
  4449. define(`KERNEL1x4_SUB1', `
  4450. #else
  4451. .macro KERNEL1x4_SUB1
  4452. #endif
/* KERNEL1x4_SUB1: unpipelined tail iteration. Load one slice and
   accumulate it into vs32. */
  4453. lxvw4x vs0, o0, AO
  4454. addi AO, AO, 16
  4455. lxvw4x vs28, o0, BO
  4456. xxspltw vs8, vs28, 0
  4457. addi BO, BO, 4
  4458. xvmaddasp vs32, vs0, vs8
  4459. #if defined(_AIX)
  4460. ')
  4461. #else
  4462. .endm
  4463. #endif
  4464. #if defined(_AIX)
  4465. define(`SAVE1x4', `
  4466. #else
  4467. .macro SAVE1x4
  4468. #endif
/* SAVE1x4: write one N=1, M=4 row of C (one 4-float vector).
   GEMM path: load C, C = C + alpha_vr * acc; TRMM path: C = alpha_vr * acc
   without reading C. CO advances 16 bytes. */
  4469. mr T1, CO
  4470. #ifndef TRMMKERNEL
  4471. lxvw4x vs0, o0, T1
  4472. #endif
  4473. #ifdef TRMMKERNEL
  4474. xvmulsp vs0, vs32, alpha_vr
  4475. #else
  4476. xvmaddasp vs0, vs32, alpha_vr
  4477. #endif
  4478. stxvw4x vs0, o0, T1
  4479. add T1, T1, LDC
  4480. addi CO, CO, 16
  4481. #if defined(_AIX)
  4482. ')
  4483. #else
  4484. .endm
  4485. #endif
  4486. /**********************************************************************************************
  4487. * Macros for N=1 and M=2
  4488. **********************************************************************************************/
  4489. #if defined(_AIX)
  4490. define(`LOAD1x2_1', `
  4491. #else
  4492. .macro LOAD1x2_1
  4493. #endif
/* LOAD1x2_1: preload the first K slice for the 1x2 scalar kernel.
   On AIX the macro is emitted as an m4 define; elsewhere as a GAS .macro.
   Two A floats load into vs0,vs1; one B float into vs8 (lxsspx widens a
   single to double). AO advances 8 bytes, BO advances 4 bytes. */
  4494. lxsspx vs0, o0, AO
  4495. lxsspx vs1, o4, AO
  4496. addi AO, AO, 8
  4497. mr T1, BO
  4498. lxsspx vs8, o0, T1
  4499. addi BO, BO, 4
  4500. #if defined(_AIX)
  4501. ')
  4502. #else
  4503. .endm
  4504. #endif
  4505. #if defined(_AIX)
  4506. define(`KERNEL1x2_I1', `
  4507. #else
  4508. .macro KERNEL1x2_I1
  4509. #endif
/* KERNEL1x2_I1: pipeline start. Fetch the next slice into vs4,vs5 and
   vs16 while the current slice initializes vs32,vs33 with multiplies. */
  4510. lxsspx vs4, o0, AO
  4511. lxsspx vs5, o4, AO
  4512. addi AO, AO, 8
  4513. mr T1, BO
  4514. lxsspx vs16, o0, T1
  4515. addi BO, BO, 4
  4516. xsmuldp vs32, vs0, vs8
  4517. xsmuldp vs33, vs1, vs8
  4518. #if defined(_AIX)
  4519. ')
  4520. #else
  4521. .endm
  4522. #endif
  4523. #if defined(_AIX)
  4524. define(`KERNEL1x2_1', `
  4525. #else
  4526. .macro KERNEL1x2_1
  4527. #endif
/* KERNEL1x2_1: even pipeline step. Fetch the next slice into vs4,vs5 and
   vs16 while accumulating the slice held in vs0,vs1 and vs8. */
  4528. lxsspx vs4, o0, AO
  4529. lxsspx vs5, o4, AO
  4530. addi AO, AO, 8
  4531. mr T1, BO
  4532. lxsspx vs16, o0, T1
  4533. addi BO, BO, 4
  4534. xsmaddadp vs32, vs0, vs8
  4535. xsmaddadp vs33, vs1, vs8
  4536. #if defined(_AIX)
  4537. ')
  4538. #else
  4539. .endm
  4540. #endif
  4541. #if defined(_AIX)
  4542. define(`KERNEL1x2_2', `
  4543. #else
  4544. .macro KERNEL1x2_2
  4545. #endif
/* KERNEL1x2_2: odd pipeline step, mirror of KERNEL1x2_1. Fetch into
   vs0,vs1 and vs8 while accumulating vs4,vs5 with vs16. */
  4546. lxsspx vs0, o0, AO
  4547. lxsspx vs1, o4, AO
  4548. addi AO, AO, 8
  4549. mr T1, BO
  4550. lxsspx vs8, o0, T1
  4551. addi BO, BO, 4
  4552. xsmaddadp vs32, vs4, vs16
  4553. xsmaddadp vs33, vs5, vs16
  4554. #if defined(_AIX)
  4555. ')
  4556. #else
  4557. .endm
  4558. #endif
  4559. #if defined(_AIX)
  4560. define(`KERNEL1x2_E2', `
  4561. #else
  4562. .macro KERNEL1x2_E2
  4563. #endif
/* KERNEL1x2_E2: pipeline drain. Accumulate the last prefetched slice;
   no memory access, no pointer updates. */
  4564. xsmaddadp vs32, vs4, vs16
  4565. xsmaddadp vs33, vs5, vs16
  4566. #if defined(_AIX)
  4567. ')
  4568. #else
  4569. .endm
  4570. #endif
  4571. #if defined(_AIX)
  4572. define(`KERNEL1x2_SUBI1', `
  4573. #else
  4574. .macro KERNEL1x2_SUBI1
  4575. #endif
/* KERNEL1x2_SUBI1: unpipelined first iteration. Load one slice and start
   the accumulators with plain multiplies. */
  4576. lxsspx vs0, o0, AO
  4577. lxsspx vs1, o4, AO
  4578. addi AO, AO, 8
  4579. mr T1, BO
  4580. lxsspx vs8, o0, T1
  4581. addi BO, BO, 4
  4582. xsmuldp vs32, vs0, vs8
  4583. xsmuldp vs33, vs1, vs8
  4584. #if defined(_AIX)
  4585. ')
  4586. #else
  4587. .endm
  4588. #endif
  4589. #if defined(_AIX)
  4590. define(`KERNEL1x2_SUB1', `
  4591. #else
  4592. .macro KERNEL1x2_SUB1
  4593. #endif
/* KERNEL1x2_SUB1: unpipelined tail iteration. Load one slice and
   accumulate it into vs32,vs33. */
  4594. lxsspx vs0, o0, AO
  4595. lxsspx vs1, o4, AO
  4596. addi AO, AO, 8
  4597. mr T1, BO
  4598. lxsspx vs8, o0, T1
  4599. addi BO, BO, 4
  4600. xsmaddadp vs32, vs0, vs8
  4601. xsmaddadp vs33, vs1, vs8
  4602. #if defined(_AIX)
  4603. ')
  4604. #else
  4605. .endm
  4606. #endif
  4607. #if defined(_AIX)
  4608. define(`SAVE1x2', `
  4609. #else
  4610. .macro SAVE1x2
  4611. #endif
/* SAVE1x2: write one N=1, M=2 row of C with scalar loads and stores.
   The multiply-add runs in double precision with scalar alpha_r and
   stxsspx rounds back to single. GEMM path reads C first; TRMM path
   overwrites it. CO advances 8 bytes. */
  4612. mr T1, CO
  4613. #ifndef TRMMKERNEL
  4614. lxsspx vs0, o0, T1
  4615. lxsspx vs1, o4, T1
  4616. #endif
  4617. #ifdef TRMMKERNEL
  4618. xsmuldp vs0, vs32, alpha_r
  4619. xsmuldp vs1, vs33, alpha_r
  4620. #else
  4621. xsmaddadp vs0, vs32, alpha_r
  4622. xsmaddadp vs1, vs33, alpha_r
  4623. #endif
  4624. stxsspx vs0, o0, T1
  4625. stxsspx vs1, o4, T1
  4626. add T1, T1, LDC
  4627. addi CO, CO, 8
  4628. #if defined(_AIX)
  4629. ')
  4630. #else
  4631. .endm
  4632. #endif
  4633. /**********************************************************************************************
  4634. * Macros for N=1 and M=1
  4635. **********************************************************************************************/
  4636. #if defined(_AIX)
  4637. define(`LOAD1x1_1', `
  4638. #else
  4639. .macro LOAD1x1_1
  4640. #endif
/* LOAD1x1_1: preload the first K slice for the 1x1 scalar kernel.
   On AIX the macro is emitted as an m4 define; elsewhere as a GAS .macro.
   One A float loads into vs0 and one B float into vs8.
   AO and BO each advance 4 bytes. */
  4641. lxsspx vs0, o0, AO
  4642. addi AO, AO, 4
  4643. mr T1, BO
  4644. lxsspx vs8, o0, T1
  4645. addi BO, BO, 4
  4646. #if defined(_AIX)
  4647. ')
  4648. #else
  4649. .endm
  4650. #endif
  4651. #if defined(_AIX)
  4652. define(`KERNEL1x1_I1', `
  4653. #else
  4654. .macro KERNEL1x1_I1
  4655. #endif
/* KERNEL1x1_I1: pipeline start. Fetch the next slice into vs4 and vs16
   while the current slice initializes vs32 with a plain multiply. */
  4656. lxsspx vs4, o0, AO
  4657. addi AO, AO, 4
  4658. mr T1, BO
  4659. lxsspx vs16, o0, T1
  4660. addi BO, BO, 4
  4661. xsmuldp vs32, vs0, vs8
  4662. #if defined(_AIX)
  4663. ')
  4664. #else
  4665. .endm
  4666. #endif
  4667. #if defined(_AIX)
  4668. define(`KERNEL1x1_1', `
  4669. #else
  4670. .macro KERNEL1x1_1
  4671. #endif
/* KERNEL1x1_1: even pipeline step. Fetch the next slice into vs4 and
   vs16 while accumulating the slice held in vs0 and vs8. */
  4672. lxsspx vs4, o0, AO
  4673. addi AO, AO, 4
  4674. mr T1, BO
  4675. lxsspx vs16, o0, T1
  4676. addi BO, BO, 4
  4677. xsmaddadp vs32, vs0, vs8
  4678. #if defined(_AIX)
  4679. ')
  4680. #else
  4681. .endm
  4682. #endif
  4683. #if defined(_AIX)
  4684. define(`KERNEL1x1_2', `
  4685. #else
  4686. .macro KERNEL1x1_2
  4687. #endif
/* KERNEL1x1_2: odd pipeline step, mirror of KERNEL1x1_1. Fetch into vs0
   and vs8 while accumulating vs4 with vs16. */
  4688. lxsspx vs0, o0, AO
  4689. addi AO, AO, 4
  4690. mr T1, BO
  4691. lxsspx vs8, o0, T1
  4692. addi BO, BO, 4
  4693. xsmaddadp vs32, vs4, vs16
  4694. #if defined(_AIX)
  4695. ')
  4696. #else
  4697. .endm
  4698. #endif
  4699. #if defined(_AIX)
  4700. define(`KERNEL1x1_E2', `
  4701. #else
  4702. .macro KERNEL1x1_E2
  4703. #endif
/* KERNEL1x1_E2: pipeline drain. Accumulate the last prefetched slice;
   no memory access, no pointer updates. */
  4704. xsmaddadp vs32, vs4, vs16
  4705. #if defined(_AIX)
  4706. ')
  4707. #else
  4708. .endm
  4709. #endif
  4710. #if defined(_AIX)
  4711. define(`KERNEL1x1_SUBI1', `
  4712. #else
  4713. .macro KERNEL1x1_SUBI1
  4714. #endif
/* KERNEL1x1_SUBI1: unpipelined first iteration. Load one slice and start
   the accumulator with a plain multiply. */
  4715. lxsspx vs0, o0, AO
  4716. addi AO, AO, 4
  4717. mr T1, BO
  4718. lxsspx vs8, o0, T1
  4719. addi BO, BO, 4
  4720. xsmuldp vs32, vs0, vs8
  4721. #if defined(_AIX)
  4722. ')
  4723. #else
  4724. .endm
  4725. #endif
  4726. #if defined(_AIX)
  4727. define(`KERNEL1x1_SUB1', `
  4728. #else
  4729. .macro KERNEL1x1_SUB1
  4730. #endif
/* KERNEL1x1_SUB1: unpipelined tail iteration. Load one slice and
   accumulate it into vs32. */
  4731. lxsspx vs0, o0, AO
  4732. addi AO, AO, 4
  4733. mr T1, BO
  4734. lxsspx vs8, o0, T1
  4735. addi BO, BO, 4
  4736. xsmaddadp vs32, vs0, vs8
  4737. #if defined(_AIX)
  4738. ')
  4739. #else
  4740. .endm
  4741. #endif
  4742. #if defined(_AIX)
  4743. define(`SAVE1x1', `
  4744. #else
  4745. .macro SAVE1x1
  4746. #endif
/* SAVE1x1: write one N=1, M=1 element of C. GEMM path: C = C + alpha_r
   times the accumulator; TRMM path: C = alpha_r times the accumulator,
   C is never read. CO advances 4 bytes. */
  4747. mr T1, CO
  4748. #ifndef TRMMKERNEL
  4749. lxsspx vs0, o0, T1
  4750. #endif
  4751. #ifdef TRMMKERNEL
  4752. xsmuldp vs0, vs32, alpha_r
  4753. #else
  4754. xsmaddadp vs0, vs32, alpha_r
  4755. #endif
  4756. stxsspx vs0, o0, T1
  4757. add T1, T1, LDC
  4758. addi CO, CO, 4
  4759. #if defined(_AIX)
  4760. ')
  4761. #else
  4762. .endm
  4763. #endif