
sgemv_t.S 121 kB

/* Copyright 2009, 2010 The University of Texas at Austin. */
/* All rights reserved. */
/* */
/* Redistribution and use in source and binary forms, with or */
/* without modification, are permitted provided that the following */
/* conditions are met: */
/* */
/* 1. Redistributions of source code must retain the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer. */
/* */
/* 2. Redistributions in binary form must reproduce the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer in the documentation and/or other materials */
/* provided with the distribution. */
/* */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
/* POSSIBILITY OF SUCH DAMAGE. */
/* */
/* The views and conclusions contained in the software and */
/* documentation are those of the authors and should not be */
/* interpreted as representing official policies, either expressed */
/* or implied, of The University of Texas at Austin. */
/*********************************************************************/
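/* sgemv_t: single-precision GEMV with a transposed matrix, */
/* y := alpha * A^T * x + y. Each column of A is reduced */
/* against a contiguous copy of x, scaled by alpha, and */
/* accumulated into y. A scalar sketch of the operation */
/* (illustrative only; assumes column-major A): */
/* */
/* for (j = 0; j < n; j++) { */
/*     float s = 0.0f; */
/*     for (i = 0; i < m; i++) */
/*         s += a[i + j * lda] * x[i * incx]; */
/*     y[j * incy] += alpha * s; */
/* } */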
#define ASSEMBLER
#include "common.h"
#include "l2param.h"
#if GEMV_UNROLL < 4
#undef GEMV_UNROLL
#define GEMV_UNROLL 4
#endif
#ifndef WINDOWS_ABI
#define STACKSIZE 128
#define OLD_M %rdi
#define OLD_N %rsi
#define OLD_A %rcx
#define OLD_LDA %r8
#define STACK_INCX 8 + STACKSIZE(%rsp)
#define STACK_Y 16 + STACKSIZE(%rsp)
#define STACK_INCY 24 + STACKSIZE(%rsp)
#define STACK_BUFFER 32 + STACKSIZE(%rsp)
#define MMM 56(%rsp)
#define NN 64(%rsp)
#define AA 72(%rsp)
#define LDAX 80(%rsp)
#else
#define STACKSIZE 288
#define OLD_M %rcx
#define OLD_N %rdx
#define OLD_A 40 + STACKSIZE(%rsp)
#define OLD_LDA 48 + STACKSIZE(%rsp)
#define OLD_X 56 + STACKSIZE(%rsp)
#define STACK_INCX 64 + STACKSIZE(%rsp)
#define STACK_Y 72 + STACKSIZE(%rsp)
#define STACK_INCY 80 + STACKSIZE(%rsp)
#define STACK_BUFFER 88 + STACKSIZE(%rsp)
#define MMM 232(%rsp)
#define NN 240(%rsp)
#define AA 248(%rsp)
#define LDAX 256(%rsp)
#endif
#define LDA %r8
#define X %r9
#define INCX %rsi
#define INCY %rdi
#define M %r10
#define N %r11
#define A %r12
#define Y %r14
#define BUFFER %r13
#define I %rax
#define A1 %rbx
#define A2 %rcx
#define LDA3 %rdx
#define X1 %rbp
#define Y1 INCX
#ifdef ALIGNED_ACCESS
#define MM %r15
#else
#define MM M
#endif
#define ALPHA %xmm7
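/* Prologue: allocate the stack frame, save the callee-saved */
/* GPRs (plus %rdi, %rsi and xmm6-xmm15 on the Windows ABI), */
/* stash the arguments in the slots defined above, and */
/* broadcast alpha into every lane of ALPHA. */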
PROLOGUE
PROFCODE
subq $STACKSIZE, %rsp
movq %rbx, 0(%rsp)
movq %rbp, 8(%rsp)
movq %r12, 16(%rsp)
movq %r13, 24(%rsp)
movq %r14, 32(%rsp)
movq %r15, 40(%rsp)
#ifdef WINDOWS_ABI
movq %rdi, 48(%rsp)
movq %rsi, 56(%rsp)
movups %xmm6, 64(%rsp)
movups %xmm7, 80(%rsp)
movups %xmm8, 96(%rsp)
movups %xmm9, 112(%rsp)
movups %xmm10, 128(%rsp)
movups %xmm11, 144(%rsp)
movups %xmm12, 160(%rsp)
movups %xmm13, 176(%rsp)
movups %xmm14, 192(%rsp)
movups %xmm15, 208(%rsp)
movq OLD_M, MMM
movq OLD_N, NN
movq OLD_A, X
movq X, AA
movq OLD_LDA, X
movq X, LDAX
movq OLD_X, X
#else
movq OLD_M, MMM
movq OLD_N, NN
movq OLD_A, AA
movq OLD_LDA, LDAX
#endif
#ifndef WINDOWS_ABI
pshufd $0, %xmm0, ALPHA
#else
pshufd $0, %xmm3, ALPHA
#endif
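/* Outer blocking loop: M is consumed in chunks of at most */
/* 2^22 rows, presumably to bound the size of the buffered */
/* copy of x; the last pass handles whatever remains in MMM. */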
.L0t:
xorq M,M
addq $1,M
salq $22,M
subq M,MMM
jge .L00t
ALIGN_4
movq MMM,%rax
addq M,%rax
jle .L999x
movq %rax,M
.L00t:
movq LDAX,LDA
movq NN,N
movq AA,A
movq STACK_INCX, INCX
movq STACK_Y, Y
movq STACK_INCY, INCY
movq STACK_BUFFER, BUFFER
leaq (,INCX, SIZE), INCX
leaq (,INCY, SIZE), INCY
leaq (,LDA, SIZE), LDA
leaq (LDA, LDA, 2), LDA3
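/* INCX, INCY and LDA are now byte strides, and LDA3 = 3 * LDA */
/* addresses the fourth column of a group in one instruction. */
/* Under ALIGNED_ACCESS, MM folds A's element misalignment */
/* into the trip counts so the vector loops below see 16-byte */
/* aligned addresses after a short scalar head. */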
#ifdef ALIGNED_ACCESS
movq M, MM
testq $4 * SIZE - 1, A
je .L0X
cmpq $3, M
jle .L0X
movq A, MM
sarq $BASE_SHIFT, MM
andq $3, MM
subq $4, MM
addq M, MM
.L0X:
#endif
testq M, M
jle .L999
testq N, N
jle .L999
ALIGN_4
subq $-32 * SIZE, A
#ifdef ALIGNED_ACCESS
movq A, %rax
andq $4 * SIZE - 1, %rax
addq %rax, BUFFER
#endif
movq BUFFER, X1
movq M, I
sarq $3, I
jle .L05
ALIGN_4
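/* Gather x (stride INCX) into the contiguous buffer, eight */
/* elements per iteration, then the remaining M % 8 elements */
/* one at a time; the kernels below then read x with plain */
/* unit-stride vector loads. */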
.L02:
movss (X), %xmm0
addq INCX, X
movss (X), %xmm1
addq INCX, X
movss (X), %xmm2
addq INCX, X
movss (X), %xmm3
addq INCX, X
movss (X), %xmm4
addq INCX, X
movss (X), %xmm5
addq INCX, X
movss (X), %xmm6
addq INCX, X
movss (X), %xmm8
addq INCX, X
movss %xmm0, 0 * SIZE(X1)
movss %xmm1, 1 * SIZE(X1)
movss %xmm2, 2 * SIZE(X1)
movss %xmm3, 3 * SIZE(X1)
movss %xmm4, 4 * SIZE(X1)
movss %xmm5, 5 * SIZE(X1)
movss %xmm6, 6 * SIZE(X1)
movss %xmm8, 7 * SIZE(X1)
addq $8 * SIZE, X1
decq I
jg .L02
ALIGN_4
.L05:
movq M, I
andq $7, I
jle .L10
ALIGN_2
.L06:
movss (X), %xmm0
addq INCX, X
movss %xmm0, 0 * SIZE(X1)
addq $SIZE, X1
decq I
jg .L06
ALIGN_4
.L10:
movq Y, Y1
#ifdef ALIGNED_ACCESS
testq $4 * SIZE - 1, LDA
jne .L100
#endif
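/* Eight-column block: process eight columns of A per pass, */
/* one dot-product accumulator per column in xmm8-xmm15. */
/* A1 points at the first four columns and A2 at the next */
/* four; the .L1X heads peel rows until A1 is 16-byte aligned. */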
#if GEMV_UNROLL >= 8
cmpq $8, N
jl .L20
ALIGN_3
.L11:
subq $8, N
leaq 32 * SIZE(BUFFER), X1
movq A, A1
leaq (A1, LDA, 4), A2
leaq (A1, LDA, 8), A
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
xorps %xmm10, %xmm10
xorps %xmm11, %xmm11
xorps %xmm12, %xmm12
xorps %xmm13, %xmm13
xorps %xmm14, %xmm14
xorps %xmm15, %xmm15
#ifdef ALIGNED_ACCESS
cmpq $3, M
jle .L17
testq $SIZE, A1
je .L1X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA, 1), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A1, LDA, 2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
movss -32 * SIZE(A1, LDA3, 1), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm11
movss -32 * SIZE(A2), %xmm0
mulss %xmm4, %xmm0
addss %xmm0, %xmm12
movss -32 * SIZE(A2, LDA, 1), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm13
movss -32 * SIZE(A2, LDA, 2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm14
movss -32 * SIZE(A2, LDA3, 1), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm15
addq $1 * SIZE, A1
addq $1 * SIZE, A2
addq $1 * SIZE, X1
ALIGN_3
.L1X:
testq $2 * SIZE, A1
je .L1XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA, 1), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A1, LDA, 2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
movsd -32 * SIZE(A1, LDA3, 1), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
movsd -32 * SIZE(A2), %xmm0
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
movsd -32 * SIZE(A2, LDA, 1), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
movsd -32 * SIZE(A2, LDA, 2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
movsd -32 * SIZE(A2, LDA3, 1), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm15
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_3
.L1XX:
#endif
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
#ifdef PREFETCHW
PREFETCHW 8 * SIZE(Y1)
#endif
movq MM, I
sarq $4, I
jle .L15
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 2, %xmm2)
MOVUPS_A2 (-32 * SIZE, A1, LDA3, 1, %xmm3)
decq I
jle .L13
ALIGN_4
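/* Main loop: 16 rows by 8 columns per iteration, with loads */
/* for the next group interleaved into the multiply-adds of */
/* the current one and each panel of A prefetched a fixed */
/* distance (PREFETCHSIZE) ahead. */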
.L12:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-32 * SIZE, A2, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-32 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-32 * SIZE, A2, LDA3, 1, %xmm3)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A1, LDA, 1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-28 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm3, %xmm15
MOVUPS_A2 (-28 * SIZE, A1, LDA3, 1, %xmm3)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A1, LDA, 2)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A2, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-28 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-28 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-28 * SIZE, A2, LDA3, 1, %xmm3)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A1, LDA3)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm12
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-24 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-24 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm3, %xmm15
MOVUPS_A2 (-24 * SIZE, A1, LDA3, 1, %xmm3)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A2)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A2, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-24 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-24 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-24 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A2, LDA, 1)
#endif
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-20 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-20 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm3, %xmm15
MOVUPS_A2 (-20 * SIZE, A1, LDA3, 1, %xmm3)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A2, LDA, 2)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A2, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-20 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-20 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-20 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm12
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) - 128 + PREOFFSET(A2, LDA3)
#endif
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-16 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm14
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(X1)
#endif
MOVUPS_A2 (-16 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm3, %xmm15
MOVUPS_A2 (-16 * SIZE, A1, LDA3, 1, %xmm3)
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
decq I
jg .L12
ALIGN_4
.L13:
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-32 * SIZE, A2, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-32 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-32 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-28 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm3, %xmm15
MOVUPS_A2 (-28 * SIZE, A1, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A2, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-28 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-28 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-28 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm12
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-24 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-24 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm3, %xmm15
MOVUPS_A2 (-24 * SIZE, A1, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A2, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-24 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-24 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-24 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-20 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-20 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm3, %xmm15
MOVUPS_A2 (-20 * SIZE, A1, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A2, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-20 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-20 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-20 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm12
mulps %xmm5, %xmm1
addps %xmm1, %xmm13
mulps %xmm5, %xmm2
addps %xmm2, %xmm14
mulps %xmm5, %xmm3
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm3, %xmm15
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
ALIGN_4
.L15:
testq $8, MM
jle .L16
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 2, %xmm2)
MOVUPS_A2 (-32 * SIZE, A1, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-32 * SIZE, A2, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-32 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-32 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
MOVUPS_A2 (-28 * SIZE, A1, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm3, %xmm15
MOVUPS_A2 (-28 * SIZE, A1, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A2, %xmm0)
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-28 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-28 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm5, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-28 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm5, %xmm0
addps %xmm0, %xmm12
mulps %xmm5, %xmm1
addps %xmm1, %xmm13
mulps %xmm5, %xmm2
addps %xmm2, %xmm14
mulps %xmm5, %xmm3
addps %xmm3, %xmm15
addq $8 * SIZE, A1
addq $8 * SIZE, A2
addq $8 * SIZE, X1
ALIGN_4
.L16:
testq $4, MM
jle .L17
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 2, %xmm2)
MOVUPS_A2 (-32 * SIZE, A1, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-32 * SIZE, A2, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-32 * SIZE, A2, LDA, 2, %xmm2)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
MOVUPS_A2 (-32 * SIZE, A2, LDA3, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
mulps %xmm4, %xmm3
addps %xmm3, %xmm15
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
.L17:
testq $2, MM
jle .L18
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA, 1), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A1, LDA, 2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
movsd -32 * SIZE(A1, LDA3, 1), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
movsd -32 * SIZE(A2), %xmm0
mulps %xmm4, %xmm0
addps %xmm0, %xmm12
movsd -32 * SIZE(A2, LDA, 1), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm13
movsd -32 * SIZE(A2, LDA, 2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm14
movsd -32 * SIZE(A2, LDA3, 1), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm15
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
.L18:
testq $1, MM
jle .L19
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA, 1), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A1, LDA, 2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
movss -32 * SIZE(A1, LDA3, 1), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm11
movss -32 * SIZE(A2), %xmm0
mulss %xmm4, %xmm0
addss %xmm0, %xmm12
movss -32 * SIZE(A2, LDA, 1), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm13
movss -32 * SIZE(A2, LDA, 2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm14
movss -32 * SIZE(A2, LDA3, 1), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm15
ALIGN_4
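/* Reduce: collapse each four-lane accumulator to one sum per */
/* column (haddps with SSE3, otherwise an unpack/add tree), */
/* scale by alpha, and accumulate the eight results into y. */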
.L19:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm11, %xmm10
haddps %xmm10, %xmm8
pshufd $0x1, %xmm8, %xmm9
pshufd $0x2, %xmm8, %xmm10
pshufd $0x3, %xmm8, %xmm11
haddps %xmm13, %xmm12
haddps %xmm15, %xmm14
haddps %xmm14, %xmm12
pshufd $0x1, %xmm12, %xmm13
pshufd $0x2, %xmm12, %xmm14
pshufd $0x3, %xmm12, %xmm15
#else
movaps %xmm8, %xmm0
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm0
movaps %xmm10, %xmm1
unpcklps %xmm11, %xmm10
unpckhps %xmm11, %xmm1
movaps %xmm8, %xmm9
unpcklps %xmm10, %xmm8
unpckhps %xmm10, %xmm9
movaps %xmm0, %xmm10
unpcklps %xmm1, %xmm0
unpckhps %xmm1, %xmm10
addps %xmm9, %xmm8
addps %xmm0, %xmm10
addps %xmm10, %xmm8
pshufd $0x2, %xmm8, %xmm9
pshufd $0x1, %xmm8, %xmm10
pshufd $0x3, %xmm8, %xmm11
movaps %xmm12, %xmm0
unpcklps %xmm13, %xmm12
unpckhps %xmm13, %xmm0
movaps %xmm14, %xmm1
unpcklps %xmm15, %xmm14
unpckhps %xmm15, %xmm1
movaps %xmm12, %xmm13
unpcklps %xmm14, %xmm12
unpckhps %xmm14, %xmm13
movaps %xmm0, %xmm14
unpcklps %xmm1, %xmm0
unpckhps %xmm1, %xmm14
addps %xmm13, %xmm12
addps %xmm0, %xmm14
addps %xmm14, %xmm12
pshufd $0x2, %xmm12, %xmm13
pshufd $0x1, %xmm12, %xmm14
pshufd $0x3, %xmm12, %xmm15
#endif
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
mulss ALPHA, %xmm10
mulss ALPHA, %xmm11
mulss ALPHA, %xmm12
mulss ALPHA, %xmm13
mulss ALPHA, %xmm14
mulss ALPHA, %xmm15
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
addss (Y), %xmm10
addq INCY, Y
addss (Y), %xmm11
addq INCY, Y
addss (Y), %xmm12
addq INCY, Y
addss (Y), %xmm13
addq INCY, Y
addss (Y), %xmm14
addq INCY, Y
addss (Y), %xmm15
addq INCY, Y
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
movss %xmm10, (Y1)
addq INCY, Y1
movss %xmm11, (Y1)
addq INCY, Y1
movss %xmm12, (Y1)
addq INCY, Y1
movss %xmm13, (Y1)
addq INCY, Y1
movss %xmm14, (Y1)
addq INCY, Y1
movss %xmm15, (Y1)
addq INCY, Y1
cmpq $8, N
jge .L11
ALIGN_4
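/* Four-column block: the same scheme for the remaining */
/* groups of four columns. Accumulators live in xmm8-xmm11; */
/* xmm12-xmm15 hold a second set of in-flight loads so the */
/* main loop stays double-pumped. */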
  842. .L20:
  843. #endif
  844. cmpq $4, N
  845. jl .L30
  846. #if GEMV_UNROLL == 4
  847. ALIGN_3
  848. .L21:
  849. #endif
  850. subq $4, N
  851. leaq 32 * SIZE(BUFFER), X1
  852. movq A, A1
  853. leaq (A1, LDA, 2), A2
  854. leaq (A1, LDA, 4), A
  855. xorps %xmm8, %xmm8
  856. xorps %xmm9, %xmm9
  857. xorps %xmm10, %xmm10
  858. xorps %xmm11, %xmm11
  859. #ifdef ALIGNED_ACCESS
  860. cmpq $3, M
  861. jle .L27
  862. testq $SIZE, A1
  863. je .L2X
  864. movss -32 * SIZE(A1), %xmm0
  865. movss -32 * SIZE(X1), %xmm4
  866. mulss %xmm4, %xmm0
  867. addss %xmm0, %xmm8
  868. movss -32 * SIZE(A1, LDA), %xmm1
  869. mulss %xmm4, %xmm1
  870. addss %xmm1, %xmm9
  871. movss -32 * SIZE(A2), %xmm2
  872. mulss %xmm4, %xmm2
  873. addss %xmm2, %xmm10
  874. movss -32 * SIZE(A2, LDA), %xmm3
  875. mulss %xmm4, %xmm3
  876. addss %xmm3, %xmm11
  877. addq $1 * SIZE, A1
  878. addq $1 * SIZE, A2
  879. addq $1 * SIZE, X1
  880. ALIGN_3
  881. .L2X:
  882. testq $2 * SIZE, A1
  883. je .L2XX
  884. #ifdef movsd
  885. xorps %xmm0, %xmm0
  886. xorps %xmm4, %xmm4
  887. #endif
  888. movsd -32 * SIZE(A1), %xmm0
  889. movsd -32 * SIZE(X1), %xmm4
  890. mulps %xmm4, %xmm0
  891. addps %xmm0, %xmm8
  892. #ifdef movsd
  893. xorps %xmm1, %xmm1
  894. #endif
  895. movsd -32 * SIZE(A1, LDA), %xmm1
  896. mulps %xmm4, %xmm1
  897. addps %xmm1, %xmm9
  898. #ifdef movsd
  899. xorps %xmm2, %xmm2
  900. #endif
  901. movsd -32 * SIZE(A2), %xmm2
  902. mulps %xmm4, %xmm2
  903. addps %xmm2, %xmm10
  904. #ifdef movsd
  905. xorps %xmm3, %xmm3
  906. #endif
  907. movsd -32 * SIZE(A2, LDA), %xmm3
  908. mulps %xmm4, %xmm3
  909. addps %xmm3, %xmm11
  910. addq $2 * SIZE, A1
  911. addq $2 * SIZE, A2
  912. addq $2 * SIZE, X1
  913. ALIGN_3
  914. .L2XX:
  915. #endif
  916. MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
  917. MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
  918. #if (GEMV_UNROLL == 4) && defined(PREFETCHW)
  919. PREFETCHW 4 * SIZE(Y1)
  920. #endif
  921. movq MM, I
  922. sarq $4, I
  923. jle .L25
  924. MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
  925. MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
  926. MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
  927. MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm3)
  928. MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
  929. MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm13)
  930. MOVUPS_A1 (-28 * SIZE, A2, %xmm14)
  931. MOVUPS_A2 (-28 * SIZE, A2, LDA, 1, %xmm15)
  932. decq I
  933. jle .L23
  934. ALIGN_4
  935. .L22:
  936. #ifdef PREFETCH
  937. PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1)
  938. #endif
  939. mulps %xmm4, %xmm0
  940. addps %xmm0, %xmm8
  941. MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
  942. mulps %xmm4, %xmm1
  943. addps %xmm1, %xmm9
  944. MOVUPS_A2 (-24 * SIZE, A1, LDA, 1, %xmm1)
  945. mulps %xmm4, %xmm2
  946. addps %xmm2, %xmm10
  947. MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
  948. mulps %xmm4, %xmm3
  949. MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
  950. addps %xmm3, %xmm11
  951. MOVUPS_A2 (-24 * SIZE, A2, LDA, 1, %xmm3)
  952. #ifdef PREFETCH
  953. PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1, LDA)
  954. #endif
  955. mulps %xmm5, %xmm12
  956. addps %xmm12, %xmm8
  957. MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
  958. mulps %xmm5, %xmm13
  959. addps %xmm13, %xmm9
  960. MOVUPS_A2 (-20 * SIZE, A1, LDA, 1, %xmm13)
  961. mulps %xmm5, %xmm14
  962. addps %xmm14, %xmm10
  963. MOVUPS_A1 (-20 * SIZE, A2, %xmm14)
  964. mulps %xmm5, %xmm15
  965. MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
  966. addps %xmm15, %xmm11
  967. MOVUPS_A2 (-20 * SIZE, A2, LDA, 1, %xmm15)
  968. #ifdef PREFETCH
  969. PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2)
  970. #endif
  971. mulps %xmm4, %xmm0
  972. addps %xmm0, %xmm8
  973. MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
  974. mulps %xmm4, %xmm1
  975. addps %xmm1, %xmm9
  976. MOVUPS_A2 (-16 * SIZE, A1, LDA, 1, %xmm1)
  977. mulps %xmm4, %xmm2
  978. addps %xmm2, %xmm10
  979. MOVUPS_A1 (-16 * SIZE, A2, %xmm2)
  980. mulps %xmm4, %xmm3
  981. MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
  982. addps %xmm3, %xmm11
  983. MOVUPS_A2 (-16 * SIZE, A2, LDA, 1, %xmm3)
  984. #ifdef PREFETCH
  985. PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2, LDA)
  986. #endif
  987. mulps %xmm5, %xmm12
  988. addps %xmm12, %xmm8
  989. MOVUPS_A1 (-12 * SIZE, A1, %xmm12)
  990. mulps %xmm5, %xmm13
  991. addps %xmm13, %xmm9
  992. MOVUPS_A2 (-12 * SIZE, A1, LDA, 1, %xmm13)
  993. #ifdef PREFETCHW
  994. PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(X1)
  995. #endif
  996. mulps %xmm5, %xmm14
  997. addps %xmm14, %xmm10
  998. MOVUPS_A1 (-12 * SIZE, A2, %xmm14)
  999. mulps %xmm5, %xmm15
  1000. MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
  1001. addps %xmm15, %xmm11
  1002. MOVUPS_A2 (-12 * SIZE, A2, LDA, 1, %xmm15)
  1003. addq $16 * SIZE, A1
  1004. addq $16 * SIZE, A2
  1005. addq $16 * SIZE, X1
  1006. decq I
  1007. jg .L22
  1008. ALIGN_4
  1009. .L23:
  1010. mulps %xmm4, %xmm0
  1011. addps %xmm0, %xmm8
  1012. MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
  1013. mulps %xmm4, %xmm1
  1014. addps %xmm1, %xmm9
  1015. MOVUPS_A2 (-24 * SIZE, A1, LDA, 1, %xmm1)
  1016. mulps %xmm4, %xmm2
  1017. addps %xmm2, %xmm10
  1018. MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
  1019. mulps %xmm4, %xmm3
  1020. MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
  1021. addps %xmm3, %xmm11
  1022. MOVUPS_A2 (-24 * SIZE, A2, LDA, 1, %xmm3)
  1023. mulps %xmm5, %xmm12
  1024. addps %xmm12, %xmm8
  1025. MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
  1026. mulps %xmm5, %xmm13
  1027. addps %xmm13, %xmm9
  1028. MOVUPS_A2 (-20 * SIZE, A1, LDA, 1, %xmm13)
  1029. mulps %xmm5, %xmm14
  1030. addps %xmm14, %xmm10
  1031. MOVUPS_A1 (-20 * SIZE, A2, %xmm14)
  1032. mulps %xmm5, %xmm15
  1033. MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
  1034. addps %xmm15, %xmm11
  1035. MOVUPS_A2 (-20 * SIZE, A2, LDA, 1, %xmm15)
  1036. mulps %xmm4, %xmm0
  1037. addps %xmm0, %xmm8
  1038. mulps %xmm4, %xmm1
  1039. addps %xmm1, %xmm9
  1040. mulps %xmm4, %xmm2
  1041. addps %xmm2, %xmm10
  1042. mulps %xmm4, %xmm3
  1043. MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
  1044. addps %xmm3, %xmm11
  1045. mulps %xmm5, %xmm12
  1046. addps %xmm12, %xmm8
  1047. mulps %xmm5, %xmm13
  1048. addps %xmm13, %xmm9
  1049. mulps %xmm5, %xmm14
  1050. addps %xmm14, %xmm10
  1051. mulps %xmm5, %xmm15
  1052. MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
  1053. addps %xmm15, %xmm11
  1054. addq $16 * SIZE, A1
  1055. addq $16 * SIZE, A2
  1056. addq $16 * SIZE, X1
  1057. ALIGN_4
  1058. .L25:
  1059. testq $8, MM
  1060. jle .L26
  1061. MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
  1062. mulps %xmm4, %xmm0
  1063. addps %xmm0, %xmm8
  1064. MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
  1065. mulps %xmm4, %xmm1
  1066. addps %xmm1, %xmm9
  1067. MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
  1068. mulps %xmm4, %xmm2
  1069. addps %xmm2, %xmm10
  1070. MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm3)
  1071. mulps %xmm4, %xmm3
  1072. MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
  1073. addps %xmm3, %xmm11
  1074. MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
  1075. mulps %xmm5, %xmm12
  1076. addps %xmm12, %xmm8
  1077. MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm13)
  1078. mulps %xmm5, %xmm13
  1079. addps %xmm13, %xmm9
  1080. MOVUPS_A1 (-28 * SIZE, A2, %xmm14)
  1081. mulps %xmm5, %xmm14
  1082. addps %xmm14, %xmm10
  1083. MOVUPS_A2 (-28 * SIZE, A2, LDA, 1, %xmm15)
  1084. mulps %xmm5, %xmm15
  1085. addps %xmm15, %xmm11
  1086. addq $8 * SIZE, A1
  1087. addq $8 * SIZE, A2
  1088. addq $8 * SIZE, X1
  1089. ALIGN_4
.L26:
testq $4, MM
jle .L27
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-32 * SIZE, A2, LDA, 1, %xmm3)
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
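/* Two leftover rows. When movsd has been redefined as a macro, the
   replacement load may leave the upper half of the register
   unchanged, so it is cleared first to keep the packed sums clean. */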
.L27:
testq $2, MM
jle .L28
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
movsd -32 * SIZE(A2, LDA), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
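/* Last odd row: scalar multiply-add into each accumulator. */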
.L28:
testq $1, MM
jle .L29
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
movss -32 * SIZE(A2, LDA), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm11
ALIGN_4
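/* Reduce the four column accumulators to scalars: haddps when SSE3
   is available, otherwise an unpcklps/unpckhps transpose followed by
   packed adds. The four dot products are then scaled by alpha and
   merged into y at stride INCY. */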
.L29:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm11, %xmm10
haddps %xmm10, %xmm8
pshufd $0x1, %xmm8, %xmm9
pshufd $0x2, %xmm8, %xmm10
pshufd $0x3, %xmm8, %xmm11
#else
movaps %xmm8, %xmm0
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm0
movaps %xmm10, %xmm1
unpcklps %xmm11, %xmm10
unpckhps %xmm11, %xmm1
movaps %xmm8, %xmm9
unpcklps %xmm10, %xmm8
unpckhps %xmm10, %xmm9
movaps %xmm0, %xmm10
unpcklps %xmm1, %xmm0
unpckhps %xmm1, %xmm10
addps %xmm9, %xmm8
addps %xmm0, %xmm10
addps %xmm10, %xmm8
pshufd $0x2, %xmm8, %xmm9
pshufd $0x1, %xmm8, %xmm10
pshufd $0x3, %xmm8, %xmm11
#endif
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
mulss ALPHA, %xmm10
mulss ALPHA, %xmm11
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
addss (Y), %xmm10
addq INCY, Y
addss (Y), %xmm11
addq INCY, Y
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
movss %xmm10, (Y1)
addq INCY, Y1
movss %xmm11, (Y1)
addq INCY, Y1
#if GEMV_UNROLL == 4
cmpq $4, N
jge .L21
#endif
ALIGN_4
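/* N == 3: same scheme as above with three accumulators
   (%xmm8..%xmm10). */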
.L30:
cmpq $3, N
jne .L40
leaq 32 * SIZE(BUFFER), X1
movq A, A1
leaq (A1, LDA, 2), A2
leaq (A1, LDA, 4), A
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
xorps %xmm10, %xmm10
#ifdef ALIGNED_ACCESS
cmpq $3, M
jle .L37
testq $SIZE, A1
je .L3X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
movss -32 * SIZE(A2, LDA), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm11
addq $1 * SIZE, A1
addq $1 * SIZE, A2
addq $1 * SIZE, X1
ALIGN_3
.L3X:
testq $2 * SIZE, A1
je .L3XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
movsd -32 * SIZE(A2, LDA), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_3
.L3XX:
#endif
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
#if (GEMV_UNROLL == 4) && defined(PREFETCHW)
PREFETCHW 4 * SIZE(Y1)
#endif
movq MM, I
sarq $4, I
jle .L35
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm13)
MOVUPS_A1 (-28 * SIZE, A2, %xmm14)
decq I
jle .L33
ALIGN_4
.L32:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-24 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1, LDA)
#endif
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm13
addps %xmm13, %xmm9
MOVUPS_A2 (-20 * SIZE, A1, LDA, 1, %xmm13)
mulps %xmm5, %xmm14
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm14, %xmm10
MOVUPS_A1 (-20 * SIZE, A2, %xmm14)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A2)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-16 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-16 * SIZE, A2, %xmm2)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(X1)
#endif
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-12 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm13
addps %xmm13, %xmm9
MOVUPS_A2 (-12 * SIZE, A1, LDA, 1, %xmm13)
mulps %xmm5, %xmm14
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm14, %xmm10
MOVUPS_A1 (-12 * SIZE, A2, %xmm14)
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
decq I
jg .L32
ALIGN_4
.L33:
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-24 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm13
addps %xmm13, %xmm9
MOVUPS_A2 (-20 * SIZE, A1, LDA, 1, %xmm13)
mulps %xmm5, %xmm14
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm14, %xmm10
MOVUPS_A1 (-20 * SIZE, A2, %xmm14)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
mulps %xmm5, %xmm13
addps %xmm13, %xmm9
mulps %xmm5, %xmm14
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm14, %xmm10
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
ALIGN_4
.L35:
testq $8, MM
jle .L36
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A2 (-28 * SIZE, A1, LDA, 1, %xmm13)
mulps %xmm5, %xmm13
addps %xmm13, %xmm9
MOVUPS_A1 (-28 * SIZE, A2, %xmm14)
mulps %xmm5, %xmm14
addps %xmm14, %xmm10
addq $8 * SIZE, A1
addq $8 * SIZE, A2
addq $8 * SIZE, X1
ALIGN_4
.L36:
testq $4, MM
jle .L37
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A2 (-32 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
.L37:
testq $2, MM
jle .L38
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
.L38:
testq $1, MM
jle .L39
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
ALIGN_4
.L39:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm11, %xmm10
haddps %xmm10, %xmm8
pshufd $0x1, %xmm8, %xmm9
pshufd $0x2, %xmm8, %xmm10
#else
movaps %xmm8, %xmm0
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm0
movaps %xmm10, %xmm1
unpcklps %xmm11, %xmm10
unpckhps %xmm11, %xmm1
movaps %xmm8, %xmm9
unpcklps %xmm10, %xmm8
unpckhps %xmm10, %xmm9
movaps %xmm0, %xmm10
unpcklps %xmm1, %xmm0
unpckhps %xmm1, %xmm10
addps %xmm9, %xmm8
addps %xmm0, %xmm10
addps %xmm10, %xmm8
pshufd $0x2, %xmm8, %xmm9
pshufd $0x1, %xmm8, %xmm10
#endif
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
mulss ALPHA, %xmm10
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
addss (Y), %xmm10
addq INCY, Y
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
movss %xmm10, (Y1)
addq INCY, Y1
jmp .L999
ALIGN_4
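/* N == 2: two columns (A1 and A2 = A1 + LDA), two accumulators. */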
.L40:
cmpq $2, N
jne .L50
leaq 32 * SIZE(BUFFER), X1
movq A, A1
leaq (A1, LDA), A2
leaq (A1, LDA, 2), A
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
#ifdef ALIGNED_ACCESS
cmpq $3, M
jle .L47
testq $SIZE, A1
je .L4X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A2), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
addq $1 * SIZE, A1
addq $1 * SIZE, A2
addq $1 * SIZE, X1
ALIGN_3
.L4X:
testq $2 * SIZE, A1
je .L4XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A2), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_3
.L4XX:
#endif
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
movq MM, I
sarq $4, I
jle .L45
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A1 (-32 * SIZE, A2, %xmm1)
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
MOVUPS_A1 (-28 * SIZE, A2, %xmm13)
decq I
jle .L43
ALIGN_4
.L42:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm1, %xmm9
MOVUPS_A1 (-24 * SIZE, A2, %xmm1)
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm13
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm13, %xmm9
MOVUPS_A1 (-20 * SIZE, A2, %xmm13)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A2)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm1, %xmm9
MOVUPS_A1 (-16 * SIZE, A2, %xmm1)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(X1)
#endif
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-12 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm13
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm13, %xmm9
MOVUPS_A1 (-12 * SIZE, A2, %xmm13)
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
decq I
jg .L42
ALIGN_4
.L43:
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm1
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm1, %xmm9
MOVUPS_A1 (-24 * SIZE, A2, %xmm1)
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm13
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm13, %xmm9
MOVUPS_A1 (-20 * SIZE, A2, %xmm13)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
mulps %xmm4, %xmm1
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm1, %xmm9
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
mulps %xmm5, %xmm13
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm13, %xmm9
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
ALIGN_4
.L45:
testq $8, MM
jle .L46
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-32 * SIZE, A2, %xmm1)
mulps %xmm4, %xmm1
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm1, %xmm9
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm12
addps %xmm12, %xmm8
MOVUPS_A1 (-28 * SIZE, A2, %xmm13)
mulps %xmm5, %xmm13
addps %xmm13, %xmm9
addq $8 * SIZE, A1
addq $8 * SIZE, A2
addq $8 * SIZE, X1
ALIGN_4
.L46:
testq $4, MM
jle .L47
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-32 * SIZE, A2, %xmm1)
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
.L47:
testq $2, MM
jle .L48
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A2), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
.L48:
testq $1, MM
jle .L49
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A2), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
ALIGN_4
.L49:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm8, %xmm8
#else
movaps %xmm8, %xmm10
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm10
addps %xmm10, %xmm8
movhlps %xmm8, %xmm9
addps %xmm9, %xmm8
#endif
pshufd $0x1, %xmm8, %xmm9
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
jmp .L999
ALIGN_4
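/* N == 1: a single dot product, unrolled by 16 with two partial
   sums that are combined at .L59. */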
.L50:
cmpq $1, N
jne .L999
leaq 32 * SIZE(BUFFER), X1
movq A, A1
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
#ifdef ALIGNED_ACCESS
cmpq $3, M
jle .L57
testq $SIZE, A1
je .L5X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
addq $1 * SIZE, A1
addq $1 * SIZE, X1
ALIGN_3
.L5X:
testq $2 * SIZE, A1
je .L5XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, X1
ALIGN_3
.L5XX:
#endif
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
movq MM, I
sarq $4, I
jle .L55
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
decq I
jle .L53
ALIGN_4
.L52:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm12
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(X1)
#endif
mulps %xmm4, %xmm0
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm12
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
MOVUPS_A1 (-12 * SIZE, A1, %xmm12)
addq $16 * SIZE, A1
addq $16 * SIZE, X1
decq I
jg .L52
ALIGN_4
.L53:
mulps %xmm4, %xmm0
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm12
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
mulps %xmm4, %xmm0
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
mulps %xmm5, %xmm12
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
addq $16 * SIZE, A1
addq $16 * SIZE, X1
ALIGN_4
.L55:
testq $8, MM
jle .L56
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm12
addps %xmm12, %xmm9
addq $8 * SIZE, A1
addq $8 * SIZE, X1
ALIGN_4
.L56:
testq $4, MM
jle .L57
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
addq $4 * SIZE, A1
addq $4 * SIZE, X1
ALIGN_4
.L57:
testq $2, MM
jle .L58
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, X1
ALIGN_4
.L58:
testq $1, MM
jle .L59
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
ALIGN_4
.L59:
addps %xmm9, %xmm8
#ifdef HAVE_SSE3
haddps %xmm8, %xmm8
haddps %xmm8, %xmm8
#else
pshufd $1, %xmm8, %xmm9
pshufd $2, %xmm8, %xmm10
pshufd $3, %xmm8, %xmm11
addss %xmm9, %xmm8
addss %xmm11, %xmm10
addss %xmm10, %xmm8
#endif
mulss ALPHA, %xmm8
addss (Y), %xmm8
movss %xmm8, (Y1)
#ifdef ALIGNED_ACCESS
jmp .L999
ALIGN_4
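/* Misaligned-stride paths (ALIGNED_ACCESS): A1 itself has been
   aligned above, but LDA is not a multiple of 4 elements. If it is
   still a multiple of 2 elements, every other column is 8-byte
   aligned and can be read with pair loads stitched together by
   shufps $0x4e; otherwise fall through to .L200/.L300. */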
.L100:
testq $2 * SIZE - 1, LDA
jne .L200
cmpq $4, N
jl .L110
ALIGN_3
.L101:
subq $4, N
leaq 32 * SIZE(BUFFER), X1
movq A, A1
leaq (A1, LDA, 2), A2
leaq (A1, LDA, 4), A
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
xorps %xmm10, %xmm10
xorps %xmm11, %xmm11
cmpq $3, M
jle .L107
testq $SIZE, A1
je .L10X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
movss -32 * SIZE(A2, LDA), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm11
addq $1 * SIZE, A1
addq $1 * SIZE, A2
addq $1 * SIZE, X1
ALIGN_3
.L10X:
testq $2 * SIZE, A1
je .L10XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
movsd -32 * SIZE(A2, LDA), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_3
.L10XX:
MOVUPS_A2 (-34 * SIZE, A1, LDA, 1, %xmm12)
MOVUPS_A2 (-34 * SIZE, A2, LDA, 1, %xmm13)
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
#ifdef PREFETCHW
PREFETCHW 4 * SIZE(Y1)
#endif
movq MM, I
sarq $4, I
jle .L105
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-30 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
MOVUPS_A2 (-30 * SIZE, A2, LDA, 1, %xmm3)
decq I
jle .L103
ALIGN_4
.L102:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-26 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm3, %xmm13
mulps %xmm4, %xmm13
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm13, %xmm11
MOVUPS_A2 (-26 * SIZE, A2, LDA, 1, %xmm13)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1, LDA)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-22 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm13, %xmm3
mulps %xmm5, %xmm3
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm3, %xmm11
MOVUPS_A2 (-22 * SIZE, A2, LDA, 1, %xmm3)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-18 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-20 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm3, %xmm13
mulps %xmm4, %xmm13
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm13, %xmm11
MOVUPS_A2 (-18 * SIZE, A2, LDA, 1, %xmm13)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2, LDA)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-14 * SIZE, A1, LDA, 1, %xmm1)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(X1)
#endif
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-16 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm13, %xmm3
mulps %xmm5, %xmm3
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm3, %xmm11
MOVUPS_A2 (-14 * SIZE, A2, LDA, 1, %xmm3)
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
decq I
jg .L102
ALIGN_4
.L103:
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-26 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm3, %xmm13
mulps %xmm4, %xmm13
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm13, %xmm11
MOVUPS_A2 (-26 * SIZE, A2, LDA, 1, %xmm13)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-22 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm13, %xmm3
mulps %xmm5, %xmm3
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm3, %xmm11
MOVUPS_A2 (-22 * SIZE, A2, LDA, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-18 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-20 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm3, %xmm13
mulps %xmm4, %xmm13
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm13, %xmm11
MOVUPS_A2 (-18 * SIZE, A2, LDA, 1, %xmm13)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
shufps $0x4e, %xmm13, %xmm3
mulps %xmm5, %xmm3
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm3, %xmm11
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
ALIGN_4
.L105:
testq $8, MM
jle .L106
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-30 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
MOVUPS_A2 (-30 * SIZE, A2, LDA, 1, %xmm3)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-26 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A2, %xmm2)
shufps $0x4e, %xmm3, %xmm13
mulps %xmm4, %xmm13
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm13, %xmm11
MOVUPS_A2 (-26 * SIZE, A2, LDA, 1, %xmm13)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
shufps $0x4e, %xmm13, %xmm3
mulps %xmm5, %xmm3
addps %xmm3, %xmm11
addq $8 * SIZE, A1
addq $8 * SIZE, A2
addq $8 * SIZE, X1
ALIGN_4
.L106:
testq $4, MM
jle .L107
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A2 (-30 * SIZE, A1, LDA, 1, %xmm1)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
MOVUPS_A2 (-30 * SIZE, A2, LDA, 1, %xmm3)
shufps $0x4e, %xmm3, %xmm13
mulps %xmm4, %xmm13
addps %xmm13, %xmm11
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
.L107:
testq $2, MM
jle .L108
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
#ifdef movsd
xorps %xmm3, %xmm3
#endif
movsd -32 * SIZE(A2, LDA), %xmm3
mulps %xmm4, %xmm3
addps %xmm3, %xmm11
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
.L108:
testq $1, MM
jle .L109
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
movss -32 * SIZE(A2, LDA), %xmm3
mulss %xmm4, %xmm3
addss %xmm3, %xmm11
ALIGN_4
.L109:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm11, %xmm10
haddps %xmm10, %xmm8
pshufd $0x1, %xmm8, %xmm9
pshufd $0x2, %xmm8, %xmm10
pshufd $0x3, %xmm8, %xmm11
#else
movaps %xmm8, %xmm0
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm0
movaps %xmm10, %xmm1
unpcklps %xmm11, %xmm10
unpckhps %xmm11, %xmm1
movaps %xmm8, %xmm9
unpcklps %xmm10, %xmm8
unpckhps %xmm10, %xmm9
movaps %xmm0, %xmm10
unpcklps %xmm1, %xmm0
unpckhps %xmm1, %xmm10
addps %xmm9, %xmm8
addps %xmm0, %xmm10
addps %xmm10, %xmm8
pshufd $0x2, %xmm8, %xmm9
pshufd $0x1, %xmm8, %xmm10
pshufd $0x3, %xmm8, %xmm11
#endif
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
mulss ALPHA, %xmm10
mulss ALPHA, %xmm11
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
addss (Y), %xmm10
addq INCY, Y
addss (Y), %xmm11
addq INCY, Y
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
movss %xmm10, (Y1)
addq INCY, Y1
movss %xmm11, (Y1)
addq INCY, Y1
cmpq $4, N
jge .L101
ALIGN_4
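/* N == 3 with a 2-element stride offset: only the A1 + LDA column
   needs the shufps $0x4e realignment. */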
.L110:
cmpq $3, N
jne .L120
leaq 32 * SIZE(BUFFER), X1
movq A, A1
leaq (A1, LDA, 2), A2
leaq (A1, LDA, 4), A
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
xorps %xmm10, %xmm10
cmpq $3, M
jle .L117
testq $SIZE, A1
je .L11X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
addq $1 * SIZE, A1
addq $1 * SIZE, A2
addq $1 * SIZE, X1
ALIGN_3
.L11X:
testq $2 * SIZE, A1
je .L11XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_3
.L11XX:
MOVUPS_A2 (-34 * SIZE, A1, LDA, 1, %xmm12)
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
movq MM, I
sarq $4, I
jle .L115
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-30 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
decq I
jle .L113
ALIGN_4
.L112:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-26 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A2, %xmm2)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1, LDA)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-22 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm2, %xmm10
MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A2)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-18 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-20 * SIZE, A2, %xmm2)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(X1)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-14 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm2, %xmm10
MOVUPS_A1 (-16 * SIZE, A2, %xmm2)
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
decq I
jg .L112
ALIGN_4
.L113:
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-26 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A2, %xmm2)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
MOVUPS_A2 (-22 * SIZE, A1, LDA, 1, %xmm1)
mulps %xmm5, %xmm2
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm2, %xmm10
MOVUPS_A1 (-24 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-18 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-20 * SIZE, A2, %xmm2)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
mulps %xmm5, %xmm2
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm2, %xmm10
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
ALIGN_4
.L115:
testq $8, MM
jle .L116
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A2 (-30 * SIZE, A1, LDA, 1, %xmm1)
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A2 (-26 * SIZE, A1, LDA, 1, %xmm12)
mulps %xmm4, %xmm2
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm2, %xmm10
MOVUPS_A1 (-28 * SIZE, A2, %xmm2)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
mulps %xmm5, %xmm2
addps %xmm2, %xmm10
addq $8 * SIZE, A1
addq $8 * SIZE, A2
addq $8 * SIZE, X1
ALIGN_4
.L116:
testq $4, MM
jle .L117
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A2 (-30 * SIZE, A1, LDA, 1, %xmm1)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
MOVUPS_A1 (-32 * SIZE, A2, %xmm2)
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
.L117:
testq $2, MM
jle .L118
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A1, LDA), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
#ifdef movsd
xorps %xmm2, %xmm2
#endif
movsd -32 * SIZE(A2), %xmm2
mulps %xmm4, %xmm2
addps %xmm2, %xmm10
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
.L118:
testq $1, MM
jle .L119
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A1, LDA), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
movss -32 * SIZE(A2), %xmm2
mulss %xmm4, %xmm2
addss %xmm2, %xmm10
ALIGN_4
.L119:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm11, %xmm10
haddps %xmm10, %xmm8
pshufd $0x1, %xmm8, %xmm9
pshufd $0x2, %xmm8, %xmm10
#else
movaps %xmm8, %xmm0
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm0
movaps %xmm10, %xmm1
unpcklps %xmm11, %xmm10
unpckhps %xmm11, %xmm1
movaps %xmm8, %xmm9
unpcklps %xmm10, %xmm8
unpckhps %xmm10, %xmm9
movaps %xmm0, %xmm10
unpcklps %xmm1, %xmm0
unpckhps %xmm1, %xmm10
addps %xmm9, %xmm8
addps %xmm0, %xmm10
addps %xmm10, %xmm8
pshufd $0x2, %xmm8, %xmm9
pshufd $0x1, %xmm8, %xmm10
#endif
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
mulss ALPHA, %xmm10
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
addss (Y), %xmm10
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
movss %xmm10, (Y1)
jmp .L999
ALIGN_4
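/* N == 2 with a 2-element stride offset: here A2 = A1 + LDA is the
   misaligned column. */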
.L120:
cmpq $2, N
jne .L130
leaq 32 * SIZE(BUFFER), X1
movq A, A1
leaq (A1, LDA), A2
leaq (A1, LDA, 2), A
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
cmpq $3, M
jle .L127
testq $SIZE, A1
je .L12X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A2), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
addq $1 * SIZE, A1
addq $1 * SIZE, A2
addq $1 * SIZE, X1
ALIGN_3
.L12X:
testq $2 * SIZE, A1
je .L12XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A2), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_3
.L12XX:
MOVUPS_A1 (-34 * SIZE, A2, %xmm12)
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
movq MM, I
sarq $4, I
jle .L125
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A1 (-30 * SIZE, A2, %xmm1)
decq I
jle .L123
ALIGN_4
.L122:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm12, %xmm9
MOVUPS_A1 (-26 * SIZE, A2, %xmm12)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm1, %xmm9
MOVUPS_A1 (-22 * SIZE, A2, %xmm1)
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A2)
#endif
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm12, %xmm9
MOVUPS_A1 (-18 * SIZE, A2, %xmm12)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(X1)
#endif
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm1, %xmm9
MOVUPS_A1 (-14 * SIZE, A2, %xmm1)
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
decq I
jg .L122
ALIGN_4
.L123:
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm12, %xmm9
MOVUPS_A1 (-26 * SIZE, A2, %xmm12)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm1, %xmm9
MOVUPS_A1 (-22 * SIZE, A2, %xmm1)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm12, %xmm9
MOVUPS_A1 (-18 * SIZE, A2, %xmm12)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm1, %xmm9
addq $16 * SIZE, A1
addq $16 * SIZE, A2
addq $16 * SIZE, X1
ALIGN_4
.L125:
testq $8, MM
jle .L126
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A1 (-30 * SIZE, A2, %xmm1)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm12, %xmm9
MOVUPS_A1 (-26 * SIZE, A2, %xmm12)
mulps %xmm5, %xmm0
addps %xmm0, %xmm8
shufps $0x4e, %xmm12, %xmm1
mulps %xmm5, %xmm1
addps %xmm1, %xmm9
addq $8 * SIZE, A1
addq $8 * SIZE, A2
addq $8 * SIZE, X1
ALIGN_4
.L126:
testq $4, MM
jle .L127
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
MOVUPS_A1 (-30 * SIZE, A2, %xmm1)
shufps $0x4e, %xmm1, %xmm12
mulps %xmm4, %xmm12
addps %xmm12, %xmm9
addq $4 * SIZE, A1
addq $4 * SIZE, A2
addq $4 * SIZE, X1
ALIGN_4
.L127:
testq $2, MM
jle .L128
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
#ifdef movsd
xorps %xmm1, %xmm1
#endif
movsd -32 * SIZE(A2), %xmm1
mulps %xmm4, %xmm1
addps %xmm1, %xmm9
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, A2
addq $2 * SIZE, X1
ALIGN_4
.L128:
testq $1, MM
jle .L129
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
movss -32 * SIZE(A2), %xmm1
mulss %xmm4, %xmm1
addss %xmm1, %xmm9
ALIGN_4
.L129:
#ifdef HAVE_SSE3
haddps %xmm9, %xmm8
haddps %xmm8, %xmm8
#else
movaps %xmm8, %xmm10
unpcklps %xmm9, %xmm8
unpckhps %xmm9, %xmm10
addps %xmm10, %xmm8
movhlps %xmm8, %xmm9
addps %xmm9, %xmm8
#endif
pshufd $0x1, %xmm8, %xmm9
mulss ALPHA, %xmm8
mulss ALPHA, %xmm9
addss (Y), %xmm8
addq INCY, Y
addss (Y), %xmm9
addq INCY, Y
movss %xmm8, (Y1)
addq INCY, Y1
movss %xmm9, (Y1)
addq INCY, Y1
jmp .L999
ALIGN_4
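/* N == 1: column alignment was already fixed up, so this is the
   plain unrolled dot product again. */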
.L130:
cmpq $1, N
jne .L999
leaq 32 * SIZE(BUFFER), X1
movq A, A1
xorps %xmm8, %xmm8
xorps %xmm9, %xmm9
cmpq $3, M
jle .L137
testq $SIZE, A1
je .L13X
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
addq $1 * SIZE, A1
addq $1 * SIZE, X1
ALIGN_3
.L13X:
testq $2 * SIZE, A1
je .L13XX
#ifdef movsd
xorps %xmm0, %xmm0
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(A1), %xmm0
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, X1
ALIGN_3
.L13XX:
MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
movq MM, I
sarq $4, I
jle .L135
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
decq I
jle .L133
ALIGN_4
.L132:
#ifdef PREFETCH
PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(A1)
#endif
mulps %xmm4, %xmm0
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm12
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
#ifdef PREFETCHW
PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(X1)
#endif
mulps %xmm4, %xmm0
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm12
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
MOVUPS_A1 (-12 * SIZE, A1, %xmm12)
addq $16 * SIZE, A1
addq $16 * SIZE, X1
decq I
jg .L132
ALIGN_4
.L133:
mulps %xmm4, %xmm0
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
mulps %xmm5, %xmm12
MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
mulps %xmm4, %xmm0
MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
mulps %xmm5, %xmm12
MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
addps %xmm12, %xmm9
addq $16 * SIZE, A1
addq $16 * SIZE, X1
ALIGN_4
.L135:
testq $8, MM
jle .L136
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
addps %xmm0, %xmm8
MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
mulps %xmm5, %xmm12
addps %xmm12, %xmm9
addq $8 * SIZE, A1
addq $8 * SIZE, X1
ALIGN_4
.L136:
testq $4, MM
jle .L137
MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
addq $4 * SIZE, A1
addq $4 * SIZE, X1
ALIGN_4
.L137:
testq $2, MM
jle .L138
#ifdef movsd
xorps %xmm0, %xmm0
#endif
movsd -32 * SIZE(A1), %xmm0
#ifdef movsd
xorps %xmm4, %xmm4
#endif
movsd -32 * SIZE(X1), %xmm4
mulps %xmm4, %xmm0
addps %xmm0, %xmm8
shufps $0xe, %xmm4, %xmm4
addq $2 * SIZE, A1
addq $2 * SIZE, X1
ALIGN_4
.L138:
testq $1, MM
jle .L139
movss -32 * SIZE(A1), %xmm0
movss -32 * SIZE(X1), %xmm4
mulss %xmm4, %xmm0
addss %xmm0, %xmm8
ALIGN_4
.L139:
addps %xmm9, %xmm8
#ifdef HAVE_SSE3
haddps %xmm8, %xmm8
haddps %xmm8, %xmm8
#else
pshufd $1, %xmm8, %xmm9
pshufd $2, %xmm8, %xmm10
pshufd $3, %xmm8, %xmm11
addss %xmm9, %xmm8
addss %xmm11, %xmm10
addss %xmm10, %xmm8
#endif
mulss ALPHA, %xmm8
addss (Y), %xmm8
movss %xmm8, (Y1)
jmp .L999
ALIGN_4
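/* LDA is an odd number of elements (1 mod 4, assuming a whole
   number of elements), so the four columns sit at four different
   alignments. Each column is read with aligned movaps loads and
   rotated into place: movss plus shufps $0x39 rotates by one
   element, shufps $0x4e swaps halves, and movss plus shufps $0x93
   rotates by three. */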
  3001. .L200:
  3002. testq $2 * SIZE, LDA
  3003. jne .L300
  3004. cmpq $4, N
  3005. jl .L210
  3006. ALIGN_3
  3007. .L201:
  3008. subq $4, N
  3009. leaq 32 * SIZE(BUFFER), X1
  3010. movq A, A1
  3011. leaq (A1, LDA, 2), A2
  3012. leaq (A1, LDA, 4), A
  3013. xorps %xmm8, %xmm8
  3014. xorps %xmm9, %xmm9
  3015. xorps %xmm10, %xmm10
  3016. xorps %xmm11, %xmm11
  3017. cmpq $3, M
  3018. jle .L207
  3019. testq $SIZE, A1
  3020. je .L20X
  3021. movss -32 * SIZE(A1), %xmm0
  3022. movss -32 * SIZE(X1), %xmm4
  3023. mulss %xmm4, %xmm0
  3024. addss %xmm0, %xmm8
  3025. movss -32 * SIZE(A1, LDA), %xmm1
  3026. mulss %xmm4, %xmm1
  3027. addss %xmm1, %xmm9
  3028. movss -32 * SIZE(A2), %xmm2
  3029. mulss %xmm4, %xmm2
  3030. addss %xmm2, %xmm10
  3031. movss -32 * SIZE(A2, LDA), %xmm3
  3032. mulss %xmm4, %xmm3
  3033. addss %xmm3, %xmm11
  3034. addq $1 * SIZE, A1
  3035. addq $1 * SIZE, A2
  3036. addq $1 * SIZE, X1
  3037. ALIGN_3
  3038. .L20X:
  3039. testq $2 * SIZE, A1
  3040. je .L20XX
  3041. #ifdef movsd
  3042. xorps %xmm0, %xmm0
  3043. xorps %xmm4, %xmm4
  3044. #endif
  3045. movsd -32 * SIZE(A1), %xmm0
  3046. movsd -32 * SIZE(X1), %xmm4
  3047. mulps %xmm4, %xmm0
  3048. addps %xmm0, %xmm8
  3049. #ifdef movsd
  3050. xorps %xmm1, %xmm1
  3051. #endif
  3052. movsd -32 * SIZE(A1, LDA), %xmm1
  3053. mulps %xmm4, %xmm1
  3054. addps %xmm1, %xmm9
  3055. #ifdef movsd
  3056. xorps %xmm2, %xmm2
  3057. #endif
  3058. movsd -32 * SIZE(A2), %xmm2
  3059. mulps %xmm4, %xmm2
  3060. addps %xmm2, %xmm10
  3061. #ifdef movsd
  3062. xorps %xmm3, %xmm3
  3063. #endif
  3064. movsd -32 * SIZE(A2, LDA), %xmm3
  3065. mulps %xmm4, %xmm3
  3066. addps %xmm3, %xmm11
  3067. addq $2 * SIZE, A1
  3068. addq $2 * SIZE, A2
  3069. addq $2 * SIZE, X1
  3070. ALIGN_3
  3071. .L20XX:
  3072. movaps -33 * SIZE(A1, LDA), %xmm12
  3073. movaps -34 * SIZE(A2), %xmm13
  3074. movaps -35 * SIZE(A2, LDA), %xmm14
  3075. MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
  3076. MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
  3077. #ifdef PREFETCHW
  3078. PREFETCHW 4 * SIZE(Y1)
  3079. #endif
  3080. movq MM, I
  3081. sarq $4, I
  3082. jle .L205
  3083. MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
  3084. MOVUPS_A2 (-29 * SIZE, A1, LDA, 1, %xmm1)
  3085. MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
  3086. MOVUPS_A2 (-31 * SIZE, A2, LDA, 1, %xmm3)
  3087. decq I
  3088. jle .L203
  3089. ALIGN_4
.L202:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-25 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-27 * SIZE, A2, LDA, 1, %xmm14)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1, LDA)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-21 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-23 * SIZE, A2, LDA, 1, %xmm3)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-17 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-19 * SIZE, A2, LDA, 1, %xmm14)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2, LDA)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-13 * SIZE, A1, LDA, 1, %xmm1)
#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(X1)
#endif
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-14 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-15 * SIZE, A2, LDA, 1, %xmm3)
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	decq	I
	jg	.L202
	ALIGN_4
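
/*
 * NOTE: .L203 drains the software pipeline: one last 16-row group with the
 * same arithmetic but without the prefetches, and with the trailing A-column
 * loads dropped so the kernel does not read past the panel.
 */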
.L203:
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-25 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-27 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-21 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-23 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-17 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-19 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	ALIGN_4
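
/*
 * NOTE: remainder handling for M mod 16: an 8-row and a 4-row pass with the
 * same shuffle pattern, then a two-row movsd step and a final scalar row,
 * keyed off the low bits of MM.
 */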
.L205:
	testq	$8, MM
	jle	.L206
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-29 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	MOVUPS_A2 (-31 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-25 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-27 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	addps	%xmm3, %xmm11
	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, X1
	ALIGN_4

.L206:
	testq	$4, MM
	jle	.L207
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-29 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	MOVUPS_A2 (-31 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	addps	%xmm14, %xmm11
	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, X1
	ALIGN_4

.L207:
	testq	$2, MM
	jle	.L208
#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
#ifdef movsd
	xorps	%xmm3, %xmm3
#endif
	movsd	-32 * SIZE(A2, LDA), %xmm3
	mulps	%xmm4, %xmm3
	addps	%xmm3, %xmm11
	shufps	$0xe, %xmm4, %xmm4
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_4

.L208:
	testq	$1, MM
	jle	.L209
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	movss	-32 * SIZE(A2, LDA), %xmm3
	mulss	%xmm4, %xmm3
	addss	%xmm3, %xmm11
	ALIGN_4
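
/*
 * NOTE: reduction and update. Each of %xmm8-%xmm11 holds four partial sums
 * of one column dot product. With SSE3, three haddps steps collapse all four
 * columns into the lanes of %xmm8; the fallback performs the same reduction
 * with an unpcklps/unpckhps transpose plus addps, which leaves the lanes in
 * the order column 0, 2, 1, 3 - hence the swapped pshufd immediates. The
 * four sums are scaled by ALPHA, combined with the old y values read through
 * Y, and written through Y1, both stepped by INCY.
 */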
.L209:
#ifdef HAVE_SSE3
	haddps	%xmm9, %xmm8
	haddps	%xmm11, %xmm10
	haddps	%xmm10, %xmm8
	pshufd	$0x1, %xmm8, %xmm9
	pshufd	$0x2, %xmm8, %xmm10
	pshufd	$0x3, %xmm8, %xmm11
#else
	movaps	%xmm8, %xmm0
	unpcklps %xmm9, %xmm8
	unpckhps %xmm9, %xmm0
	movaps	%xmm10, %xmm1
	unpcklps %xmm11, %xmm10
	unpckhps %xmm11, %xmm1
	movaps	%xmm8, %xmm9
	unpcklps %xmm10, %xmm8
	unpckhps %xmm10, %xmm9
	movaps	%xmm0, %xmm10
	unpcklps %xmm1, %xmm0
	unpckhps %xmm1, %xmm10
	addps	%xmm9, %xmm8
	addps	%xmm0, %xmm10
	addps	%xmm10, %xmm8
	pshufd	$0x2, %xmm8, %xmm9
	pshufd	$0x1, %xmm8, %xmm10
	pshufd	$0x3, %xmm8, %xmm11
#endif
	mulss	ALPHA, %xmm8
	mulss	ALPHA, %xmm9
	mulss	ALPHA, %xmm10
	mulss	ALPHA, %xmm11
	addss	(Y), %xmm8
	addq	INCY, Y
	addss	(Y), %xmm9
	addq	INCY, Y
	addss	(Y), %xmm10
	addq	INCY, Y
	addss	(Y), %xmm11
	addq	INCY, Y
	movss	%xmm8, (Y1)
	addq	INCY, Y1
	movss	%xmm9, (Y1)
	addq	INCY, Y1
	movss	%xmm10, (Y1)
	addq	INCY, Y1
	movss	%xmm11, (Y1)
	addq	INCY, Y1
	cmpq	$4, N
	jge	.L201
	ALIGN_4
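
/*
 * NOTE: N == 3 tail: same structure as the four-column block above, but with
 * only three accumulators (%xmm8-%xmm10); only three results are scaled and
 * stored at .L219.
 */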
.L210:
	cmpq	$3, N
	jne	.L220
	leaq	32 * SIZE(BUFFER), X1
	movq	A, A1
	leaq	(A1, LDA, 2), A2
	leaq	(A1, LDA, 4), A
	xorps	%xmm8, %xmm8
	xorps	%xmm9, %xmm9
	xorps	%xmm10, %xmm10
	cmpq	$3, M
	jle	.L217
	testq	$SIZE, A1
	je	.L21X
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, X1
	ALIGN_3

.L21X:
	testq	$2 * SIZE, A1
	je	.L21XX
#ifdef movsd
	xorps	%xmm0, %xmm0
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm0
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_3

.L21XX:
	movaps	-33 * SIZE(A1, LDA), %xmm12
	movaps	-34 * SIZE(A2), %xmm13
	MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
	MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
#ifdef PREFETCHW
	PREFETCHW	4 * SIZE(Y1)
#endif
	movq	MM, I
	sarq	$4, I
	jle	.L215
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-29 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	decq	I
	jle	.L213
	ALIGN_4

.L212:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-25 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1, LDA)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-21 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A2)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-17 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(X1)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-13 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-14 * SIZE, A2, %xmm2)
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	decq	I
	jg	.L212
	ALIGN_4
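
/*
 * NOTE: the %xmm3/%xmm14/%xmm11 operations in the drain block below look
 * like leftovers from the four-column version: in this three-column path
 * %xmm3 and %xmm14 enter .L213 uninitialized and %xmm11 is never stored at
 * .L219, so they do not affect the result.
 */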
.L213:
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-25 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-27 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-21 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-23 * SIZE, A1, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-17 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x93, %xmm3, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-19 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	movss	%xmm14, %xmm3
	shufps	$0x93, %xmm14, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	ALIGN_4

.L215:
	testq	$8, MM
	jle	.L216
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-29 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-25 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, X1
	ALIGN_4

.L216:
	testq	$4, MM
	jle	.L217
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-29 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, X1
	ALIGN_4

.L217:
	testq	$2, MM
	jle	.L218
#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_4

.L218:
	testq	$1, MM
	jle	.L219
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	ALIGN_4

.L219:
#ifdef HAVE_SSE3
	haddps	%xmm9, %xmm8
	haddps	%xmm11, %xmm10
	haddps	%xmm10, %xmm8
	pshufd	$0x1, %xmm8, %xmm9
	pshufd	$0x2, %xmm8, %xmm10
#else
	movaps	%xmm8, %xmm0
	unpcklps %xmm9, %xmm8
	unpckhps %xmm9, %xmm0
	movaps	%xmm10, %xmm1
	unpcklps %xmm11, %xmm10
	unpckhps %xmm11, %xmm1
	movaps	%xmm8, %xmm9
	unpcklps %xmm10, %xmm8
	unpckhps %xmm10, %xmm9
	movaps	%xmm0, %xmm10
	unpcklps %xmm1, %xmm0
	unpckhps %xmm1, %xmm10
	addps	%xmm9, %xmm8
	addps	%xmm0, %xmm10
	addps	%xmm10, %xmm8
	pshufd	$0x2, %xmm8, %xmm9
	pshufd	$0x1, %xmm8, %xmm10
#endif
	mulss	ALPHA, %xmm8
	mulss	ALPHA, %xmm9
	mulss	ALPHA, %xmm10
	addss	(Y), %xmm8
	addq	INCY, Y
	addss	(Y), %xmm9
	addq	INCY, Y
	addss	(Y), %xmm10
	movss	%xmm8, (Y1)
	addq	INCY, Y1
	movss	%xmm9, (Y1)
	addq	INCY, Y1
	movss	%xmm10, (Y1)
	jmp	.L999
	ALIGN_4
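
/*
 * NOTE: N == 2 tail: two columns, A1 and A2 = A1 + LDA. In this phase A2 is
 * one element past a 16-byte boundary, realigned with the movss/shufps $0x39
 * rotate.
 */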
.L220:
	testq	N, N
	jle	.L999
	cmpq	$2, N
	jne	.L230
	leaq	32 * SIZE(BUFFER), X1
	movq	A, A1
	leaq	(A1, LDA), A2
	leaq	(A1, LDA, 2), A
	xorps	%xmm8, %xmm8
	xorps	%xmm9, %xmm9
	cmpq	$3, M
	jle	.L227
	testq	$SIZE, A1
	je	.L22X
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A2), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, X1
	ALIGN_3

.L22X:
	testq	$2 * SIZE, A1
	je	.L22XX
#ifdef movsd
	xorps	%xmm0, %xmm0
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm0
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A2), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_3

.L22XX:
	movaps	-33 * SIZE(A2), %xmm12
	MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
	MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
	movq	MM, I
	sarq	$4, I
	jle	.L225
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-29 * SIZE, A2, %xmm1)
	decq	I
	jle	.L223
	ALIGN_4

.L222:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm2)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-25 * SIZE, A2, %xmm12)
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	MOVUPS_A1 (-21 * SIZE, A2, %xmm1)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A2)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm2)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-17 * SIZE, A2, %xmm12)
#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(X1)
#endif
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	MOVUPS_A1 (-13 * SIZE, A2, %xmm1)
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	decq	I
	jg	.L222
	ALIGN_4

.L223:
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm2)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-25 * SIZE, A2, %xmm12)
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	MOVUPS_A1 (-21 * SIZE, A2, %xmm1)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm2)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-17 * SIZE, A2, %xmm12)
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	ALIGN_4

.L225:
	testq	$8, MM
	jle	.L226
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-29 * SIZE, A2, %xmm1)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm2)
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-25 * SIZE, A2, %xmm12)
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x39, %xmm1, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, X1
	ALIGN_4

.L226:
	testq	$4, MM
	jle	.L227
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-29 * SIZE, A2, %xmm1)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm1, %xmm12
	shufps	$0x39, %xmm12, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, X1
	ALIGN_4

.L227:
	testq	$2, MM
	jle	.L228
#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A2), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0xe, %xmm4, %xmm4
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_4

.L228:
	testq	$1, MM
	jle	.L229
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A2), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	ALIGN_4

.L229:
#ifdef HAVE_SSE3
	haddps	%xmm9, %xmm8
	haddps	%xmm8, %xmm8
#else
	movaps	%xmm8, %xmm10
	unpcklps %xmm9, %xmm8
	unpckhps %xmm9, %xmm10
	addps	%xmm10, %xmm8
	movhlps	%xmm8, %xmm9
	addps	%xmm9, %xmm8
#endif
	pshufd	$0x1, %xmm8, %xmm9
	mulss	ALPHA, %xmm8
	mulss	ALPHA, %xmm9
	addss	(Y), %xmm8
	addq	INCY, Y
	addss	(Y), %xmm9
	addq	INCY, Y
	movss	%xmm8, (Y1)
	addq	INCY, Y1
	movss	%xmm9, (Y1)
	addq	INCY, Y1
	jmp	.L999
	ALIGN_4
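
/*
 * NOTE: N == 1 tail: a plain dot product. Both A1 and X1 are aligned after
 * the peel, so no shuffles are needed; two accumulators (%xmm8/%xmm9) hide
 * the addps latency.
 */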
.L230:
	cmpq	$1, N
	jne	.L999
	leaq	32 * SIZE(BUFFER), X1
	movq	A, A1
	xorps	%xmm8, %xmm8
	xorps	%xmm9, %xmm9
	cmpq	$3, M
	jle	.L237
	testq	$SIZE, A1
	je	.L23X
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	addq	$1 * SIZE, A1
	addq	$1 * SIZE, X1
	ALIGN_3

.L23X:
	testq	$2 * SIZE, A1
	je	.L23XX
#ifdef movsd
	xorps	%xmm0, %xmm0
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm0
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	shufps	$0xe, %xmm4, %xmm4
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, X1
	ALIGN_3

.L23XX:
	MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
	MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
	movq	MM, I
	sarq	$4, I
	jle	.L235
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
	decq	I
	jle	.L233
	ALIGN_4

.L232:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(X1)
#endif
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-12 * SIZE, A1, %xmm12)
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, X1
	decq	I
	jg	.L232
	ALIGN_4

.L233:
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-20 * SIZE, A1, %xmm12)
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, X1
	ALIGN_4

.L235:
	testq	$8, MM
	jle	.L236
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
	mulps	%xmm5, %xmm12
	addps	%xmm12, %xmm9
	addq	$8 * SIZE, A1
	addq	$8 * SIZE, X1
	ALIGN_4

.L236:
	testq	$4, MM
	jle	.L237
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	addq	$4 * SIZE, A1
	addq	$4 * SIZE, X1
	ALIGN_4

.L237:
	testq	$2, MM
	jle	.L238
#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	shufps	$0xe, %xmm4, %xmm4
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, X1
	ALIGN_4

.L238:
	testq	$1, MM
	jle	.L239
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	ALIGN_4

.L239:
	addps	%xmm9, %xmm8
#ifdef HAVE_SSE3
	haddps	%xmm8, %xmm8
	haddps	%xmm8, %xmm8
#else
	pshufd	$1, %xmm8, %xmm9
	pshufd	$2, %xmm8, %xmm10
	pshufd	$3, %xmm8, %xmm11
	addss	%xmm9, %xmm8
	addss	%xmm11, %xmm10
	addss	%xmm10, %xmm8
#endif
	mulss	ALPHA, %xmm8
	addss	(Y), %xmm8
	movss	%xmm8, (Y1)
	jmp	.L999
	ALIGN_4
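
/*
 * NOTE: .L300 starts the second LDA-phase family, mirroring .L200-.L239.
 * Here the odd columns sit at the complementary offsets (three elements past
 * a 16-byte boundary instead of one, and vice versa), so the staggered
 * movaps loads move from -33/-35 to -35/-33 and the shufps immediates swap
 * accordingly ($0x39 <-> $0x93); the even-offset $0x4e case is unchanged.
 */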
.L300:
	cmpq	$4, N
	jl	.L310
	ALIGN_3

.L301:
	subq	$4, N
	leaq	32 * SIZE(BUFFER), X1
	movq	A, A1
	leaq	(A1, LDA, 2), A2
	leaq	(A1, LDA, 4), A
	xorps	%xmm8, %xmm8
	xorps	%xmm9, %xmm9
	xorps	%xmm10, %xmm10
	xorps	%xmm11, %xmm11
	cmpq	$3, M
	jle	.L307
	testq	$SIZE, A1
	je	.L30X
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	movss	-32 * SIZE(A2, LDA), %xmm3
	mulss	%xmm4, %xmm3
	addss	%xmm3, %xmm11
	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, X1
	ALIGN_3

.L30X:
	testq	$2 * SIZE, A1
	je	.L30XX
#ifdef movsd
	xorps	%xmm0, %xmm0
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm0
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
#ifdef movsd
	xorps	%xmm3, %xmm3
#endif
	movsd	-32 * SIZE(A2, LDA), %xmm3
	mulps	%xmm4, %xmm3
	addps	%xmm3, %xmm11
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_3

.L30XX:
	movaps	-35 * SIZE(A1, LDA), %xmm12
	movaps	-34 * SIZE(A2), %xmm13
	movaps	-33 * SIZE(A2, LDA), %xmm14
	MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
	MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
#ifdef PREFETCHW
	PREFETCHW	4 * SIZE(Y1)
#endif
	movq	MM, I
	sarq	$4, I
	jle	.L305
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-31 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	MOVUPS_A2 (-29 * SIZE, A2, LDA, 1, %xmm3)
	decq	I
	jle	.L303
	ALIGN_4
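
/*
 * NOTE: main 16-row loop of this phase: A1 + LDA is three elements past a
 * 16-byte boundary ($0x93), A2 two ($0x4e), A2 + LDA one ($0x39); otherwise
 * identical in structure to .L202.
 */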
.L302:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-27 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-25 * SIZE, A2, LDA, 1, %xmm14)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A1, LDA)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-23 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-21 * SIZE, A2, LDA, 1, %xmm3)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-19 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-17 * SIZE, A2, LDA, 1, %xmm14)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(A2, LDA)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-15 * SIZE, A1, LDA, 1, %xmm1)
#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 2 - 128 + PREOFFSET(X1)
#endif
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-14 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-13 * SIZE, A2, LDA, 1, %xmm3)
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	decq	I
	jg	.L302
	ALIGN_4

.L303:
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-27 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-25 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-23 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-21 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-19 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-17 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	ALIGN_4

.L305:
	testq	$8, MM
	jle	.L306
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-31 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	MOVUPS_A2 (-29 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-27 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-25 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	addps	%xmm3, %xmm11
	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, X1
	ALIGN_4

.L306:
	testq	$4, MM
	jle	.L307
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-31 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	MOVUPS_A2 (-29 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	addps	%xmm14, %xmm11
	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, X1
	ALIGN_4

.L307:
	testq	$2, MM
	jle	.L308
#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
#ifdef movsd
	xorps	%xmm3, %xmm3
#endif
	movsd	-32 * SIZE(A2, LDA), %xmm3
	mulps	%xmm4, %xmm3
	addps	%xmm3, %xmm11
	shufps	$0xe, %xmm4, %xmm4
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_4

.L308:
	testq	$1, MM
	jle	.L309
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	movss	-32 * SIZE(A2, LDA), %xmm3
	mulss	%xmm4, %xmm3
	addss	%xmm3, %xmm11
	ALIGN_4

.L309:
#ifdef HAVE_SSE3
	haddps	%xmm9, %xmm8
	haddps	%xmm11, %xmm10
	haddps	%xmm10, %xmm8
	pshufd	$0x1, %xmm8, %xmm9
	pshufd	$0x2, %xmm8, %xmm10
	pshufd	$0x3, %xmm8, %xmm11
#else
	movaps	%xmm8, %xmm0
	unpcklps %xmm9, %xmm8
	unpckhps %xmm9, %xmm0
	movaps	%xmm10, %xmm1
	unpcklps %xmm11, %xmm10
	unpckhps %xmm11, %xmm1
	movaps	%xmm8, %xmm9
	unpcklps %xmm10, %xmm8
	unpckhps %xmm10, %xmm9
	movaps	%xmm0, %xmm10
	unpcklps %xmm1, %xmm0
	unpckhps %xmm1, %xmm10
	addps	%xmm9, %xmm8
	addps	%xmm0, %xmm10
	addps	%xmm10, %xmm8
	pshufd	$0x2, %xmm8, %xmm9
	pshufd	$0x1, %xmm8, %xmm10
	pshufd	$0x3, %xmm8, %xmm11
#endif
	mulss	ALPHA, %xmm8
	mulss	ALPHA, %xmm9
	mulss	ALPHA, %xmm10
	mulss	ALPHA, %xmm11
	addss	(Y), %xmm8
	addq	INCY, Y
	addss	(Y), %xmm9
	addq	INCY, Y
	addss	(Y), %xmm10
	addq	INCY, Y
	addss	(Y), %xmm11
	addq	INCY, Y
	movss	%xmm8, (Y1)
	addq	INCY, Y1
	movss	%xmm9, (Y1)
	addq	INCY, Y1
	movss	%xmm10, (Y1)
	addq	INCY, Y1
	movss	%xmm11, (Y1)
	addq	INCY, Y1
	cmpq	$4, N
	jge	.L301
	ALIGN_4
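
/*
 * NOTE: N == 3 tail of this family. As at .L213, the %xmm3/%xmm14/%xmm11
 * operations in the .L313 drain block are unused leftovers from the
 * four-column code and do not affect the three stored results.
 */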
.L310:
	testq	N, N
	jle	.L999
	cmpq	$3, N
	jne	.L320
	leaq	32 * SIZE(BUFFER), X1
	movq	A, A1
	leaq	(A1, LDA, 2), A2
	leaq	(A1, LDA, 4), A
	xorps	%xmm8, %xmm8
	xorps	%xmm9, %xmm9
	xorps	%xmm10, %xmm10
	cmpq	$3, M
	jle	.L317
	testq	$SIZE, A1
	je	.L31X
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, X1
	ALIGN_3

.L31X:
	testq	$2 * SIZE, A1
	je	.L31XX
#ifdef movsd
	xorps	%xmm0, %xmm0
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm0
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_3

.L31XX:
	movaps	-35 * SIZE(A1, LDA), %xmm12
	movaps	-34 * SIZE(A2), %xmm13
	MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
	MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
	movq	MM, I
	sarq	$4, I
	jle	.L315
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-31 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	decq	I
	jle	.L313
	ALIGN_4

.L312:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-27 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A1, LDA)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-23 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(A2)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-19 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 3 - 128 + PREOFFSET(X1)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-15 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-14 * SIZE, A2, %xmm2)
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	decq	I
	jg	.L312
	ALIGN_4

.L313:
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-27 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-25 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	MOVUPS_A2 (-23 * SIZE, A1, LDA, 1, %xmm1)
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	MOVUPS_A1 (-22 * SIZE, A2, %xmm2)
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	MOVUPS_A2 (-21 * SIZE, A2, LDA, 1, %xmm3)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-19 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-18 * SIZE, A2, %xmm13)
	movss	%xmm3, %xmm14
	shufps	$0x39, %xmm14, %xmm14
	mulps	%xmm4, %xmm14
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm14, %xmm11
	MOVUPS_A2 (-17 * SIZE, A2, LDA, 1, %xmm14)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	movss	%xmm14, %xmm3
	shufps	$0x39, %xmm3, %xmm3
	mulps	%xmm5, %xmm3
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm3, %xmm11
	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	ALIGN_4

.L315:
	testq	$8, MM
	jle	.L316
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-31 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	MOVUPS_A2 (-27 * SIZE, A1, LDA, 1, %xmm12)
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm13, %xmm10
	MOVUPS_A1 (-26 * SIZE, A2, %xmm13)
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0x4e, %xmm13, %xmm2
	mulps	%xmm5, %xmm2
	addps	%xmm2, %xmm10
	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, X1
	ALIGN_4

.L316:
	testq	$4, MM
	jle	.L317
	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A2 (-31 * SIZE, A1, LDA, 1, %xmm1)
	MOVUPS_A1 (-30 * SIZE, A2, %xmm2)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9
	shufps	$0x4e, %xmm2, %xmm13
	mulps	%xmm4, %xmm13
	addps	%xmm13, %xmm10
	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, X1
	ALIGN_4

.L317:
	testq	$2, MM
	jle	.L318
#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A1, LDA), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
#ifdef movsd
	xorps	%xmm2, %xmm2
#endif
	movsd	-32 * SIZE(A2), %xmm2
	mulps	%xmm4, %xmm2
	addps	%xmm2, %xmm10
	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_4

.L318:
	testq	$1, MM
	jle	.L319
	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	movss	-32 * SIZE(A1, LDA), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	movss	-32 * SIZE(A2), %xmm2
	mulss	%xmm4, %xmm2
	addss	%xmm2, %xmm10
	ALIGN_4

.L319:
#ifdef HAVE_SSE3
	haddps	%xmm9, %xmm8
	haddps	%xmm11, %xmm10
	haddps	%xmm10, %xmm8
	pshufd	$0x1, %xmm8, %xmm9
	pshufd	$0x2, %xmm8, %xmm10
#else
	movaps	%xmm8, %xmm0
	unpcklps %xmm9, %xmm8
	unpckhps %xmm9, %xmm0
	movaps	%xmm10, %xmm1
	unpcklps %xmm11, %xmm10
	unpckhps %xmm11, %xmm1
	movaps	%xmm8, %xmm9
	unpcklps %xmm10, %xmm8
	unpckhps %xmm10, %xmm9
	movaps	%xmm0, %xmm10
	unpcklps %xmm1, %xmm0
	unpckhps %xmm1, %xmm10
	addps	%xmm9, %xmm8
	addps	%xmm0, %xmm10
	addps	%xmm10, %xmm8
	pshufd	$0x2, %xmm8, %xmm9
	pshufd	$0x1, %xmm8, %xmm10
#endif
	mulss	ALPHA, %xmm8
	mulss	ALPHA, %xmm9
	mulss	ALPHA, %xmm10
	addss	(Y), %xmm8
	addq	INCY, Y
	addss	(Y), %xmm9
	addq	INCY, Y
	addss	(Y), %xmm10
	movss	%xmm8, (Y1)
	addq	INCY, Y1
	movss	%xmm9, (Y1)
	addq	INCY, Y1
	movss	%xmm10, (Y1)
	jmp	.L999
	ALIGN_3
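
/*
 * NOTE: N == 2 tail of this family; A2 is three elements past a 16-byte
 * boundary (aligned loads at -35, realigned with the movss/shufps $0x93
 * rotate).
 */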
  4956. .L320:
  4957. cmpq $2, N
  4958. jne .L330
  4959. leaq 32 * SIZE(BUFFER), X1
  4960. movq A, A1
  4961. leaq (A1, LDA), A2
  4962. xorps %xmm8, %xmm8
  4963. xorps %xmm9, %xmm9
  4964. cmpq $3, M
  4965. jle .L327
  4966. testq $SIZE, A1
  4967. je .L32X
  4968. movss -32 * SIZE(A1), %xmm0
  4969. movss -32 * SIZE(X1), %xmm4
  4970. mulss %xmm4, %xmm0
  4971. addss %xmm0, %xmm8
  4972. movss -32 * SIZE(A2), %xmm1
  4973. mulss %xmm4, %xmm1
  4974. addss %xmm1, %xmm9
  4975. addq $1 * SIZE, A1
  4976. addq $1 * SIZE, A2
  4977. addq $1 * SIZE, X1
  4978. ALIGN_3
  4979. .L32X:
  4980. testq $2 * SIZE, A1
  4981. je .L32XX
  4982. #ifdef movsd
  4983. xorps %xmm0, %xmm0
  4984. xorps %xmm4, %xmm4
  4985. #endif
  4986. movsd -32 * SIZE(A1), %xmm0
  4987. movsd -32 * SIZE(X1), %xmm4
  4988. mulps %xmm4, %xmm0
  4989. addps %xmm0, %xmm8
  4990. #ifdef movsd
  4991. xorps %xmm1, %xmm1
  4992. #endif
  4993. movsd -32 * SIZE(A2), %xmm1
  4994. mulps %xmm4, %xmm1
  4995. addps %xmm1, %xmm9
  4996. addq $2 * SIZE, A1
  4997. addq $2 * SIZE, A2
  4998. addq $2 * SIZE, X1
  4999. ALIGN_3
  5000. .L32XX:
  5001. movaps -35 * SIZE(A2), %xmm12
  5002. MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
  5003. MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)
  5004. movq MM, I
  5005. sarq $4, I
  5006. jle .L325
  5007. MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
  5008. MOVUPS_A1 (-31 * SIZE, A2, %xmm1)
  5009. decq I
  5010. jle .L323
  5011. ALIGN_4
.L322:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)

	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-27 * SIZE, A2, %xmm12)

	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)

	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	MOVUPS_A1 (-23 * SIZE, A2, %xmm1)

#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(A2)
#endif
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)

	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-19 * SIZE, A2, %xmm12)

#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 4 - 128 + PREOFFSET(X1)
#endif
	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)

	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	MOVUPS_A1 (-15 * SIZE, A2, %xmm1)

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1

	decq	I
	jg	.L322
	ALIGN_4
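/* Drain the last unrolled iteration: same arithmetic as .L322, but with no
   prefetches and no loads beyond the current 16-element block. */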
.L323:
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)

	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-27 * SIZE, A2, %xmm12)

	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)

	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9
	MOVUPS_A1 (-23 * SIZE, A2, %xmm1)

	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-20 * SIZE, A1, %xmm0)

	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-19 * SIZE, A2, %xmm12)

	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8

	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm1, %xmm9

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, X1
	ALIGN_4
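/* Tail handling: mop up 8, 4, 2 and finally 1 remaining element(s). */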
.L325:
	testq	$8, MM
	jle	.L326

	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-31 * SIZE, A2, %xmm1)

	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-28 * SIZE, A1, %xmm0)

	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-27 * SIZE, A2, %xmm12)

	mulps	%xmm5, %xmm0
	addps	%xmm0, %xmm8

	movss	%xmm12, %xmm1
	shufps	$0x93, %xmm12, %xmm1
	mulps	%xmm5, %xmm1
	addps	%xmm1, %xmm9

	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, X1
	ALIGN_4
.L326:
	testq	$4, MM
	jle	.L327

	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-31 * SIZE, A2, %xmm1)

	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8

	movss	%xmm1, %xmm12
	shufps	$0x93, %xmm1, %xmm12
	mulps	%xmm4, %xmm12
	addps	%xmm12, %xmm9

	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, X1
	ALIGN_4
.L327:
	testq	$2, MM
	jle	.L328

#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4

	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8

#ifdef movsd
	xorps	%xmm1, %xmm1
#endif
	movsd	-32 * SIZE(A2), %xmm1
	mulps	%xmm4, %xmm1
	addps	%xmm1, %xmm9
	shufps	$0xe, %xmm4, %xmm4

	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, X1
	ALIGN_4
.L328:
	testq	$1, MM
	jle	.L329

	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8

	movss	-32 * SIZE(A2), %xmm1
	mulss	%xmm4, %xmm1
	addss	%xmm1, %xmm9
	ALIGN_4
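/* Horizontal reduction: collapse the two 4-wide accumulators into the two
   scalar dot products.  haddps needs SSE3; otherwise interleave with
   unpck{l,h}ps, add, and fold the high half down with movhlps. */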
.L329:
#ifdef HAVE_SSE3
	haddps	%xmm9, %xmm8
	haddps	%xmm8, %xmm8
#else
	movaps	%xmm8, %xmm10
	unpcklps %xmm9, %xmm8
	unpckhps %xmm9, %xmm10
	addps	%xmm10, %xmm8

	movhlps	%xmm8, %xmm9
	addps	%xmm9, %xmm8
#endif
	pshufd	$0x1, %xmm8, %xmm9

	mulss	ALPHA, %xmm8
	mulss	ALPHA, %xmm9

	addss	(Y), %xmm8
	addq	INCY, Y
	addss	(Y), %xmm9
	addq	INCY, Y

	movss	%xmm8, (Y1)
	addq	INCY, Y1
	movss	%xmm9, (Y1)
	addq	INCY, Y1
	jmp	.L999
	ALIGN_4
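/* N == 1: a single column is left; compute one dot product of A1 and x. */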
.L330:
	cmpq	$1, N
	jne	.L999

	leaq	32 * SIZE(BUFFER), X1

	movq	A, A1

	xorps	%xmm8, %xmm8
	xorps	%xmm9, %xmm9

	cmpq	$3, M
	jle	.L337
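/* Same alignment peel as the N == 2 case, now for the single column. */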
	testq	$SIZE, A1
	je	.L33X

	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8

	addq	$1 * SIZE, A1
	addq	$1 * SIZE, X1
	ALIGN_3
.L33X:
	testq	$2 * SIZE, A1
	je	.L33XX

#ifdef movsd
	xorps	%xmm0, %xmm0
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm0
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	shufps	$0xe, %xmm4, %xmm4

	addq	$2 * SIZE, A1
	addq	$2 * SIZE, X1
	ALIGN_3
.L33XX:
	MOVUPS_XL1 (-32 * SIZE, X1, %xmm4)
	MOVUPS_XL1 (-28 * SIZE, X1, %xmm5)

	movq	MM, I
	sarq	$4, I
	jle	.L335

	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	MOVUPS_A1 (-28 * SIZE, A1, %xmm12)

	decq	I
	jle	.L333
	ALIGN_4
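/* Main loop: 16 elements per iteration, alternating between two
   accumulators (xmm8/xmm9) so the addps dependency chains can overlap. */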
.L332:
#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(A1)
#endif
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)

	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-20 * SIZE, A1, %xmm12)

#ifdef PREFETCHW
	PREFETCH (PREFETCHSIZE) * 8 - 128 + PREOFFSET(X1)
#endif
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-16 * SIZE, A1, %xmm0)

	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-12 * SIZE, A1, %xmm12)

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, X1

	decq	I
	jg	.L332
	ALIGN_4
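/* Drain the final preloaded iteration (no prefetches, no extra A loads). */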
.L333:
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8
	MOVUPS_A1 (-24 * SIZE, A1, %xmm0)

	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-20 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9
	MOVUPS_A1 (-20 * SIZE, A1, %xmm12)

	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-16 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8

	mulps	%xmm5, %xmm12
	MOVUPS_XL1 (-12 * SIZE, X1, %xmm5)
	addps	%xmm12, %xmm9

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, X1
	ALIGN_4
.L335:
	testq	$8, MM
	jle	.L336

	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	mulps	%xmm4, %xmm0
	MOVUPS_XL1 (-24 * SIZE, X1, %xmm4)
	addps	%xmm0, %xmm8

	MOVUPS_A1 (-28 * SIZE, A1, %xmm12)
	mulps	%xmm5, %xmm12
	addps	%xmm12, %xmm9

	addq	$8 * SIZE, A1
	addq	$8 * SIZE, X1
	ALIGN_4
.L336:
	testq	$4, MM
	jle	.L337

	MOVUPS_A1 (-32 * SIZE, A1, %xmm0)
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8

	addq	$4 * SIZE, A1
	addq	$4 * SIZE, X1
	ALIGN_4
.L337:
	testq	$2, MM
	jle	.L338

#ifdef movsd
	xorps	%xmm0, %xmm0
#endif
	movsd	-32 * SIZE(A1), %xmm0
#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(X1), %xmm4
	mulps	%xmm4, %xmm0
	addps	%xmm0, %xmm8
	shufps	$0xe, %xmm4, %xmm4

	addq	$2 * SIZE, A1
	addq	$2 * SIZE, X1
	ALIGN_4
.L338:
	testq	$1, MM
	jle	.L339

	movss	-32 * SIZE(A1), %xmm0
	movss	-32 * SIZE(X1), %xmm4
	mulss	%xmm4, %xmm0
	addss	%xmm0, %xmm8
	ALIGN_4
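/* Fold the two accumulators together, reduce the four lanes to a single
   scalar, then scale by ALPHA and update the one y element. */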
.L339:
	addps	%xmm9, %xmm8

#ifdef HAVE_SSE3
	haddps	%xmm8, %xmm8
	haddps	%xmm8, %xmm8
#else
	pshufd	$1, %xmm8, %xmm9
	pshufd	$2, %xmm8, %xmm10
	pshufd	$3, %xmm8, %xmm11

	addss	%xmm9, %xmm8
	addss	%xmm11, %xmm10
	addss	%xmm10, %xmm8
#endif

	mulss	ALPHA, %xmm8

	addss	(Y), %xmm8
	movss	%xmm8, (Y1)
	jmp	.L999
#endif
	ALIGN_4
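/* Column block done: advance the saved A pointer by the M elements just
   consumed and restart the outer loop at .L0t. */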
.L999:
	leaq	(, M, SIZE), %rax
	addq	%rax, AA
	jmp	.L0t
	ALIGN_4
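/* Function epilogue: restore the callee-saved GP registers (plus rdi/rsi
   and xmm6-xmm15 under WINDOWS_ABI), release the stack frame and return. */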
.L999x:
	movq	  0(%rsp), %rbx
	movq	  8(%rsp), %rbp
	movq	 16(%rsp), %r12
	movq	 24(%rsp), %r13
	movq	 32(%rsp), %r14
	movq	 40(%rsp), %r15

#ifdef WINDOWS_ABI
	movq	 48(%rsp), %rdi
	movq	 56(%rsp), %rsi
	movups	 64(%rsp), %xmm6
	movups	 80(%rsp), %xmm7
	movups	 96(%rsp), %xmm8
	movups	112(%rsp), %xmm9
	movups	128(%rsp), %xmm10
	movups	144(%rsp), %xmm11
	movups	160(%rsp), %xmm12
	movups	176(%rsp), %xmm13
	movups	192(%rsp), %xmm14
	movups	208(%rsp), %xmm15
#endif

	addq	$STACKSIZE, %rsp
	ret
	ALIGN_4

	EPILOGUE