- /*******************************************************************************
- Copyright (c) 2016, The OpenBLAS Project
- All rights reserved.
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in
- the documentation and/or other materials provided with the
- distribution.
- 3. Neither the name of the OpenBLAS project nor the names of
- its contributors may be used to endorse or promote products
- derived from this software without specific prior written permission.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *******************************************************************************/
-
- #include "common.h"
- #include "macros_msa.h"
-
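- /* Single-precision TRSM solve kernels for the "RT" case (triangular matrix on the
-    right, transposed, as suggested by the helper names), vectorized with MIPS MSA
-    intrinsics. Each ssolve_MxN_rt_msa helper works on an M x N tile of C: it first
-    subtracts the partial products accumulated over the trailing bk columns (the GEMM
-    update), then back-substitutes against the packed N x N triangular block of B.
-    The code multiplies by the diagonal entries of B rather than dividing, so those
-    entries are expected to be stored pre-inverted. The solved tile is written both
-    back into C and into the packed A buffer. */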
- static void ssolve_8x8_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_c2, src_c3, src_c4, src_c5, src_c6, src_c7;
- v4f32 src_c8, src_c9, src_c10, src_c11, src_c12, src_c13, src_c14, src_c15;
- v4f32 src_b, src_b0, src_b8, src_b9, src_b16, src_b17, src_b18, src_b24;
- v4f32 src_b25, src_b26, src_b27, src_b32, src_b33, src_b34, src_b35;
- v4f32 src_b36, src_b40, src_b41, src_b42, src_b43, src_b44, src_b45;
- v4f32 src_b48, src_b49, src_b50, src_b51, src_b52, src_b53, src_b54;
- v4f32 src_b56, src_b57, src_b58, src_b59, src_b60, src_b61, src_b62, src_b63;
- FLOAT *c_nxt1line = c + ldc;
- FLOAT *c_nxt2line = c + 2 * ldc;
- FLOAT *c_nxt3line = c + 3 * ldc;
- FLOAT *c_nxt4line = c + 4 * ldc;
- FLOAT *c_nxt5line = c + 5 * ldc;
- FLOAT *c_nxt6line = c + 6 * ldc;
- FLOAT *c_nxt7line = c + 7 * ldc;
-
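- /* GEMM update: accumulate the product of the 8 x bk panel of A and the bk x 8 panel
-    of B in sixteen v4f32 accumulators and subtract it from the C tile. When bk == 0
-    the C tile is loaded unchanged. */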
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a0, src_a1, src_b1, src_b2, src_b3;
- v4f32 res0, res1, res2, res3, res4, res5, res6, res7;
- v4f32 res8, res9, res10, res11, res12, res13, res14, res15;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 = src_a0 * src_b0;
- res1 = src_a1 * src_b0;
- res2 = src_a0 * src_b1;
- res3 = src_a1 * src_b1;
- res4 = src_a0 * src_b2;
- res5 = src_a1 * src_b2;
- res6 = src_a0 * src_b3;
- res7 = src_a1 * src_b3;
-
- src_b = LD_SP(bb + 4);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res8 = src_a0 * src_b0;
- res9 = src_a1 * src_b0;
- res10 = src_a0 * src_b1;
- res11 = src_a1 * src_b1;
- res12 = src_a0 * src_b2;
- res13 = src_a1 * src_b2;
- res14 = src_a0 * src_b3;
- res15 = src_a1 * src_b3;
-
- for (k = (bk - 1); k--;)
- {
- aa += 8;
- bb += 8;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
- res4 += src_a0 * src_b2;
- res5 += src_a1 * src_b2;
- res6 += src_a0 * src_b3;
- res7 += src_a1 * src_b3;
-
- src_b = LD_SP(bb + 4);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res8 += src_a0 * src_b0;
- res9 += src_a1 * src_b0;
- res10 += src_a0 * src_b1;
- res11 += src_a1 * src_b1;
- res12 += src_a0 * src_b2;
- res13 += src_a1 * src_b2;
- res14 += src_a0 * src_b3;
- res15 += src_a1 * src_b3;
- }
-
- LD_SP2(c, 4, src_c0, src_c1);
- LD_SP2(c_nxt1line, 4, src_c2, src_c3);
- LD_SP2(c_nxt2line, 4, src_c4, src_c5);
- LD_SP2(c_nxt3line, 4, src_c6, src_c7);
- LD_SP2(c_nxt4line, 4, src_c8, src_c9);
- LD_SP2(c_nxt5line, 4, src_c10, src_c11);
- LD_SP2(c_nxt6line, 4, src_c12, src_c13);
- LD_SP2(c_nxt7line, 4, src_c14, src_c15);
-
- src_c0 -= res0;
- src_c1 -= res1;
- src_c2 -= res2;
- src_c3 -= res3;
- src_c4 -= res4;
- src_c5 -= res5;
- src_c6 -= res6;
- src_c7 -= res7;
- src_c8 -= res8;
- src_c9 -= res9;
- src_c10 -= res10;
- src_c11 -= res11;
- src_c12 -= res12;
- src_c13 -= res13;
- src_c14 -= res14;
- src_c15 -= res15;
- }
- else
- {
- LD_SP2(c, 4, src_c0, src_c1);
- LD_SP2(c_nxt1line, 4, src_c2, src_c3);
- LD_SP2(c_nxt2line, 4, src_c4, src_c5);
- LD_SP2(c_nxt3line, 4, src_c6, src_c7);
- LD_SP2(c_nxt4line, 4, src_c8, src_c9);
- LD_SP2(c_nxt5line, 4, src_c10, src_c11);
- LD_SP2(c_nxt6line, 4, src_c12, src_c13);
- LD_SP2(c_nxt7line, 4, src_c14, src_c15);
- }
-
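- /* Back substitution: rewind b to the packed 8x8 triangular block and solve the tile
-    starting from its last line (c_nxt7line, diagonal entry b[63]), eliminating each
-    solved pair of vectors from all earlier lines before moving toward b[0]. Each pair
-    is stored into the packed A buffer and into its row of C as soon as it is final. */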
- b -= 64;
-
- src_b = LD_SP(b + 60);
- SPLATI_W4_SP(src_b, src_b60, src_b61, src_b62, src_b63);
- src_b = LD_SP(b + 56);
- SPLATI_W4_SP(src_b, src_b56, src_b57, src_b58, src_b59);
-
- src_c15 *= src_b63;
- src_c14 *= src_b63;
- src_c13 -= src_c15 * src_b62;
- src_c12 -= src_c14 * src_b62;
- src_c11 -= src_c15 * src_b61;
- src_c10 -= src_c14 * src_b61;
- src_c9 -= src_c15 * src_b60;
- src_c8 -= src_c14 * src_b60;
- src_c7 -= src_c15 * src_b59;
- src_c6 -= src_c14 * src_b59;
- src_c5 -= src_c15 * src_b58;
- src_c4 -= src_c14 * src_b58;
- src_c3 -= src_c15 * src_b57;
- src_c2 -= src_c14 * src_b57;
- src_c1 -= src_c15 * src_b56;
- src_c0 -= src_c14 * src_b56;
-
- src_b = LD_SP(b + 48);
- SPLATI_W4_SP(src_b, src_b48, src_b49, src_b50, src_b51);
- src_b52 = LD_SP(b + 52);
- src_b54 = (v4f32) __msa_splati_w((v4i32) src_b52, 2);
- src_b53 = (v4f32) __msa_splati_w((v4i32) src_b52, 1);
- src_b52 = (v4f32) __msa_splati_w((v4i32) src_b52, 0);
-
- src_c12 *= src_b54;
- src_c13 *= src_b54;
- src_c10 -= src_c12 * src_b53;
- src_c11 -= src_c13 * src_b53;
- src_c8 -= src_c12 * src_b52;
- src_c9 -= src_c13 * src_b52;
- src_c6 -= src_c12 * src_b51;
- src_c7 -= src_c13 * src_b51;
- src_c4 -= src_c12 * src_b50;
- src_c5 -= src_c13 * src_b50;
- src_c2 -= src_c12 * src_b49;
- src_c3 -= src_c13 * src_b49;
- src_c0 -= src_c12 * src_b48;
- src_c1 -= src_c13 * src_b48;
-
- ST_SP4(src_c12, src_c13, src_c14, src_c15, a - 16, 4);
- ST_SP2(src_c12, src_c13, c_nxt6line, 4);
- ST_SP2(src_c14, src_c15, c_nxt7line, 4);
-
- src_b = LD_SP(b + 40);
- SPLATI_W4_SP(src_b, src_b40, src_b41, src_b42, src_b43);
- src_b44 = LD_SP(b + 44);
- src_b45 = (v4f32) __msa_splati_w((v4i32) src_b44, 1);
- src_b44 = (v4f32) __msa_splati_w((v4i32) src_b44, 0);
-
- src_c10 *= src_b45;
- src_c11 *= src_b45;
- src_c8 -= src_c10 * src_b44;
- src_c9 -= src_c11 * src_b44;
- src_c6 -= src_c10 * src_b43;
- src_c7 -= src_c11 * src_b43;
- src_c4 -= src_c10 * src_b42;
- src_c5 -= src_c11 * src_b42;
- src_c2 -= src_c10 * src_b41;
- src_c3 -= src_c11 * src_b41;
- src_c0 -= src_c10 * src_b40;
- src_c1 -= src_c11 * src_b40;
-
- src_b = LD_SP(b + 32);
- SPLATI_W4_SP(src_b, src_b32, src_b33, src_b34, src_b35);
- src_b36 = __msa_cast_to_vector_float(*(b + 36));
- src_b36 = (v4f32) __msa_splati_w((v4i32) src_b36, 0);
-
- src_c8 *= src_b36;
- src_c9 *= src_b36;
- src_c6 -= src_c8 * src_b35;
- src_c7 -= src_c9 * src_b35;
- src_c4 -= src_c8 * src_b34;
- src_c5 -= src_c9 * src_b34;
- src_c2 -= src_c8 * src_b33;
- src_c3 -= src_c9 * src_b33;
- src_c0 -= src_c8 * src_b32;
- src_c1 -= src_c9 * src_b32;
-
- ST_SP4(src_c8, src_c9, src_c10, src_c11, a - 32, 4);
- ST_SP2(src_c8, src_c9, c_nxt4line, 4);
- ST_SP2(src_c10, src_c11, c_nxt5line, 4);
-
- src_b = LD_SP(b + 24);
- SPLATI_W4_SP(src_b, src_b24, src_b25, src_b26, src_b27);
-
- src_c6 *= src_b27;
- src_c7 *= src_b27;
- src_c4 -= src_c6 * src_b26;
- src_c5 -= src_c7 * src_b26;
- src_c2 -= src_c6 * src_b25;
- src_c3 -= src_c7 * src_b25;
- src_c0 -= src_c6 * src_b24;
- src_c1 -= src_c7 * src_b24;
-
- src_b16 = LD_SP(b + 16);
- src_b18 = (v4f32) __msa_splati_w((v4i32) src_b16, 2);
- src_b17 = (v4f32) __msa_splati_w((v4i32) src_b16, 1);
- src_b16 = (v4f32) __msa_splati_w((v4i32) src_b16, 0);
-
- src_c4 *= src_b18;
- src_c5 *= src_b18;
- src_c2 -= src_c4 * src_b17;
- src_c3 -= src_c5 * src_b17;
- src_c0 -= src_c4 * src_b16;
- src_c1 -= src_c5 * src_b16;
-
- ST_SP4(src_c4, src_c5, src_c6, src_c7, a - 48, 4);
- ST_SP2(src_c4, src_c5, c_nxt2line, 4);
- ST_SP2(src_c6, src_c7, c_nxt3line, 4);
-
- src_b9 = __msa_cast_to_vector_float(*(b + 9));
- src_b9 = (v4f32) __msa_splati_w((v4i32) src_b9, 0);
- src_b8 = __msa_cast_to_vector_float(*(b + 8));
- src_b8 = (v4f32) __msa_splati_w((v4i32) src_b8, 0);
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- src_c2 *= src_b9;
- src_c3 *= src_b9;
- src_c0 -= src_c2 * src_b8;
- src_c1 -= src_c3 * src_b8;
-
- src_c0 *= src_b0;
- src_c1 *= src_b0;
-
- ST_SP4(src_c0, src_c1, src_c2, src_c3, a - 64, 4);
-
- ST_SP2(src_c0, src_c1, c, 4);
- ST_SP2(src_c2, src_c3, c_nxt1line, 4);
- }
-
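- /* The remaining ssolve_MxN_rt_msa helpers follow the same two-phase pattern
-    (GEMM update of the C tile, then back substitution against the packed N x N
-    triangular block of B) for the smaller tile sizes handled by the driver. */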
- static void ssolve_8x4_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_c2, src_c3, src_c4, src_c5, src_c6, src_c7;
- v4f32 src_b, src_b0, src_b4, src_b5, src_b8, src_b9, src_b10, src_b12;
- v4f32 src_b13, src_b14, src_b15;
- FLOAT *c_nxt1line = c + ldc;
- FLOAT *c_nxt2line = c + 2 * ldc;
- FLOAT *c_nxt3line = c + 3 * ldc;
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a0, src_a1, src_b1, src_b2, src_b3;
- v4f32 res0, res1, res2, res3, res4, res5, res6, res7;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 = src_a0 * src_b0;
- res1 = src_a1 * src_b0;
- res2 = src_a0 * src_b1;
- res3 = src_a1 * src_b1;
- res4 = src_a0 * src_b2;
- res5 = src_a1 * src_b2;
- res6 = src_a0 * src_b3;
- res7 = src_a1 * src_b3;
-
- for (k = (bk - 1) / 2; k--;)
- {
- aa += 8;
- bb += 4;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
- res4 += src_a0 * src_b2;
- res5 += src_a1 * src_b2;
- res6 += src_a0 * src_b3;
- res7 += src_a1 * src_b3;
-
- aa += 8;
- bb += 4;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
- res4 += src_a0 * src_b2;
- res5 += src_a1 * src_b2;
- res6 += src_a0 * src_b3;
- res7 += src_a1 * src_b3;
- }
-
- if ((bk - 1) & 1)
- {
- aa += 8;
- bb += 4;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
- res4 += src_a0 * src_b2;
- res5 += src_a1 * src_b2;
- res6 += src_a0 * src_b3;
- res7 += src_a1 * src_b3;
-
- }
-
- LD_SP2(c, 4, src_c0, src_c1);
- LD_SP2(c_nxt1line, 4, src_c2, src_c3);
- LD_SP2(c_nxt2line, 4, src_c4, src_c5);
- LD_SP2(c_nxt3line, 4, src_c6, src_c7);
-
- src_c0 -= res0;
- src_c1 -= res1;
- src_c2 -= res2;
- src_c3 -= res3;
- src_c4 -= res4;
- src_c5 -= res5;
- src_c6 -= res6;
- src_c7 -= res7;
- }
- else
- {
- LD_SP2(c, 4, src_c0, src_c1);
- LD_SP2(c_nxt1line, 4, src_c2, src_c3);
- LD_SP2(c_nxt2line, 4, src_c4, src_c5);
- LD_SP2(c_nxt3line, 4, src_c6, src_c7);
- }
-
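- /* Step back to the packed 8x4 output tile of A and the 4x4 triangular block of B,
-    then solve the tile from its last line (c_nxt3line, b[15]) back to the first. */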
- a -= 32;
- b -= 16;
-
- src_b = LD_SP(b + 12);
- SPLATI_W4_SP(src_b, src_b12, src_b13, src_b14, src_b15);
- src_b8 = LD_SP(b + 8);
- src_b10 = (v4f32) __msa_splati_w((v4i32) src_b8, 2);
- src_b9 = (v4f32) __msa_splati_w((v4i32) src_b8, 1);
- src_b8 = (v4f32) __msa_splati_w((v4i32) src_b8, 0);
- src_b5 = __msa_cast_to_vector_float(*(b + 5));
- src_b5 = (v4f32) __msa_splati_w((v4i32) src_b5, 0);
- src_b4 = __msa_cast_to_vector_float(*(b + 4));
- src_b4 = (v4f32) __msa_splati_w((v4i32) src_b4, 0);
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- src_c7 *= src_b15;
- src_c6 *= src_b15;
- src_c5 -= src_c7 * src_b14;
- src_c4 -= src_c6 * src_b14;
- src_c3 -= src_c7 * src_b13;
- src_c2 -= src_c6 * src_b13;
- src_c1 -= src_c7 * src_b12;
- src_c0 -= src_c6 * src_b12;
-
- src_c5 *= src_b10;
- src_c4 *= src_b10;
- src_c3 -= src_c5 * src_b9;
- src_c2 -= src_c4 * src_b9;
- src_c1 -= src_c5 * src_b8;
- src_c0 -= src_c4 * src_b8;
-
- src_c3 *= src_b5;
- src_c2 *= src_b5;
- src_c1 -= src_c3 * src_b4;
- src_c0 -= src_c2 * src_b4;
-
- src_c1 *= src_b0;
- src_c0 *= src_b0;
-
- ST_SP4(src_c0, src_c1, src_c2, src_c3, a, 4);
- ST_SP4(src_c4, src_c5, src_c6, src_c7, a + 16, 4);
-
- ST_SP2(src_c0, src_c1, c, 4);
- ST_SP2(src_c2, src_c3, c_nxt1line, 4);
- ST_SP2(src_c4, src_c5, c_nxt2line, 4);
- ST_SP2(src_c6, src_c7, c_nxt3line, 4);
- }
-
- static void ssolve_8x2_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_c2, src_c3, src_b0, src_b2, src_b3;
- FLOAT *c_nxt1line = c + ldc;
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a0, src_a1, src_b1, res0, res1, res2, res3;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
- src_b1 = __msa_cast_to_vector_float(*(bb + 1));
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b1, 0);
-
- res0 = src_a0 * src_b0;
- res1 = src_a1 * src_b0;
- res2 = src_a0 * src_b1;
- res3 = src_a1 * src_b1;
-
- for (k = (bk - 1) >> 1; k--;)
- {
- aa += 8;
- bb += 2;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
- src_b1 = __msa_cast_to_vector_float(*(bb + 1));
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b1, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
-
- aa += 8;
- bb += 2;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
- src_b1 = __msa_cast_to_vector_float(*(bb + 1));
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b1, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
- }
-
- if ((bk - 1) & 1)
- {
- aa += 8;
- bb += 2;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
- src_b1 = __msa_cast_to_vector_float(*(bb + 1));
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b1, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- res2 += src_a0 * src_b1;
- res3 += src_a1 * src_b1;
- }
-
- LD_SP2(c, 4, src_c0, src_c1);
- LD_SP2(c_nxt1line, 4, src_c2, src_c3);
-
- src_c0 -= res0;
- src_c1 -= res1;
- src_c2 -= res2;
- src_c3 -= res3;
- }
- else
- {
- LD_SP2(c, 4, src_c0, src_c1);
- LD_SP2(c_nxt1line, 4, src_c2, src_c3);
- }
-
- a -= 16;
- b -= 4;
-
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
- src_b2 = __msa_cast_to_vector_float(*(b + 2));
- src_b2 = (v4f32) __msa_splati_w((v4i32) src_b2, 0);
- src_b3 = __msa_cast_to_vector_float(*(b + 3));
- src_b3 = (v4f32) __msa_splati_w((v4i32) src_b3, 0);
-
- src_c2 *= src_b3;
- src_c3 *= src_b3;
- src_c0 -= src_c2 * src_b2;
- src_c1 -= src_c3 * src_b2;
- src_c0 *= src_b0;
- src_c1 *= src_b0;
-
- ST_SP4(src_c0, src_c1, src_c2, src_c3, a, 4);
- ST_SP2(src_c0, src_c1, c, 4);
- ST_SP2(src_c2, src_c3, c_nxt1line, 4);
- }
-
- static void ssolve_8x1_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_b0;
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a0, src_a1, res0, res1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 = src_a0 * src_b0;
- res1 = src_a1 * src_b0;
-
- for (k = (bk - 1) >> 2; k--;)
- {
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
-
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
-
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
-
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- }
-
- if ((bk - 1) & 3)
- {
- if ((bk - 1) & 2)
- {
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
-
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- }
-
- if ((bk - 1) & 1)
- {
- aa += 8;
- bb += 1;
-
- LD_SP2(aa, 4, src_a0, src_a1);
-
- src_b0 = __msa_cast_to_vector_float(*bb);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a0 * src_b0;
- res1 += src_a1 * src_b0;
- }
- }
-
- LD_SP2(c, 4, src_c0, src_c1);
-
- src_c0 -= res0;
- src_c1 -= res1;
- }
- else
- {
- LD_SP2(c, 4, src_c0, src_c1);
- }
-
- a -= 8;
- b -= 1;
-
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- src_c0 *= src_b0;
- src_c1 *= src_b0;
-
- ST_SP2(src_c0, src_c1, a, 4);
- ST_SP2(src_c0, src_c1, c, 4);
- }
-
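- /* 4-row variants: each line of the C tile fits in a single v4f32, so only one
-    vector accumulator per line is needed. */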
- static void ssolve_4x8_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_c2, src_c3, src_c4, src_c5, src_c6, src_c7;
- v4f32 src_b, src_b0, src_b8, src_b9, src_b16, src_b17, src_b18, src_b24;
- v4f32 src_b25, src_b26, src_b27, src_b32, src_b33, src_b34, src_b35;
- v4f32 src_b36, src_b40, src_b41, src_b42, src_b43, src_b44, src_b45;
- v4f32 src_b48, src_b49, src_b50, src_b51, src_b52, src_b53, src_b54;
- v4f32 src_b56, src_b57, src_b58, src_b59, src_b60, src_b61, src_b62, src_b63;
- FLOAT *c_nxt1line = c + ldc;
- FLOAT *c_nxt2line = c + 2 * ldc;
- FLOAT *c_nxt3line = c + 3 * ldc;
- FLOAT *c_nxt4line = c + 4 * ldc;
- FLOAT *c_nxt5line = c + 5 * ldc;
- FLOAT *c_nxt6line = c + 6 * ldc;
- FLOAT *c_nxt7line = c + 7 * ldc;
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a0, src_b1, src_b2, src_b3;
- v4f32 res0, res1, res2, res3, res4, res5, res6, res7;
-
- src_a0 = LD_SP(aa);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 = src_a0 * src_b0;
- res1 = src_a0 * src_b1;
- res2 = src_a0 * src_b2;
- res3 = src_a0 * src_b3;
-
- src_b = LD_SP(bb + 4);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res4 = src_a0 * src_b0;
- res5 = src_a0 * src_b1;
- res6 = src_a0 * src_b2;
- res7 = src_a0 * src_b3;
-
- for (k = (bk - 1); k--;)
- {
- aa += 4;
- bb += 8;
-
- src_a0 = LD_SP(aa);
-
- src_b = LD_SP(bb + 0);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 += src_a0 * src_b0;
- res1 += src_a0 * src_b1;
- res2 += src_a0 * src_b2;
- res3 += src_a0 * src_b3;
-
- src_b = LD_SP(bb + 4);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res4 += src_a0 * src_b0;
- res5 += src_a0 * src_b1;
- res6 += src_a0 * src_b2;
- res7 += src_a0 * src_b3;
- }
-
- src_c0 = LD_SP(c);
- src_c1 = LD_SP(c_nxt1line);
- src_c2 = LD_SP(c_nxt2line);
- src_c3 = LD_SP(c_nxt3line);
- src_c4 = LD_SP(c_nxt4line);
- src_c5 = LD_SP(c_nxt5line);
- src_c6 = LD_SP(c_nxt6line);
- src_c7 = LD_SP(c_nxt7line);
-
- src_c0 -= res0;
- src_c1 -= res1;
- src_c2 -= res2;
- src_c3 -= res3;
- src_c4 -= res4;
- src_c5 -= res5;
- src_c6 -= res6;
- src_c7 -= res7;
- }
- else
- {
- src_c0 = LD_SP(c);
- src_c1 = LD_SP(c_nxt1line);
- src_c2 = LD_SP(c_nxt2line);
- src_c3 = LD_SP(c_nxt3line);
- src_c4 = LD_SP(c_nxt4line);
- src_c5 = LD_SP(c_nxt5line);
- src_c6 = LD_SP(c_nxt6line);
- src_c7 = LD_SP(c_nxt7line);
- }
-
- a -= 32;
- b -= 64;
-
- src_b = LD_SP(b + 60);
- SPLATI_W4_SP(src_b, src_b60, src_b61, src_b62, src_b63);
- src_b = LD_SP(b + 56);
- SPLATI_W4_SP(src_b, src_b56, src_b57, src_b58, src_b59);
-
- src_b = LD_SP(b + 48);
- SPLATI_W4_SP(src_b, src_b48, src_b49, src_b50, src_b51);
- src_b52 = LD_SP(b + 52);
- src_b54 = (v4f32) __msa_splati_w((v4i32) src_b52, 2);
- src_b53 = (v4f32) __msa_splati_w((v4i32) src_b52, 1);
- src_b52 = (v4f32) __msa_splati_w((v4i32) src_b52, 0);
-
- src_b = LD_SP(b + 40);
- SPLATI_W4_SP(src_b, src_b40, src_b41, src_b42, src_b43);
- src_b44 = LD_SP(b + 44);
- src_b45 = (v4f32) __msa_splati_w((v4i32) src_b44, 1);
- src_b44 = (v4f32) __msa_splati_w((v4i32) src_b44, 0);
-
- src_b = LD_SP(b + 32);
- SPLATI_W4_SP(src_b, src_b32, src_b33, src_b34, src_b35);
- src_b36 = __msa_cast_to_vector_float(*(b + 36));
- src_b36 = (v4f32) __msa_splati_w((v4i32) src_b36, 0);
-
- src_b = LD_SP(b + 24);
- SPLATI_W4_SP(src_b, src_b24, src_b25, src_b26, src_b27);
-
- src_b16 = LD_SP(b + 16);
- src_b18 = (v4f32) __msa_splati_w((v4i32) src_b16, 2);
- src_b17 = (v4f32) __msa_splati_w((v4i32) src_b16, 1);
- src_b16 = (v4f32) __msa_splati_w((v4i32) src_b16, 0);
-
- src_b9 = __msa_cast_to_vector_float(*(b + 9));
- src_b9 = (v4f32) __msa_splati_w((v4i32) src_b9, 0);
- src_b8 = __msa_cast_to_vector_float(*(b + 8));
- src_b8 = (v4f32) __msa_splati_w((v4i32) src_b8, 0);
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- src_c7 *= src_b63;
- src_c6 -= src_c7 * src_b62;
- src_c5 -= src_c7 * src_b61;
- src_c4 -= src_c7 * src_b60;
- src_c3 -= src_c7 * src_b59;
- src_c2 -= src_c7 * src_b58;
- src_c1 -= src_c7 * src_b57;
- src_c0 -= src_c7 * src_b56;
-
- src_c6 *= src_b54;
- src_c5 -= src_c6 * src_b53;
- src_c4 -= src_c6 * src_b52;
- src_c3 -= src_c6 * src_b51;
- src_c2 -= src_c6 * src_b50;
- src_c1 -= src_c6 * src_b49;
- src_c0 -= src_c6 * src_b48;
-
- src_c5 *= src_b45;
- src_c4 -= src_c5 * src_b44;
- src_c3 -= src_c5 * src_b43;
- src_c2 -= src_c5 * src_b42;
- src_c1 -= src_c5 * src_b41;
- src_c0 -= src_c5 * src_b40;
-
- src_c4 *= src_b36;
- src_c3 -= src_c4 * src_b35;
- src_c2 -= src_c4 * src_b34;
- src_c1 -= src_c4 * src_b33;
- src_c0 -= src_c4 * src_b32;
-
- src_c3 *= src_b27;
- src_c2 -= src_c3 * src_b26;
- src_c1 -= src_c3 * src_b25;
- src_c0 -= src_c3 * src_b24;
-
- src_c2 *= src_b18;
- src_c1 -= src_c2 * src_b17;
- src_c0 -= src_c2 * src_b16;
-
- src_c1 *= src_b9;
- src_c0 -= src_c1 * src_b8;
-
- src_c0 *= src_b0;
-
- ST_SP4(src_c0, src_c1, src_c2, src_c3, a, 4);
- ST_SP4(src_c4, src_c5, src_c6, src_c7, a + 16, 4);
-
- ST_SP(src_c0, c);
- ST_SP(src_c1, c_nxt1line);
- ST_SP(src_c2, c_nxt2line);
- ST_SP(src_c3, c_nxt3line);
- ST_SP(src_c4, c_nxt4line);
- ST_SP(src_c5, c_nxt5line);
- ST_SP(src_c6, c_nxt6line);
- ST_SP(src_c7, c_nxt7line);
- }
-
- static void ssolve_4x4_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_c2, src_c3, src_b;
- v4f32 src_b0, src_b4, src_b5, src_b8, src_b9, src_b10, src_b12, src_b13;
- v4f32 src_b14, src_b15;
- FLOAT *c_nxt1line = c + ldc;
- FLOAT *c_nxt2line = c + 2 * ldc;
- FLOAT *c_nxt3line = c + 3 * ldc;
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a, src_b1, src_b2, src_b3, res0, res1, res2, res3;
-
- src_a = LD_SP(aa);
-
- src_b = LD_SP(bb);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
- res0 = src_a * src_b0;
- res1 = src_a * src_b1;
- res2 = src_a * src_b2;
- res3 = src_a * src_b3;
-
- for (k = ((bk - 1) >> 1); k--;)
- {
- aa += 4;
- bb += 4;
-
- src_a = LD_SP(aa);
-
- src_b = LD_SP(bb);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
-
- res0 += src_a * src_b0;
- res1 += src_a * src_b1;
- res2 += src_a * src_b2;
- res3 += src_a * src_b3;
-
- aa += 4;
- bb += 4;
-
- src_a = LD_SP(aa);
-
- src_b = LD_SP(bb);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
-
- res0 += src_a * src_b0;
- res1 += src_a * src_b1;
- res2 += src_a * src_b2;
- res3 += src_a * src_b3;
- }
-
- if ((bk - 1) & 1)
- {
- aa += 4;
- bb += 4;
-
- src_a = LD_SP(aa);
-
- src_b = LD_SP(bb);
- SPLATI_W4_SP(src_b, src_b0, src_b1, src_b2, src_b3);
-
- res0 += src_a * src_b0;
- res1 += src_a * src_b1;
- res2 += src_a * src_b2;
- res3 += src_a * src_b3;
- }
-
- src_c0 = LD_SP(c);
- src_c1 = LD_SP(c_nxt1line);
- src_c2 = LD_SP(c_nxt2line);
- src_c3 = LD_SP(c_nxt3line);
-
- src_c0 -= res0;
- src_c1 -= res1;
- src_c2 -= res2;
- src_c3 -= res3;
- }
- else
- {
- src_c0 = LD_SP(c);
- src_c1 = LD_SP(c_nxt1line);
- src_c2 = LD_SP(c_nxt2line);
- src_c3 = LD_SP(c_nxt3line);
- }
-
- a -= 16;
- b -= 16;
-
- src_b = LD_SP(b + 12);
- SPLATI_W4_SP(src_b, src_b12, src_b13, src_b14, src_b15);
- src_b8 = LD_SP(b + 8);
- src_b10 = (v4f32) __msa_splati_w((v4i32) src_b8, 2);
- src_b9 = (v4f32) __msa_splati_w((v4i32) src_b8, 1);
- src_b8 = (v4f32) __msa_splati_w((v4i32) src_b8, 0);
- src_b5 = __msa_cast_to_vector_float(*(b + 5));
- src_b5 = (v4f32) __msa_splati_w((v4i32) src_b5, 0);
- src_b4 = __msa_cast_to_vector_float(*(b + 4));
- src_b4 = (v4f32) __msa_splati_w((v4i32) src_b4, 0);
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- src_c3 *= src_b15;
- src_c2 -= src_c3 * src_b14;
- src_c1 -= src_c3 * src_b13;
- src_c0 -= src_c3 * src_b12;
-
- src_c2 *= src_b10;
- src_c1 -= src_c2 * src_b9;
- src_c0 -= src_c2 * src_b8;
-
- src_c1 *= src_b5;
- src_c0 -= src_c1 * src_b4;
-
- src_c0 *= src_b0;
-
- ST_SP4(src_c0, src_c1, src_c2, src_c3, a, 4);
-
- ST_SP(src_c0, c);
- ST_SP(src_c1, c_nxt1line);
- ST_SP(src_c2, c_nxt2line);
- ST_SP(src_c3, c_nxt3line);
- }
-
- static void ssolve_4x2_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- v4f32 src_c0, src_c1, src_b0, src_b2, src_b3;
- FLOAT *c_nxt1line = c + ldc;
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- v4f32 src_a, src_b1, res0, res1;
-
- src_a = LD_SP(aa);
- src_b0 = LD_SP(bb);
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b0, 1);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 = src_a * src_b0;
- res1 = src_a * src_b1;
-
- for (k = ((bk - 1) >> 1); k--;)
- {
- aa += 4;
- bb += 2;
-
- src_a = LD_SP(aa);
- src_b0 = LD_SP(bb);
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b0, 1);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a * src_b0;
- res1 += src_a * src_b1;
-
- aa += 4;
- bb += 2;
-
- src_a = LD_SP(aa);
- src_b0 = LD_SP(bb);
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b0, 1);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a * src_b0;
- res1 += src_a * src_b1;
- }
-
- if ((bk - 1) & 1)
- {
- aa += 4;
- bb += 2;
-
- src_a = LD_SP(aa);
- src_b0 = LD_SP(bb);
- src_b1 = (v4f32) __msa_splati_w((v4i32) src_b0, 1);
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- res0 += src_a * src_b0;
- res1 += src_a * src_b1;
- }
-
- src_c0 = LD_SP(c);
- src_c1 = LD_SP(c_nxt1line);
-
- src_c0 -= res0;
- src_c1 -= res1;
- }
- else
- {
- src_c0 = LD_SP(c);
- src_c1 = LD_SP(c_nxt1line);
- }
-
- a -= 8;
- b -= 4;
-
- src_b3 = __msa_cast_to_vector_float(*(b + 3));
- src_b3 = (v4f32) __msa_splati_w((v4i32) src_b3, 0);
- src_b2 = __msa_cast_to_vector_float(*(b + 2));
- src_b2 = (v4f32) __msa_splati_w((v4i32) src_b2, 0);
- src_b0 = __msa_cast_to_vector_float(*(b + 0));
- src_b0 = (v4f32) __msa_splati_w((v4i32) src_b0, 0);
-
- src_c1 *= src_b3;
- src_c0 -= src_c1 * src_b2;
- src_c0 *= src_b0;
-
- ST_SP2(src_c0, src_c1, a, 4);
-
- ST_SP(src_c0, c);
- ST_SP(src_c1, c_nxt1line);
- }
-
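- /* The 4x1 kernel and the 2xN / 1xN kernels below operate on tiles small enough
-    that plain scalar arithmetic is used instead of MSA vectors. */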
- static void ssolve_4x1_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG bk)
- {
- FLOAT b0, c0, c1, c2, c3;
-
- c0 = *(c + 0);
- c1 = *(c + 1);
- c2 = *(c + 2);
- c3 = *(c + 3);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT t0, t1, t2, t3;
-
- t0 = aa[0] * bb[0];
- t1 = aa[1] * bb[0];
- t2 = aa[2] * bb[0];
- t3 = aa[3] * bb[0];
-
- for (k = (bk - 1); k--;)
- {
- aa += 4;
- bb += 1;
-
- t0 += aa[0] * bb[0];
- t1 += aa[1] * bb[0];
- t2 += aa[2] * bb[0];
- t3 += aa[3] * bb[0];
- }
-
- c0 -= t0;
- c1 -= t1;
- c2 -= t2;
- c3 -= t3;
- }
-
- a -= 4;
- b -= 1;
-
- b0 = *b;
-
- c0 *= b0;
- c1 *= b0;
- c2 *= b0;
- c3 *= b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
- *(a + 2) = c2;
- *(a + 3) = c3;
-
- *(c + 0) = c0;
- *(c + 1) = c1;
- *(c + 2) = c2;
- *(c + 3) = c3;
- }
-
- static void ssolve_2x8_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- FLOAT b0, b8, b9, b16, b17, b18, b24, b25, b26, b27, b32, b33, b34, b35;
- FLOAT b36, b40, b41, b42, b43, b44, b45, b48, b49, b50, b51, b52, b53, b54;
- FLOAT b56, b57, b58, b59, b60, b61, b62, b63, c0_nxt7, c1_nxt7;
- FLOAT c0, c1, c0_nxt1, c1_nxt1, c0_nxt2, c1_nxt2, c0_nxt3, c1_nxt3;
- FLOAT c0_nxt4, c1_nxt4, c0_nxt5, c1_nxt5, c0_nxt6, c1_nxt6;
-
- c0 = *(c + 0);
- c1 = *(c + 1);
- c0_nxt1 = *(c + 0 + 1 * ldc);
- c1_nxt1 = *(c + 1 + 1 * ldc);
- c0_nxt2 = *(c + 0 + 2 * ldc);
- c1_nxt2 = *(c + 1 + 2 * ldc);
- c0_nxt3 = *(c + 0 + 3 * ldc);
- c1_nxt3 = *(c + 1 + 3 * ldc);
- c0_nxt4 = *(c + 0 + 4 * ldc);
- c1_nxt4 = *(c + 1 + 4 * ldc);
- c0_nxt5 = *(c + 0 + 5 * ldc);
- c1_nxt5 = *(c + 1 + 5 * ldc);
- c0_nxt6 = *(c + 0 + 6 * ldc);
- c1_nxt6 = *(c + 1 + 6 * ldc);
- c0_nxt7 = *(c + 0 + 7 * ldc);
- c1_nxt7 = *(c + 1 + 7 * ldc);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res[16];
-
- res[0] = aa[0] * bb[0];
- res[1] = aa[1] * bb[0];
- res[2] = aa[0] * bb[1];
- res[3] = aa[1] * bb[1];
- res[4] = aa[0] * bb[2];
- res[5] = aa[1] * bb[2];
- res[6] = aa[0] * bb[3];
- res[7] = aa[1] * bb[3];
- res[8] = aa[0] * bb[4];
- res[9] = aa[1] * bb[4];
- res[10] = aa[0] * bb[5];
- res[11] = aa[1] * bb[5];
- res[12] = aa[0] * bb[6];
- res[13] = aa[1] * bb[6];
- res[14] = aa[0] * bb[7];
- res[15] = aa[1] * bb[7];
-
- for (k = (bk - 1); k--;)
- {
- aa += 2;
- bb += 8;
-
- res[0] += aa[0] * bb[0];
- res[1] += aa[1] * bb[0];
- res[2] += aa[0] * bb[1];
- res[3] += aa[1] * bb[1];
- res[4] += aa[0] * bb[2];
- res[5] += aa[1] * bb[2];
- res[6] += aa[0] * bb[3];
- res[7] += aa[1] * bb[3];
- res[8] += aa[0] * bb[4];
- res[9] += aa[1] * bb[4];
- res[10] += aa[0] * bb[5];
- res[11] += aa[1] * bb[5];
- res[12] += aa[0] * bb[6];
- res[13] += aa[1] * bb[6];
- res[14] += aa[0] * bb[7];
- res[15] += aa[1] * bb[7];
- }
-
- c0 -= res[0];
- c1 -= res[1];
- c0_nxt1 -= res[2];
- c1_nxt1 -= res[3];
- c0_nxt2 -= res[4];
- c1_nxt2 -= res[5];
- c0_nxt3 -= res[6];
- c1_nxt3 -= res[7];
- c0_nxt4 -= res[8];
- c1_nxt4 -= res[9];
- c0_nxt5 -= res[10];
- c1_nxt5 -= res[11];
- c0_nxt6 -= res[12];
- c1_nxt6 -= res[13];
- c0_nxt7 -= res[14];
- c1_nxt7 -= res[15];
- }
-
- a -= 16;
- b -= 64;
-
- b0 = *(b + 0);
- b8 = *(b + 8);
- b9 = *(b + 9);
- b16 = *(b + 16);
- b17 = *(b + 17);
- b18 = *(b + 18);
- b24 = *(b + 24);
- b25 = *(b + 25);
- b26 = *(b + 26);
- b27 = *(b + 27);
- b32 = *(b + 32);
- b33 = *(b + 33);
- b34 = *(b + 34);
- b35 = *(b + 35);
- b36 = *(b + 36);
- b40 = *(b + 40);
- b41 = *(b + 41);
- b42 = *(b + 42);
- b43 = *(b + 43);
- b44 = *(b + 44);
- b45 = *(b + 45);
- b48 = *(b + 48);
- b49 = *(b + 49);
- b50 = *(b + 50);
- b51 = *(b + 51);
- b52 = *(b + 52);
- b53 = *(b + 53);
- b54 = *(b + 54);
- b56 = *(b + 56);
- b57 = *(b + 57);
- b58 = *(b + 58);
- b59 = *(b + 59);
- b60 = *(b + 60);
- b61 = *(b + 61);
- b62 = *(b + 62);
- b63 = *(b + 63);
-
- c0_nxt7 *= b63;
- c1_nxt7 *= b63;
-
- c0_nxt6 -= c0_nxt7 * b62;
- c1_nxt6 -= c1_nxt7 * b62;
-
- c0_nxt6 *= b54;
- c1_nxt6 *= b54;
-
- c0_nxt5 -= c0_nxt7 * b61;
- c1_nxt5 -= c1_nxt7 * b61;
-
- c0_nxt5 -= c0_nxt6 * b53;
- c1_nxt5 -= c1_nxt6 * b53;
-
- c0_nxt5 *= b45;
- c1_nxt5 *= b45;
-
- c0_nxt4 -= c0_nxt7 * b60;
- c1_nxt4 -= c1_nxt7 * b60;
-
- c0_nxt4 -= c0_nxt6 * b52;
- c1_nxt4 -= c1_nxt6 * b52;
-
- c0_nxt4 -= c0_nxt5 * b44;
- c1_nxt4 -= c1_nxt5 * b44;
-
- c0_nxt4 *= b36;
- c1_nxt4 *= b36;
-
- c0_nxt3 -= c0_nxt7 * b59;
- c1_nxt3 -= c1_nxt7 * b59;
-
- c0_nxt3 -= c0_nxt6 * b51;
- c1_nxt3 -= c1_nxt6 * b51;
-
- c0_nxt3 -= c0_nxt5 * b43;
- c1_nxt3 -= c1_nxt5 * b43;
-
- c0_nxt3 -= c0_nxt4 * b35;
- c1_nxt3 -= c1_nxt4 * b35;
-
- c0_nxt3 *= b27;
- c1_nxt3 *= b27;
-
- c0_nxt2 -= c0_nxt7 * b58;
- c1_nxt2 -= c1_nxt7 * b58;
-
- c0_nxt2 -= c0_nxt6 * b50;
- c1_nxt2 -= c1_nxt6 * b50;
-
- c0_nxt2 -= c0_nxt5 * b42;
- c1_nxt2 -= c1_nxt5 * b42;
-
- c0_nxt2 -= c0_nxt4 * b34;
- c1_nxt2 -= c1_nxt4 * b34;
-
- c0_nxt2 -= c0_nxt3 * b26;
- c1_nxt2 -= c1_nxt3 * b26;
-
- c0_nxt2 *= b18;
- c1_nxt2 *= b18;
-
- c0_nxt1 -= c0_nxt7 * b57;
- c1_nxt1 -= c1_nxt7 * b57;
-
- c0_nxt1 -= c0_nxt6 * b49;
- c1_nxt1 -= c1_nxt6 * b49;
-
- c0_nxt1 -= c0_nxt5 * b41;
- c1_nxt1 -= c1_nxt5 * b41;
-
- c0_nxt1 -= c0_nxt4 * b33;
- c1_nxt1 -= c1_nxt4 * b33;
-
- c0_nxt1 -= c0_nxt3 * b25;
- c1_nxt1 -= c1_nxt3 * b25;
-
- c0_nxt1 -= c0_nxt2 * b17;
- c1_nxt1 -= c1_nxt2 * b17;
-
- c0_nxt1 *= b9;
- c1_nxt1 *= b9;
-
- c0 -= c0_nxt7 * b56;
- c1 -= c1_nxt7 * b56;
-
- c0 -= c0_nxt6 * b48;
- c1 -= c1_nxt6 * b48;
-
- c0 -= c0_nxt5 * b40;
- c1 -= c1_nxt5 * b40;
-
- c0 -= c0_nxt4 * b32;
- c1 -= c1_nxt4 * b32;
-
- c0 -= c0_nxt3 * b24;
- c1 -= c1_nxt3 * b24;
-
- c0 -= c0_nxt2 * b16;
- c1 -= c1_nxt2 * b16;
-
- c0 -= c0_nxt1 * b8;
- c1 -= c1_nxt1 * b8;
-
- c0 *= b0;
- c1 *= b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
- *(a + 2) = c0_nxt1;
- *(a + 3) = c1_nxt1;
- *(a + 4) = c0_nxt2;
- *(a + 5) = c1_nxt2;
- *(a + 6) = c0_nxt3;
- *(a + 7) = c1_nxt3;
- *(a + 8) = c0_nxt4;
- *(a + 9) = c1_nxt4;
- *(a + 10) = c0_nxt5;
- *(a + 11) = c1_nxt5;
- *(a + 12) = c0_nxt6;
- *(a + 13) = c1_nxt6;
- *(a + 14) = c0_nxt7;
- *(a + 15) = c1_nxt7;
-
- *(c + 0) = c0;
- *(c + 1) = c1;
- *(c + 0 + 1 * ldc) = c0_nxt1;
- *(c + 1 + 1 * ldc) = c1_nxt1;
- *(c + 0 + 2 * ldc) = c0_nxt2;
- *(c + 1 + 2 * ldc) = c1_nxt2;
- *(c + 0 + 3 * ldc) = c0_nxt3;
- *(c + 1 + 3 * ldc) = c1_nxt3;
- *(c + 0 + 4 * ldc) = c0_nxt4;
- *(c + 1 + 4 * ldc) = c1_nxt4;
- *(c + 0 + 5 * ldc) = c0_nxt5;
- *(c + 1 + 5 * ldc) = c1_nxt5;
- *(c + 0 + 6 * ldc) = c0_nxt6;
- *(c + 1 + 6 * ldc) = c1_nxt6;
- *(c + 0 + 7 * ldc) = c0_nxt7;
- *(c + 1 + 7 * ldc) = c1_nxt7;
- }
-
- static void ssolve_2x4_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- FLOAT b0, b4, b5, b8, b9, b10, b12, b13, b14, b15;
- FLOAT c0, c1, c0_nxt1, c1_nxt1, c0_nxt2, c1_nxt2, c0_nxt3, c1_nxt3;
-
- c0 = *(c + 0);
- c1 = *(c + 1);
- c0_nxt1 = *(c + 0 + 1 * ldc);
- c1_nxt1 = *(c + 1 + 1 * ldc);
- c0_nxt2 = *(c + 0 + 2 * ldc);
- c1_nxt2 = *(c + 1 + 2 * ldc);
- c0_nxt3 = *(c + 0 + 3 * ldc);
- c1_nxt3 = *(c + 1 + 3 * ldc);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res[8];
-
- res[0] = aa[0] * bb[0];
- res[1] = aa[1] * bb[0];
- res[2] = aa[0] * bb[1];
- res[3] = aa[1] * bb[1];
- res[4] = aa[0] * bb[2];
- res[5] = aa[1] * bb[2];
- res[6] = aa[0] * bb[3];
- res[7] = aa[1] * bb[3];
-
- for (k = (bk - 1); k--;)
- {
- aa += 2;
- bb += 4;
-
- res[0] += aa[0] * bb[0];
- res[1] += aa[1] * bb[0];
- res[2] += aa[0] * bb[1];
- res[3] += aa[1] * bb[1];
- res[4] += aa[0] * bb[2];
- res[5] += aa[1] * bb[2];
- res[6] += aa[0] * bb[3];
- res[7] += aa[1] * bb[3];
- }
-
- c0 -= res[0];
- c1 -= res[1];
- c0_nxt1 -= res[2];
- c1_nxt1 -= res[3];
- c0_nxt2 -= res[4];
- c1_nxt2 -= res[5];
- c0_nxt3 -= res[6];
- c1_nxt3 -= res[7];
- }
-
- a -= 8;
- b -= 16;
-
- b0 = *b;
- b4 = *(b + 4);
- b5 = *(b + 5);
- b8 = *(b + 8);
- b9 = *(b + 9);
- b10 = *(b + 10);
- b12 = *(b + 12);
- b13 = *(b + 13);
- b14 = *(b + 14);
- b15 = *(b + 15);
-
- c0_nxt3 *= b15;
- c1_nxt3 *= b15;
-
- c0_nxt2 = (c0_nxt2 - c0_nxt3 * b14) * b10;
- c1_nxt2 = (c1_nxt2 - c1_nxt3 * b14) * b10;
-
- c0_nxt1 = ((c0_nxt1 - c0_nxt3 * b13) - c0_nxt2 * b9) * b5;
- c1_nxt1 = ((c1_nxt1 - c1_nxt3 * b13) - c1_nxt2 * b9) * b5;
-
- c0 = (((c0 - c0_nxt3 * b12) - c0_nxt2 * b8) - c0_nxt1 * b4) * b0;
- c1 = (((c1 - c1_nxt3 * b12) - c1_nxt2 * b8) - c1_nxt1 * b4) * b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
- *(a + 2) = c0_nxt1;
- *(a + 3) = c1_nxt1;
- *(a + 4) = c0_nxt2;
- *(a + 5) = c1_nxt2;
- *(a + 6) = c0_nxt3;
- *(a + 7) = c1_nxt3;
-
- *(c + 0) = c0;
- *(c + 1) = c1;
- *(c + 0 + 1 * ldc) = c0_nxt1;
- *(c + 1 + 1 * ldc) = c1_nxt1;
- *(c + 0 + 2 * ldc) = c0_nxt2;
- *(c + 1 + 2 * ldc) = c1_nxt2;
- *(c + 0 + 3 * ldc) = c0_nxt3;
- *(c + 1 + 3 * ldc) = c1_nxt3;
- }
-
- static void ssolve_2x2_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- FLOAT b0, b2, b3, c0, c1, c0_nxt, c1_nxt;
-
- c0 = *(c + 0);
- c1 = *(c + 1);
- c0_nxt = *(c + 0 + ldc);
- c1_nxt = *(c + 1 + ldc);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res[4];
-
- res[0] = aa[0] * bb[0];
- res[1] = aa[1] * bb[0];
- res[2] = aa[0] * bb[1];
- res[3] = aa[1] * bb[1];
-
- for (k = (bk - 1); k--;)
- {
- aa += 2;
- bb += 2;
-
- res[0] += aa[0] * bb[0];
- res[1] += aa[1] * bb[0];
- res[2] += aa[0] * bb[1];
- res[3] += aa[1] * bb[1];
- }
-
- c0 -= res[0];
- c1 -= res[1];
- c0_nxt -= res[2];
- c1_nxt -= res[3];
- }
-
- a -= 4;
- b -= 4;
-
- b3 = *(b + 3);
- b2 = *(b + 2);
- b0 = *b;
-
- c0_nxt *= b3;
- c1_nxt *= b3;
-
- c0 -= c0_nxt * b2;
- c1 -= c1_nxt * b2;
-
- c0 *= b0;
- c1 *= b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
- *(a + 2) = c0_nxt;
- *(a + 3) = c1_nxt;
-
- *(c + 0) = c0;
- *(c + 1) = c1;
- *(c + 0 + ldc) = c0_nxt;
- *(c + 1 + ldc) = c1_nxt;
- }
-
- static void ssolve_2x1_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG bk)
- {
- FLOAT b0, c0, c1;
-
- c0 = *(c + 0);
- c1 = *(c + 1);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res0, res1;
-
- res0 = aa[0] * bb[0];
- res1 = aa[1] * bb[0];
-
- for (k = (bk - 1); k--;)
- {
- aa += 2;
- bb += 1;
-
- res0 += aa[0] * bb[0];
- res1 += aa[1] * bb[0];
- }
-
- c0 -= res0;
- c1 -= res1;
- }
-
- a -= 2;
- b -= 1;
-
- b0 = *b;
-
- c0 *= b0;
- c1 *= b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
-
- *(c + 0) = c0;
- *(c + 1) = c1;
- }
-
- static void ssolve_1x8_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- FLOAT b0, b8, b9, b16, b17, b18, b24, b25, b26, b27, b32, b33, b34, b35;
- FLOAT b36, b40, b41, b42, b43, b44, b45, b48, b49, b50, b51, b52, b53, b54;
- FLOAT b56, b57, b58, b59, b60, b61, b62, b63;
- FLOAT c0, c1, c2, c3, c4, c5, c6, c7;
-
- c0 = *(c + 0);
- c1 = *(c + 1 * ldc);
- c2 = *(c + 2 * ldc);
- c3 = *(c + 3 * ldc);
- c4 = *(c + 4 * ldc);
- c5 = *(c + 5 * ldc);
- c6 = *(c + 6 * ldc);
- c7 = *(c + 7 * ldc);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT t0, t1, t2, t3, t4, t5, t6, t7;
-
- t0 = aa[0] * bb[0];
- t1 = aa[0] * bb[1];
- t2 = aa[0] * bb[2];
- t3 = aa[0] * bb[3];
- t4 = aa[0] * bb[4];
- t5 = aa[0] * bb[5];
- t6 = aa[0] * bb[6];
- t7 = aa[0] * bb[7];
-
- for (k = (bk - 1); k--;)
- {
- aa += 1;
- bb += 8;
-
- t0 += aa[0] * bb[0];
- t1 += aa[0] * bb[1];
- t2 += aa[0] * bb[2];
- t3 += aa[0] * bb[3];
- t4 += aa[0] * bb[4];
- t5 += aa[0] * bb[5];
- t6 += aa[0] * bb[6];
- t7 += aa[0] * bb[7];
- }
-
- c0 -= t0;
- c1 -= t1;
- c2 -= t2;
- c3 -= t3;
- c4 -= t4;
- c5 -= t5;
- c6 -= t6;
- c7 -= t7;
- }
-
- a -= 8;
- b -= 64;
-
- b0 = *(b + 0);
- b8 = *(b + 8);
- b9 = *(b + 9);
- b16 = *(b + 16);
- b17 = *(b + 17);
- b18 = *(b + 18);
- b24 = *(b + 24);
- b25 = *(b + 25);
- b26 = *(b + 26);
- b27 = *(b + 27);
- b32 = *(b + 32);
- b33 = *(b + 33);
- b34 = *(b + 34);
- b35 = *(b + 35);
- b36 = *(b + 36);
- b40 = *(b + 40);
- b41 = *(b + 41);
- b42 = *(b + 42);
- b43 = *(b + 43);
- b44 = *(b + 44);
- b45 = *(b + 45);
- b48 = *(b + 48);
- b49 = *(b + 49);
- b50 = *(b + 50);
- b51 = *(b + 51);
- b52 = *(b + 52);
- b53 = *(b + 53);
- b54 = *(b + 54);
- b56 = *(b + 56);
- b57 = *(b + 57);
- b58 = *(b + 58);
- b59 = *(b + 59);
- b60 = *(b + 60);
- b61 = *(b + 61);
- b62 = *(b + 62);
- b63 = *(b + 63);
-
- c7 *= b63;
-
- c6 -= c7 * b62;
- c6 *= b54;
-
- c5 -= c7 * b61;
- c5 -= c6 * b53;
- c5 *= b45;
-
- c4 -= c7 * b60;
- c4 -= c6 * b52;
- c4 -= c5 * b44;
- c4 *= b36;
-
- c3 -= c7 * b59;
- c3 -= c6 * b51;
- c3 -= c5 * b43;
- c3 -= c4 * b35;
- c3 *= b27;
-
- c2 -= c7 * b58;
- c2 -= c6 * b50;
- c2 -= c5 * b42;
- c2 -= c4 * b34;
- c2 -= c3 * b26;
- c2 *= b18;
-
- c1 -= c7 * b57;
- c1 -= c6 * b49;
- c1 -= c5 * b41;
- c1 -= c4 * b33;
- c1 -= c3 * b25;
- c1 -= c2 * b17;
- c1 *= b9;
-
- c0 -= c7 * b56;
- c0 -= c6 * b48;
- c0 -= c5 * b40;
- c0 -= c4 * b32;
- c0 -= c3 * b24;
- c0 -= c2 * b16;
- c0 -= c1 * b8;
- c0 *= b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
- *(a + 2) = c2;
- *(a + 3) = c3;
- *(a + 4) = c4;
- *(a + 5) = c5;
- *(a + 6) = c6;
- *(a + 7) = c7;
-
- *(c + 0) = c0;
- *(c + 1 * ldc) = c1;
- *(c + 2 * ldc) = c2;
- *(c + 3 * ldc) = c3;
- *(c + 4 * ldc) = c4;
- *(c + 5 * ldc) = c5;
- *(c + 6 * ldc) = c6;
- *(c + 7 * ldc) = c7;
- }
-
- static void ssolve_1x4_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- FLOAT b0, b4, b5, b8, b9, b10, b12, b13, b14, b15;
- FLOAT c0, c1, c2, c3;
-
- c0 = *(c + 0);
- c1 = *(c + 1 * ldc);
- c2 = *(c + 2 * ldc);
- c3 = *(c + 3 * ldc);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res0, res1, res2, res3;
-
- res0 = aa[0] * bb[0];
- res1 = aa[0] * bb[1];
- res2 = aa[0] * bb[2];
- res3 = aa[0] * bb[3];
-
- for (k = (bk - 1); k--;)
- {
- aa += 1;
- bb += 4;
-
- res0 += aa[0] * bb[0];
- res1 += aa[0] * bb[1];
- res2 += aa[0] * bb[2];
- res3 += aa[0] * bb[3];
- }
-
- c0 -= res0;
- c1 -= res1;
- c2 -= res2;
- c3 -= res3;
- }
-
- a -= 4;
- b -= 16;
-
- b0 = *b;
- b4 = *(b + 4);
- b5 = *(b + 5);
- b8 = *(b + 8);
- b9 = *(b + 9);
- b10 = *(b + 10);
- b12 = *(b + 12);
- b13 = *(b + 13);
- b14 = *(b + 14);
- b15 = *(b + 15);
-
- c3 *= b15;
- c2 = (c2 - c3 * b14) * b10;
- c1 = ((c1 - c3 * b13) - c2 * b9) * b5;
- c0 = (((c0 - c3 * b12) - c2 * b8) - c1 * b4) * b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
- *(a + 2) = c2;
- *(a + 3) = c3;
-
- *(c) = c0;
- *(c + 1 * ldc) = c1;
- *(c + 2 * ldc) = c2;
- *(c + 3 * ldc) = c3;
- }
-
- static void ssolve_1x2_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG ldc, BLASLONG bk)
- {
- FLOAT b0, b2, b3, c0, c1;
-
- c0 = *(c + 0);
- c1 = *(c + ldc);
-
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res0, res1;
-
- res0 = aa[0] * bb[0];
- res1 = aa[0] * bb[1];
-
- for (k = (bk - 1); k--;)
- {
- aa += 1;
- bb += 2;
-
- res0 += aa[0] * bb[0];
- res1 += aa[0] * bb[1];
- }
-
- c0 -= res0;
- c1 -= res1;
- }
-
- a -= 2;
- b -= 4;
-
- b3 = *(b + 3);
- b2 = *(b + 2);
- b0 = *b;
-
- c1 *= b3;
-
- c0 -= c1 * b2;
- c0 *= b0;
-
- *(a + 0) = c0;
- *(a + 1) = c1;
-
- *(c + 0) = c0;
- *(c + ldc) = c1;
- }
-
- static void ssolve_1x1_rt_msa(FLOAT *a, FLOAT *b, FLOAT *c, BLASLONG bk)
- {
- if (bk > 0)
- {
- BLASLONG k;
- FLOAT *aa = a, *bb = b;
- FLOAT res;
-
- res = *aa * *bb;
-
- for (k = (bk - 1); k--;)
- {
- aa++;
- bb++;
-
- res += *aa * *bb;
- }
-
- *c -= res;
- }
-
- *c *= *(a - 1);
- *(b - 1) = *c;
- }
-
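- /* Driver: b and c start one full panel past the end (b += n * k, c += n * ldc) and
-    are stepped backwards, so the n columns are processed from right to left: first
-    the n % 8 remainder (1, then 2, then 4 columns), then full blocks of 8. kk starts
-    at n - offset and is decremented by the block width after every column block;
-    (k - kk) is passed as bk and gives the depth of the GEMM update performed before
-    each solve. */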
- int CNAME(BLASLONG m, BLASLONG n, BLASLONG k, FLOAT dummy1, FLOAT *a, FLOAT *b,
- FLOAT *c, BLASLONG ldc, BLASLONG offset)
- {
- FLOAT *aa, *cc;
- BLASLONG i, j, kk;
-
- kk = n - offset;
- c += n * ldc;
- b += n * k;
-
- if (n & 7)
- {
- if (n & 1)
- {
- aa = a;
- b -= k;
- c -= ldc;
- cc = c;
-
- for (i = (m >> 3); i--;)
- {
- ssolve_8x1_rt_msa(aa + 8 * kk, b + kk, cc, (k - kk));
-
- aa += 8 * k;
- cc += 8;
- }
-
- if (m & 7)
- {
- if (m & 4)
- {
- ssolve_4x1_rt_msa(aa + 4 * kk, b + kk, cc, (k - kk));
-
- aa += 4 * k;
- cc += 4;
- }
-
- if (m & 2)
- {
- ssolve_2x1_rt_msa(aa + 2 * kk, b + kk, cc, (k - kk));
-
- aa += 2 * k;
- cc += 2;
- }
-
- if (m & 1)
- {
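- /* Note the argument order: packed B is passed first and packed A second here;
-    ssolve_1x1_rt_msa reads the (pre-inverted) diagonal through its first pointer
-    and stores the solved value through its second, which matches its body. */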
- ssolve_1x1_rt_msa(b + kk, aa + kk, cc, (k - kk));
-
- aa += k;
- cc += 1;
- }
- }
-
- kk -= 1;
- }
-
- if (n & 2)
- {
- aa = a;
- b -= 2 * k;
- c -= 2 * ldc;
- cc = c;
-
- for (i = (m >> 3); i--;)
- {
- ssolve_8x2_rt_msa(aa + 8 * kk, b + 2 * kk, cc, ldc, (k - kk));
-
- aa += 8 * k;
- cc += 8;
- }
-
- if (m & 7)
- {
- if (m & 4)
- {
- ssolve_4x2_rt_msa(aa + 4 * kk, b + 2 * kk, cc, ldc, (k - kk));
-
- aa += 4 * k;
- cc += 4;
- }
-
- if (m & 2)
- {
- ssolve_2x2_rt_msa(aa + 2 * kk, b + 2 * kk, cc, ldc, (k - kk));
-
- aa += 2 * k;
- cc += 2;
- }
-
- if (m & 1)
- {
- ssolve_1x2_rt_msa(aa + kk, b + 2 * kk, cc, ldc, (k - kk));
-
- aa += k;
- cc += 1;
- }
- }
-
- kk -= 2;
- }
-
- if (n & 4)
- {
- aa = a;
- b -= 4 * k;
- c -= 4 * ldc;
- cc = c;
-
- for (i = (m >> 3); i--;)
- {
- ssolve_8x4_rt_msa(aa + 8 * kk, b + 4 * kk, cc, ldc, (k - kk));
-
- aa += 8 * k;
- cc += 8;
- }
-
- if (m & 7)
- {
- if (m & 4)
- {
- ssolve_4x4_rt_msa(aa + 4 * kk, b + 4 * kk, cc, ldc, (k - kk));
-
- aa += 4 * k;
- cc += 4;
- }
-
- if (m & 2)
- {
- ssolve_2x4_rt_msa(aa + 2 * kk, b + 4 * kk, cc, ldc, (k - kk));
-
- aa += 2 * k;
- cc += 2;
- }
-
- if (m & 1)
- {
- ssolve_1x4_rt_msa(aa + kk, b + 4 * kk, cc, ldc, (k - kk));
-
- aa += k;
- cc += 1;
- }
- }
-
- kk -= 4;
- }
- }
-
- for (j = (n >> 3); j--;)
- {
- aa = a;
- b -= 8 * k;
- c -= 8 * ldc;
- cc = c;
-
- for (i = (m >> 3); i--;)
- {
- ssolve_8x8_rt_msa(aa + 8 * kk, b + 8 * kk, cc, ldc, (k - kk));
-
- aa += 8 * k;
- cc += 8;
- }
-
- if (m & 7)
- {
- if (m & 4)
- {
- ssolve_4x8_rt_msa(aa + 4 * kk, b + 8 * kk, cc, ldc, (k - kk));
-
- aa += 4 * k;
- cc += 4;
- }
-
- if (m & 2)
- {
- ssolve_2x8_rt_msa(aa + 2 * kk, b + 8 * kk, cc, ldc, (k - kk));
-
- aa += 2 * k;
- cc += 2;
- }
-
- if (m & 1)
- {
- ssolve_1x8_rt_msa(aa + kk, b + 8 * kk, cc, ldc, (k - kk));
-
- aa += k;
- cc += 1;
- }
- }
-
- kk -= 8;
- }
-
- return 0;
- }