- /***************************************************************************
- Copyright (c) 2013-2019, The OpenBLAS Project
- All rights reserved.
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in
- the documentation and/or other materials provided with the
- distribution.
- 3. Neither the name of the OpenBLAS project nor the names of
- its contributors may be used to endorse or promote products
- derived from this software without specific prior written permission.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *****************************************************************************/
- #define MY_ALIGN .align 3
- b ZGEMM_L2
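- /* MY_ALIGN pads to an 8-byte (2^3) boundary for the branch targets below.   */
- /* The unconditional branch above skips the out-of-line helper subroutines   */
- /* that follow and enters the column-loop driver at ZGEMM_L2.                */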
- /* MINI SUBROUTINES */
- /* 2x8 MAIN 128x+2 LOOP */
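- /*
-  Rough C-level sketch of what one 2x8 tile accumulates (illustrative only; the
-  actual work is done by the LOAD2x8_2 / KERNEL2x8_L2 / END2x8_2 macros, which
-  keep the 8x2 double-complex accumulators in vector registers):
-
-      for (k = 0; k < K; k++)
-          for (i = 0; i < 8; i++)
-              for (j = 0; j < 2; j++)
-                  acc[i][j] += A[k][i] * B[k][j];   // complex multiply-add
-
-  Each KERNEL2x8_L2 256,64,n,f call consumes two k-iterations, advancing AO by
-  256 bytes (8 complex doubles x 2 k) and BO by 64 bytes (2 complex doubles x 2 k),
-  so the 64 calls below cover 128 k-iterations per CTR iteration, with dcbt
-  prefetches interleaved.
- */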
-
-
- ZGEMM_L2x8_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD2x8_2
- MY_ALIGN
- ZGEMM_L2x8_LOOP:
- /*----------------------------------------*/
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL2x8_L2 256,64,0,0
- ZGEMM_L2x8_K128:
- /*----------------------------------------*/
- KERNEL2x8_L2 256,64,1,0
- dcbt AO, T2
- KERNEL2x8_L2 256,64,2,0
- KERNEL2x8_L2 256,64,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL2x8_L2 256,64,4,0
- KERNEL2x8_L2 256,64,5,0
- dcbt AO, T4
- KERNEL2x8_L2 256,64,6,0
- KERNEL2x8_L2 256,64,7,0
- dcbt AO, T5
- dcbt BO, T3
- KERNEL2x8_L2 256,64,8,0
- KERNEL2x8_L2 256,64,9,0
- KERNEL2x8_L2 256,64,10,0
- KERNEL2x8_L2 256,64,11,0
- dcbt BO, T4
- KERNEL2x8_L2 256,64,12,0
- KERNEL2x8_L2 256,64,13,0
- KERNEL2x8_L2 256,64,14,0
- KERNEL2x8_L2 256,64,15,0
- KERNEL2x8_L2 256,64,16,0
- KERNEL2x8_L2 256,64,17,0
- KERNEL2x8_L2 256,64,18,0
- KERNEL2x8_L2 256,64,19,0
- KERNEL2x8_L2 256,64,20,0
- KERNEL2x8_L2 256,64,21,0
- KERNEL2x8_L2 256,64,22,0
- KERNEL2x8_L2 256,64,23,0
- KERNEL2x8_L2 256,64,24,0
- KERNEL2x8_L2 256,64,25,0
- KERNEL2x8_L2 256,64,26,0
- KERNEL2x8_L2 256,64,27,0
- KERNEL2x8_L2 256,64,28,0
- KERNEL2x8_L2 256,64,29,0
- KERNEL2x8_L2 256,64,30,0
- KERNEL2x8_L2 256,64,31,0
- KERNEL2x8_L2 256,64,32,0
- KERNEL2x8_L2 256,64,33,0
- KERNEL2x8_L2 256,64,34,0
- KERNEL2x8_L2 256,64,35,0
- KERNEL2x8_L2 256,64,36,0
- KERNEL2x8_L2 256,64,37,0
- KERNEL2x8_L2 256,64,38,0
- KERNEL2x8_L2 256,64,39,0
- KERNEL2x8_L2 256,64,40,0
- KERNEL2x8_L2 256,64,41,0
- KERNEL2x8_L2 256,64,42,0
- KERNEL2x8_L2 256,64,43,0
- KERNEL2x8_L2 256,64,44,0
- KERNEL2x8_L2 256,64,45,0
- KERNEL2x8_L2 256,64,46,0
- KERNEL2x8_L2 256,64,47,0
- KERNEL2x8_L2 256,64,48,0
- KERNEL2x8_L2 256,64,49,0
- KERNEL2x8_L2 256,64,50,0
- KERNEL2x8_L2 256,64,51,0
- KERNEL2x8_L2 256,64,52,0
- KERNEL2x8_L2 256,64,53,0
- KERNEL2x8_L2 256,64,54,0
- KERNEL2x8_L2 256,64,55,0
- KERNEL2x8_L2 256,64,56,0
- KERNEL2x8_L2 256,64,57,0
- KERNEL2x8_L2 256,64,58,0
- KERNEL2x8_L2 256,64,59,0
- KERNEL2x8_L2 256,64,60,0
- KERNEL2x8_L2 256,64,61,0
- KERNEL2x8_L2 256,64,62,0
- KERNEL2x8_L2 256,64,63,1
- bdnz ZGEMM_L2x8_LOOP
- MY_ALIGN
- ZGEMM_L2x8_LOOP_END:
- /*----------------------------------------*/
- END2x8_2
- blr
- MY_ALIGN
-
-
- ZGEMM_2x8_L64_SUB:
- /*----------------------------------------*/
- LOAD2x8_2
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL2x8_L2 256,64,0,0
- KERNEL2x8_L2 256,64,1,0
- dcbt AO, T2
- KERNEL2x8_L2 256,64,2,0
- KERNEL2x8_L2 256,64,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL2x8_L2 256,64,4,0
- KERNEL2x8_L2 256,64,5,0
- dcbt AO, T4
- KERNEL2x8_L2 256,64,6,0
- KERNEL2x8_L2 256,64,7,0
- dcbt AO, T5
- dcbt BO, T3
- KERNEL2x8_L2 256,64,8,0
- KERNEL2x8_L2 256,64,9,0
- KERNEL2x8_L2 256,64,10,0
- KERNEL2x8_L2 256,64,11,0
- dcbt BO, T4
- KERNEL2x8_L2 256,64,12,0
- KERNEL2x8_L2 256,64,13,0
- KERNEL2x8_L2 256,64,14,0
- KERNEL2x8_L2 256,64,15,0
- KERNEL2x8_L2 256,64,16,0
- KERNEL2x8_L2 256,64,17,0
- KERNEL2x8_L2 256,64,18,0
- KERNEL2x8_L2 256,64,19,0
- KERNEL2x8_L2 256,64,20,0
- KERNEL2x8_L2 256,64,21,0
- KERNEL2x8_L2 256,64,22,0
- KERNEL2x8_L2 256,64,23,0
- KERNEL2x8_L2 256,64,24,0
- KERNEL2x8_L2 256,64,25,0
- KERNEL2x8_L2 256,64,26,0
- KERNEL2x8_L2 256,64,27,0
- KERNEL2x8_L2 256,64,28,0
- KERNEL2x8_L2 256,64,29,0
- KERNEL2x8_L2 256,64,30,0
- KERNEL2x8_E2 256,64,31,1
- blr
- MY_ALIGN
-
-
- ZGEMM_2x8_L32_SUB:
- /*----------------------------------------*/
- LOAD2x8_2
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL2x8_L2 256,64,0,0
- KERNEL2x8_L2 256,64,1,0
- dcbt AO, T2
- KERNEL2x8_L2 256,64,2,0
- KERNEL2x8_L2 256,64,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL2x8_L2 256,64,4,0
- KERNEL2x8_L2 256,64,5,0
- dcbt AO, T4
- KERNEL2x8_L2 256,64,6,0
- KERNEL2x8_L2 256,64,7,0
- dcbt AO, T5
- dcbt BO, T3
- KERNEL2x8_L2 256,64,8,0
- KERNEL2x8_L2 256,64,9,0
- KERNEL2x8_L2 256,64,10,0
- KERNEL2x8_L2 256,64,11,0
- dcbt BO, T4
- KERNEL2x8_L2 256,64,12,0
- KERNEL2x8_L2 256,64,13,0
- KERNEL2x8_L2 256,64,14,0
- KERNEL2x8_E2 256,64,15,1
- blr
- MY_ALIGN
-
-
- ZGEMM_2x8_L16_SUB:
- /*----------------------------------------*/
- LOAD2x8_2
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL2x8_L2 256,64,0,0
- KERNEL2x8_L2 256,64,1,0
- dcbt AO, T2
- KERNEL2x8_L2 256,64,2,0
- KERNEL2x8_L2 256,64,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL2x8_L2 256,64,4,0
- KERNEL2x8_L2 256,64,5,0
- dcbt AO, T4
- KERNEL2x8_L2 256,64,6,0
- KERNEL2x8_E2 256,64,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_2x4_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD2x4_2
- MY_ALIGN
- ZGEMM_L2x4_LOOP:
- /*----------------------------------------*/
- KERNEL2x4_L2 128,64,0,0
- ZGEMM_L2x4_K32:
- /*----------------------------------------*/
- KERNEL2x4_L2 128,64,1,0
- KERNEL2x4_L2 128,64,2,0
- KERNEL2x4_L2 128,64,3,0
- KERNEL2x4_L2 128,64,4,0
- KERNEL2x4_L2 128,64,5,0
- KERNEL2x4_L2 128,64,6,0
- KERNEL2x4_L2 128,64,7,0
- KERNEL2x4_L2 128,64,8,0
- KERNEL2x4_L2 128,64,9,0
- KERNEL2x4_L2 128,64,10,0
- KERNEL2x4_L2 128,64,11,0
- KERNEL2x4_L2 128,64,12,0
- KERNEL2x4_L2 128,64,13,0
- KERNEL2x4_L2 128,64,14,0
- KERNEL2x4_L2 128,64,15,1
- bdnz ZGEMM_L2x4_LOOP
- MY_ALIGN
- ZGEMM_L2x4_LOOP_END:
- /*----------------------------------------*/
- END2x4_2
- blr
- MY_ALIGN
-
-
- ZGEMM_2x4_L16_SUB:
- /*----------------------------------------*/
- LOAD2x4_2
- KERNEL2x4_L2 128,64,0,0
- KERNEL2x4_L2 128,64,1,0
- KERNEL2x4_L2 128,64,2,0
- KERNEL2x4_L2 128,64,3,0
- KERNEL2x4_L2 128,64,4,0
- KERNEL2x4_L2 128,64,5,0
- KERNEL2x4_L2 128,64,6,0
- KERNEL2x4_E2 128,64,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_2x4_L8_SUB:
- /*----------------------------------------*/
- LOAD2x4_2
- KERNEL2x4_L2 128,64,0,0
- KERNEL2x4_L2 128,64,1,0
- KERNEL2x4_L2 128,64,2,0
- KERNEL2x4_E2 128,64,3,1
- blr
-
-
- ZGEMM_2x2_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD2x2_2
- MY_ALIGN
- ZGEMM_L2x2_LOOP:
- /*----------------------------------------*/
- KERNEL2x2_L2 64,64,0,0
- ZGEMM_L2x2_K32:
- /*----------------------------------------*/
- KERNEL2x2_L2 64,64,1,0
- KERNEL2x2_L2 64,64,2,0
- KERNEL2x2_L2 64,64,3,0
- KERNEL2x2_L2 64,64,4,0
- KERNEL2x2_L2 64,64,5,0
- KERNEL2x2_L2 64,64,6,0
- KERNEL2x2_L2 64,64,7,0
- KERNEL2x2_L2 64,64,8,0
- KERNEL2x2_L2 64,64,9,0
- KERNEL2x2_L2 64,64,10,0
- KERNEL2x2_L2 64,64,11,0
- KERNEL2x2_L2 64,64,12,0
- KERNEL2x2_L2 64,64,13,0
- KERNEL2x2_L2 64,64,14,0
- KERNEL2x2_L2 64,64,15,1
- bdnz ZGEMM_L2x2_LOOP
- MY_ALIGN
-
-
- ZGEMM_L2x2_LOOP_END:
- /*----------------------------------------*/
- END2x2_2
- blr
- MY_ALIGN
- ZGEMM_2x2_L16_SUB:
- /*----------------------------------------*/
- LOAD2x2_2
- KERNEL2x2_L2 64,64,0,0
- KERNEL2x2_L2 64,64,1,0
- KERNEL2x2_L2 64,64,2,0
- KERNEL2x2_L2 64,64,3,0
- KERNEL2x2_L2 64,64,4,0
- KERNEL2x2_L2 64,64,5,0
- KERNEL2x2_L2 64,64,6,0
- KERNEL2x2_E2 64,64,7,1
- blr
- MY_ALIGN
- ZGEMM_2x2_L8_SUB:
- /*----------------------------------------*/
- LOAD2x2_2
- KERNEL2x2_L2 64,64,0,0
- KERNEL2x2_L2 64,64,1,0
- KERNEL2x2_L2 64,64,2,0
- KERNEL2x2_E2 64,64,3,1
- blr
-
-
- ZGEMM_2x1_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD2x1_2
- MY_ALIGN
- ZGEMM_L2x1_LOOP:
- /*----------------------------------------*/
- KERNEL2x1_L2 32,64,0,0
- ZGEMM_L2x1_K32:
- /*----------------------------------------*/
- KERNEL2x1_L2 32,64,1,0
- KERNEL2x1_L2 32,64,2,0
- KERNEL2x1_L2 32,64,3,0
- KERNEL2x1_L2 32,64,4,0
- KERNEL2x1_L2 32,64,5,0
- KERNEL2x1_L2 32,64,6,0
- KERNEL2x1_L2 32,64,7,0
- KERNEL2x1_L2 32,64,8,0
- KERNEL2x1_L2 32,64,9,0
- KERNEL2x1_L2 32,64,10,0
- KERNEL2x1_L2 32,64,11,0
- KERNEL2x1_L2 32,64,12,0
- KERNEL2x1_L2 32,64,13,0
- KERNEL2x1_L2 32,64,14,0
- KERNEL2x1_L2 32,64,15,1
- bdnz ZGEMM_L2x1_LOOP
- MY_ALIGN
- ZGEMM_L2x1_LOOP_END:
- /*----------------------------------------*/
- END2x1_2
- blr
-
- MY_ALIGN
- ZGEMM_2x1_L16_SUB:
- /*----------------------------------------*/
- LOAD2x1_2
- KERNEL2x1_L2 32,64,0,0
- KERNEL2x1_L2 32,64,1,0
- KERNEL2x1_L2 32,64,2,0
- KERNEL2x1_L2 32,64,3,0
- KERNEL2x1_L2 32,64,4,0
- KERNEL2x1_L2 32,64,5,0
- KERNEL2x1_L2 32,64,6,0
- KERNEL2x1_E2 32,64,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_2x1_L8_SUB:
- /*----------------------------------------*/
- LOAD2x1_2
- KERNEL2x1_L2 32,64,0,0
- KERNEL2x1_L2 32,64,1,0
- KERNEL2x1_L2 32,64,2,0
- KERNEL2x1_E2 32,64,3,1
- blr
-
-
-
- /* MAIN LOOP BEGINS */
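- /*
-  Driver structure: J counts pairs of columns of B (N/2).  For each pair, I
-  walks 8-row tiles of A (M/8), then the M%8 remainder is handled by the 4-,
-  2- and 1-row paths.  Within a tile, K is split into a 128-step main loop
-  (the *_LMAIN_SUB helpers above) plus a tail taken in chunks of 64/32/16/8/4/2/1.
- */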
- MY_ALIGN
-
-
- ZGEMM_L2:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL) && !defined(LEFT)
- neg TEMP_REG, OFFSET
- #endif
- srawi. J, N, 1
- ble ZGEMM_L2_END
-
-
- ZGEMM_L2_BEGIN:
- /*----------------------------------------*/
- mr CO, C
- slwi T1, LDC , 1
- add T2,C,LDC
- mr AO, A
- add C, C, T1
- #if defined(TRMMKERNEL) && defined(LEFT)
- mr TEMP_REG, OFFSET /*off = offset;*/
- #endif
- srawi. I, M, 3
- ble ZGEMM_L2x8_END
- dcbt CO,r0 /*just prefetch*/
- dcbt T2,r0
-
-
- ZGEMM_L2x8_BEGIN:
- /*----------------------------------------*/
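- /*
-  For TRMM builds the REFRESH_POINTERS / REFRESH_TEMP_BK macros (defined
-  outside this file, alongside the LOAD/KERNEL macros) re-derive AO, BO and
-  the effective inner count T6 from OFFSET; plain GEMM simply points BO at B.
- */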
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,8,2
- #else
- mr BO, B
- dcbt B, r0
- #endif
- dcbt AO, r0
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,8,2
- mr T1, T6
- /* TEMPS FOR PREFETCH */
- li T2, 1024
- li T3, 1024+512
- addi T1,T1, -2
- /* TEMPS FOR PREFETCH */
- li T4, 2048
- li T5, 2048+512
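- /* T2..T5 hold byte offsets (1 KB, 1.5 KB, 2 KB, 2.5 KB) used by the dcbt
-    prefetches inside the unrolled loops. */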
- srawi. T8, T1, 7 /* T8 = (T6-2) / 128: iterations of the 128-step main loop */
- #else
- mr T1, K
- /* TEMPS FOR PREFETCH */
- li T2, 1024
- li T3, 1024+512
- addi T1,T1, -2
- /* TEMPS FOR PREFETCH */
- li T4, 2048
- li T5, 2048+512
- srawi. T8, T1, 7 /* T8 = (K-2) / 128: iterations of the 128-step main loop */
- #endif
- ZERO2x8
- ble ZGEMM_L2x8_SUB0
- bl ZGEMM_L2x8_LMAIN_SUB
- andi. L, T1, 127
- ble ZGEMM_L2x8_SAVE
- b ZGEMM_L2x8_SUB2
-
-
- ZGEMM_L2x8_SUB0:
- /*----------------------------------------*/
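- /*
-  SUB0 is reached when the 128-step main loop would run zero times.  K (or the
-  TRMM count T6) equal to 128 or 129 is special-cased so the fully unrolled
-  body at ZGEMM_L2x8_K128 can still be run exactly once; AO/BO are pre-biased
-  with negative addi so the offset-addressed LOAD/KERNEL macros line up.  All
-  other short counts fall through to the SUB2 tail chain below.
- */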
- #if defined(TRMMKERNEL)
- andi. L, T6, 255
- cmpwi T6,129
- #else
- andi. L, K, 255
- cmpwi K,129
- #endif
- li T8,1
- bne CMP2x8_128K
- addi BO,BO,-32
- addi AO,AO,-128
- LOAD2x8O 128,32
- END2x8_WITHOUT_ADD
- LOAD2x8_2O 256, 64
- mtctr T8
- bl ZGEMM_L2x8_K128
- b ZGEMM_L2x8_SAVE
- CMP2x8_128K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,128
- #else
- cmpwi K,128
- #endif
- bne ZGEMM_L2x8_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-64
- addi AO,AO,-256
- LOAD2x8_2O 256,64
- bl ZGEMM_L2x8_K128
- b ZGEMM_L2x8_SAVE
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2:
- /*----------------------------------------*/
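- /* Consume the remaining k count in L in power-of-two chunks: 64, 32, 16, 8, 4, 2, 1. */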
- andi. T1,L, 64
- ble ZGEMM_L2x8_SUB2_32
- bl ZGEMM_2x8_L64_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2_32:
- /*----------------------------------------*/
- andi. T1,L, 32
- ble ZGEMM_L2x8_SUB2_16
- bl ZGEMM_2x8_L32_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2_16:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L2x8_SUB2_8
- bl ZGEMM_2x8_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L2x8_SUB2_4
- LOAD2x8_2
- KERNEL2x8_L2 256,64, 0,0
- KERNEL2x8_L2 256,64, 1,0
- KERNEL2x8_L2 256,64, 2,0
- KERNEL2x8_E2 256,64, 3,1
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L2x8_SUB2_2
- LOAD2x8_2
- KERNEL2x8_L2 256,64, 0,0
- KERNEL2x8_E2 256,64, 1,1
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L2x8_SUB2_1
- LOAD2x8_2
- KERNEL2x8_E2 256,64, 0,1
- MY_ALIGN
-
-
- ZGEMM_L2x8_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L2x8_SAVE
- KERNEL2x8
-
-
- ZGEMM_L2x8_SAVE:
- /*----------------------------------------*/
- addic. I, I, -1
- SAVE2x8
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,8,2
- #endif
- bgt ZGEMM_L2x8_BEGIN
- andi. T2, M, 7
- ble ZGEMM_L2x1_END
- andi. T1, M, 4
- ble ZGEMM_L2x4_END
- b ZGEMM_L2x4_BEGIN
- MY_ALIGN
-
-
- ZGEMM_L2x8_END:
- /*----------------------------------------*/
-
-
- ZGEMM_L2x4_BEGIN:
- /*----------------------------------------*/
- andi. T2, M, 7
- ble ZGEMM_L2x1_END
- andi. T1, M, 4
- ble ZGEMM_L2x4_END
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,4,2
- #else
- mr BO, B
- #endif
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,4,2
- mr T1, T6
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (T6-2) / 32 */
- #else
- mr T1, K
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (K-2) / 32 */
- #endif
- ZERO2x4
- ble ZGEMM_L2x4_SUB0
- bl ZGEMM_2x4_LMAIN_SUB
- andi. L, T1, 31
- ble ZGEMM_L2x4_SAVE
- b ZGEMM_L2x4_SUB2
-
-
- ZGEMM_L2x4_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 63
- cmpwi T6,33
- #else
- andi. L, K, 63
- cmpwi K,33
- #endif
- li T8,1
- bne CMP2x4_32K
- addi BO,BO,-32
- addi AO,AO,-64
- LOAD2x4O 64,32
- END2x4_WITHOUT_ADD
- LOAD2x4_2O 128, 64
- mtctr T8
- bl ZGEMM_L2x4_K32
- b ZGEMM_L2x4_SAVE
- CMP2x4_32K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,32
- #else
- cmpwi K,32
- #endif
- bne ZGEMM_L2x4_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-64
- addi AO,AO,-128
- LOAD2x4_2O 128,64
- bl ZGEMM_L2x4_K32
- b ZGEMM_L2x4_SAVE
- MY_ALIGN
-
-
- ZGEMM_L2x4_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L2x4_SUB2_8
- bl ZGEMM_2x4_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x4_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L2x4_SUB2_4
- bl ZGEMM_2x4_L8_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x4_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L2x4_SUB2_2
- LOAD2x4_2
- KERNEL2x4_L2 128,64, 0,0
- KERNEL2x4_E2 128,64, 1,1
- MY_ALIGN
-
-
- ZGEMM_L2x4_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L2x4_SUB2_1
- LOAD2x4_2
- KERNEL2x4_E2 128,64, 0,1
- MY_ALIGN
-
-
- ZGEMM_L2x4_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L2x4_SAVE
- KERNEL2x4
-
-
- ZGEMM_L2x4_SAVE:
- /*----------------------------------------*/
- SAVE2x4
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,4,2
- #endif
-
-
- ZGEMM_L2x4_END:
- /*----------------------------------------*/
-
-
- ZGEMM_L2x2_BEGIN:
- /*----------------------------------------*/
- andi. T1, M, 2
- ble ZGEMM_L2x2_END
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,2,2
- #else
- mr BO, B
- #endif
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,2,2
- mr T1, T6
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (T6-2) / 32 */
- #else
- mr T1, K
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (K-2) / 32 */
- #endif
- ZERO2x2
- ble ZGEMM_L2x2_SUB0
- bl ZGEMM_2x2_LMAIN_SUB
- andi. L, T1, 31
- ble ZGEMM_L2x2_SAVE
- b ZGEMM_L2x2_SUB2
-
-
- ZGEMM_L2x2_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 63
- cmpwi T6,33
- #else
- andi. L, K, 63
- cmpwi K,33
- #endif
- li T8,1
- bne CMP2x2_32K
- addi BO,BO,-32
- addi AO,AO,-32
- LOAD2x2O 32,32
- END2x2_WITHOUT_ADD
- LOAD2x2_2O 64, 64
- mtctr T8
- bl ZGEMM_L2x2_K32
- b ZGEMM_L2x2_SAVE
- CMP2x2_32K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,32
- #else
- cmpwi K,32
- #endif
- bne ZGEMM_L2x2_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-64
- addi AO,AO,-64
- LOAD2x2_2O 64,64
- bl ZGEMM_L2x2_K32
- b ZGEMM_L2x2_SAVE
- MY_ALIGN
-
-
- ZGEMM_L2x2_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L2x2_SUB2_8
- bl ZGEMM_2x2_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x2_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L2x2_SUB2_4
- bl ZGEMM_2x2_L8_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x2_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L2x2_SUB2_2
- LOAD2x2_2
- KERNEL2x2_L2 64,64, 0,0
- KERNEL2x2_E2 64,64, 1,1
- MY_ALIGN
-
-
- ZGEMM_L2x2_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L2x2_SUB2_1
- LOAD2x2_2
- KERNEL2x2_E2 64,64, 0,1
- MY_ALIGN
-
-
- ZGEMM_L2x2_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L2x2_SAVE
- KERNEL2x2
-
-
- ZGEMM_L2x2_SAVE:
- /*----------------------------------------*/
- SAVE2x2
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,2,2
- #endif
-
-
- ZGEMM_L2x2_END:
- /*----------------------------------------*/
-
-
- ZGEMM_L2x1_BEGIN:
- /*----------------------------------------*/
- andi. T1, M, 1
- ble ZGEMM_L2x1_END
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,1,2
- #else
- mr BO, B
- #endif
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,1,2
- mr T1, T6
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (T6-2) / 32 */
- #else
- mr T1, K
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (K-2) / 32 */
- #endif
- ZERO2x1
- ble ZGEMM_L2x1_SUB0
- bl ZGEMM_2x1_LMAIN_SUB
- andi. L, T1, 31
- ble ZGEMM_L2x1_SAVE
- b ZGEMM_L2x1_SUB2
-
-
- ZGEMM_L2x1_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 63
- cmpwi T6,33
- #else
- andi. L, K, 63
- cmpwi K,33
- #endif
- li T8,1
- bne CMP2x1_32K
- addi BO,BO,-32
- addi AO,AO,-16
- LOAD2x1O 16,32
- END2x1_WITHOUT_ADD
- LOAD2x1_2O 32, 64
- mtctr T8
- bl ZGEMM_L2x1_K32
- b ZGEMM_L2x1_SAVE
- CMP2x1_32K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,32
- #else
- cmpwi K,32
- #endif
- bne ZGEMM_L2x1_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-64
- addi AO,AO,-32
- LOAD2x1_2O 32,64
- bl ZGEMM_L2x1_K32
- b ZGEMM_L2x1_SAVE
- MY_ALIGN
-
-
- ZGEMM_L2x1_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L2x1_SUB2_8
- bl ZGEMM_2x1_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x1_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L2x1_SUB2_4
- bl ZGEMM_2x1_L8_SUB
- MY_ALIGN
-
-
- ZGEMM_L2x1_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L2x1_SUB2_2
- LOAD2x1_2
- KERNEL2x1_L2 32,64, 0,0
- KERNEL2x1_E2 32,64, 1,1
- MY_ALIGN
-
-
- ZGEMM_L2x1_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L2x1_SUB2_1
- LOAD2x1_2
- KERNEL2x1_E2 32,64, 0,1
- MY_ALIGN
-
-
- ZGEMM_L2x1_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L2x1_SAVE
- KERNEL2x1
-
-
- ZGEMM_L2x1_SAVE:
- /*----------------------------------------*/
- SAVE2x1
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,1,2
- #endif
-
-
- ZGEMM_L2x1_END:
- /*----------------------------------------*/
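- /* Advance B past the two processed columns: K * 2 * sizeof(double complex) = K << 5 bytes. */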
- slwi T1, K, 5
- addic. J, J, -1
- add B, B, T1
- #if defined(TRMMKERNEL) && !defined(LEFT)
- addi TEMP_REG, TEMP_REG, 2
- #endif
- bgt ZGEMM_L2_BEGIN
-
-
- ZGEMM_L2_END:
-
- b ZGEMM_L1
- /* MINI SUBROUTINES */
- /* 1x8 MAIN 128x+2 LOOP */
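- /* These N=1 helpers mirror the 2x kernels above but read a single column of B,
-    so the BO step per two k-iterations drops from 64 to 32 bytes. */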
-
-
- ZGEMM_L1x8_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD1x8_2
- MY_ALIGN
- ZGEMM_L1x8_LOOP:
- /*----------------------------------------*/
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL1x8_L2 256,32,0,0
- ZGEMM_L1x8_K128:
- /*----------------------------------------*/
- KERNEL1x8_L2 256,32,1,0
- dcbt AO, T2
- KERNEL1x8_L2 256,32,2,0
- KERNEL1x8_L2 256,32,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL1x8_L2 256,32,4,0
- KERNEL1x8_L2 256,32,5,0
- dcbt AO, T4
- KERNEL1x8_L2 256,32,6,0
- KERNEL1x8_L2 256,32,7,0
- dcbt AO, T5
- dcbt BO, T3
- KERNEL1x8_L2 256,32,8,0
- KERNEL1x8_L2 256,32,9,0
- KERNEL1x8_L2 256,32,10,0
- KERNEL1x8_L2 256,32,11,0
- dcbt BO, T4
- KERNEL1x8_L2 256,32,12,0
- KERNEL1x8_L2 256,32,13,0
- KERNEL1x8_L2 256,32,14,0
- KERNEL1x8_L2 256,32,15,0
- KERNEL1x8_L2 256,32,16,0
- KERNEL1x8_L2 256,32,17,0
- KERNEL1x8_L2 256,32,18,0
- KERNEL1x8_L2 256,32,19,0
- KERNEL1x8_L2 256,32,20,0
- KERNEL1x8_L2 256,32,21,0
- KERNEL1x8_L2 256,32,22,0
- KERNEL1x8_L2 256,32,23,0
- KERNEL1x8_L2 256,32,24,0
- KERNEL1x8_L2 256,32,25,0
- KERNEL1x8_L2 256,32,26,0
- KERNEL1x8_L2 256,32,27,0
- KERNEL1x8_L2 256,32,28,0
- KERNEL1x8_L2 256,32,29,0
- KERNEL1x8_L2 256,32,30,0
- KERNEL1x8_L2 256,32,31,0
- KERNEL1x8_L2 256,32,32,0
- KERNEL1x8_L2 256,32,33,0
- KERNEL1x8_L2 256,32,34,0
- KERNEL1x8_L2 256,32,35,0
- KERNEL1x8_L2 256,32,36,0
- KERNEL1x8_L2 256,32,37,0
- KERNEL1x8_L2 256,32,38,0
- KERNEL1x8_L2 256,32,39,0
- KERNEL1x8_L2 256,32,40,0
- KERNEL1x8_L2 256,32,41,0
- KERNEL1x8_L2 256,32,42,0
- KERNEL1x8_L2 256,32,43,0
- KERNEL1x8_L2 256,32,44,0
- KERNEL1x8_L2 256,32,45,0
- KERNEL1x8_L2 256,32,46,0
- KERNEL1x8_L2 256,32,47,0
- KERNEL1x8_L2 256,32,48,0
- KERNEL1x8_L2 256,32,49,0
- KERNEL1x8_L2 256,32,50,0
- KERNEL1x8_L2 256,32,51,0
- KERNEL1x8_L2 256,32,52,0
- KERNEL1x8_L2 256,32,53,0
- KERNEL1x8_L2 256,32,54,0
- KERNEL1x8_L2 256,32,55,0
- KERNEL1x8_L2 256,32,56,0
- KERNEL1x8_L2 256,32,57,0
- KERNEL1x8_L2 256,32,58,0
- KERNEL1x8_L2 256,32,59,0
- KERNEL1x8_L2 256,32,60,0
- KERNEL1x8_L2 256,32,61,0
- KERNEL1x8_L2 256,32,62,0
- KERNEL1x8_L2 256,32,63,1
- bdnz ZGEMM_L1x8_LOOP
- MY_ALIGN
- ZGEMM_L1x8_LOOP_END:
- /*----------------------------------------*/
- END1x8_2
- blr
- MY_ALIGN
-
-
- ZGEMM_1x8_L64_SUB:
- /*----------------------------------------*/
- LOAD1x8_2
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL1x8_L2 256,32,0,0
- KERNEL1x8_L2 256,32,1,0
- dcbt AO, T2
- KERNEL1x8_L2 256,32,2,0
- KERNEL1x8_L2 256,32,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL1x8_L2 256,32,4,0
- KERNEL1x8_L2 256,32,5,0
- dcbt AO, T4
- KERNEL1x8_L2 256,32,6,0
- KERNEL1x8_L2 256,32,7,0
- dcbt AO, T5
- dcbt BO, T3
- KERNEL1x8_L2 256,32,8,0
- KERNEL1x8_L2 256,32,9,0
- KERNEL1x8_L2 256,32,10,0
- KERNEL1x8_L2 256,32,11,0
- dcbt BO, T4
- KERNEL1x8_L2 256,32,12,0
- KERNEL1x8_L2 256,32,13,0
- KERNEL1x8_L2 256,32,14,0
- KERNEL1x8_L2 256,32,15,0
- KERNEL1x8_L2 256,32,16,0
- KERNEL1x8_L2 256,32,17,0
- KERNEL1x8_L2 256,32,18,0
- KERNEL1x8_L2 256,32,19,0
- KERNEL1x8_L2 256,32,20,0
- KERNEL1x8_L2 256,32,21,0
- KERNEL1x8_L2 256,32,22,0
- KERNEL1x8_L2 256,32,23,0
- KERNEL1x8_L2 256,32,24,0
- KERNEL1x8_L2 256,32,25,0
- KERNEL1x8_L2 256,32,26,0
- KERNEL1x8_L2 256,32,27,0
- KERNEL1x8_L2 256,32,28,0
- KERNEL1x8_L2 256,32,29,0
- KERNEL1x8_L2 256,32,30,0
- KERNEL1x8_E2 256,32,31,1
- blr
- MY_ALIGN
-
-
- ZGEMM_1x8_L32_SUB:
- /*----------------------------------------*/
- LOAD1x8_2
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL1x8_L2 256,32,0,0
- KERNEL1x8_L2 256,32,1,0
- dcbt AO, T2
- KERNEL1x8_L2 256,32,2,0
- KERNEL1x8_L2 256,32,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL1x8_L2 256,32,4,0
- KERNEL1x8_L2 256,32,5,0
- dcbt AO, T4
- KERNEL1x8_L2 256,32,6,0
- KERNEL1x8_L2 256,32,7,0
- dcbt AO, T5
- dcbt BO, T3
- KERNEL1x8_L2 256,32,8,0
- KERNEL1x8_L2 256,32,9,0
- KERNEL1x8_L2 256,32,10,0
- KERNEL1x8_L2 256,32,11,0
- dcbt BO, T4
- KERNEL1x8_L2 256,32,12,0
- KERNEL1x8_L2 256,32,13,0
- KERNEL1x8_L2 256,32,14,0
- KERNEL1x8_E2 256,32,15,1
- blr
- MY_ALIGN
-
-
- ZGEMM_1x8_L16_SUB:
- /*----------------------------------------*/
- LOAD1x8_2
- dcbt AO, PRE
- dcbt BO, PRE
- KERNEL1x8_L2 256,32,0,0
- KERNEL1x8_L2 256,32,1,0
- dcbt AO, T2
- KERNEL1x8_L2 256,32,2,0
- KERNEL1x8_L2 256,32,3,0
- dcbt AO, T3
- dcbt BO, T2
- KERNEL1x8_L2 256,32,4,0
- KERNEL1x8_L2 256,32,5,0
- dcbt AO, T4
- KERNEL1x8_L2 256,32,6,0
- KERNEL1x8_E2 256,32,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_1x4_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD1x4_2
- MY_ALIGN
-
-
- ZGEMM_L1x4_LOOP:
- /*----------------------------------------*/
- KERNEL1x4_L2 128,32,0,0
-
-
- ZGEMM_L1x4_K32:
- /*----------------------------------------*/
- KERNEL1x4_L2 128,32,1,0
- KERNEL1x4_L2 128,32,2,0
- KERNEL1x4_L2 128,32,3,0
- KERNEL1x4_L2 128,32,4,0
- KERNEL1x4_L2 128,32,5,0
- KERNEL1x4_L2 128,32,6,0
- KERNEL1x4_L2 128,32,7,0
- KERNEL1x4_L2 128,32,8,0
- KERNEL1x4_L2 128,32,9,0
- KERNEL1x4_L2 128,32,10,0
- KERNEL1x4_L2 128,32,11,0
- KERNEL1x4_L2 128,32,12,0
- KERNEL1x4_L2 128,32,13,0
- KERNEL1x4_L2 128,32,14,0
- KERNEL1x4_L2 128,32,15,1
- bdnz ZGEMM_L1x4_LOOP
- MY_ALIGN
-
-
- ZGEMM_L1x4_LOOP_END:
- /*----------------------------------------*/
- END1x4_2
- blr
- MY_ALIGN
-
-
- ZGEMM_1x4_L16_SUB:
- /*----------------------------------------*/
- LOAD1x4_2
- KERNEL1x4_L2 128,32,0,0
- KERNEL1x4_L2 128,32,1,0
- KERNEL1x4_L2 128,32,2,0
- KERNEL1x4_L2 128,32,3,0
- KERNEL1x4_L2 128,32,4,0
- KERNEL1x4_L2 128,32,5,0
- KERNEL1x4_L2 128,32,6,0
- KERNEL1x4_E2 128,32,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_1x4_L8_SUB:
- /*----------------------------------------*/
- LOAD1x4_2
- KERNEL1x4_L2 128,32,0,0
- KERNEL1x4_L2 128,32,1,0
- KERNEL1x4_L2 128,32,2,0
- KERNEL1x4_E2 128,32,3,1
- blr
-
-
- ZGEMM_1x2_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD1x2_2
- MY_ALIGN
-
-
- ZGEMM_L1x2_LOOP:
- /*----------------------------------------*/
- KERNEL1x2_L2 64,32,0,0
-
-
- ZGEMM_L1x2_K32:
- /*----------------------------------------*/
- KERNEL1x2_L2 64,32,1,0
- KERNEL1x2_L2 64,32,2,0
- KERNEL1x2_L2 64,32,3,0
- KERNEL1x2_L2 64,32,4,0
- KERNEL1x2_L2 64,32,5,0
- KERNEL1x2_L2 64,32,6,0
- KERNEL1x2_L2 64,32,7,0
- KERNEL1x2_L2 64,32,8,0
- KERNEL1x2_L2 64,32,9,0
- KERNEL1x2_L2 64,32,10,0
- KERNEL1x2_L2 64,32,11,0
- KERNEL1x2_L2 64,32,12,0
- KERNEL1x2_L2 64,32,13,0
- KERNEL1x2_L2 64,32,14,0
- KERNEL1x2_L2 64,32,15,1
- bdnz ZGEMM_L1x2_LOOP
- MY_ALIGN
-
-
- ZGEMM_L1x2_LOOP_END:
- /*----------------------------------------*/
- END1x2_2
- blr
- MY_ALIGN
-
-
- ZGEMM_1x2_L16_SUB:
- /*----------------------------------------*/
- LOAD1x2_2
- KERNEL1x2_L2 64,32,0,0
- KERNEL1x2_L2 64,32,1,0
- KERNEL1x2_L2 64,32,2,0
- KERNEL1x2_L2 64,32,3,0
- KERNEL1x2_L2 64,32,4,0
- KERNEL1x2_L2 64,32,5,0
- KERNEL1x2_L2 64,32,6,0
- KERNEL1x2_E2 64,32,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_1x2_L8_SUB:
- /*----------------------------------------*/
- LOAD1x2_2
- KERNEL1x2_L2 64,32,0,0
- KERNEL1x2_L2 64,32,1,0
- KERNEL1x2_L2 64,32,2,0
- KERNEL1x2_E2 64,32,3,1
- blr
-
-
- ZGEMM_1x1_LMAIN_SUB:
- /*----------------------------------------*/
- mtctr T8
- LOAD1x1_2
- MY_ALIGN
-
-
- ZGEMM_L1x1_LOOP:
- /*----------------------------------------*/
- KERNEL1x1_L2 32,32,0,0
-
-
- ZGEMM_L1x1_K32:
- /*----------------------------------------*/
- KERNEL1x1_L2 32,32,1,0
- KERNEL1x1_L2 32,32,2,0
- KERNEL1x1_L2 32,32,3,0
- KERNEL1x1_L2 32,32,4,0
- KERNEL1x1_L2 32,32,5,0
- KERNEL1x1_L2 32,32,6,0
- KERNEL1x1_L2 32,32,7,0
- KERNEL1x1_L2 32,32,8,0
- KERNEL1x1_L2 32,32,9,0
- KERNEL1x1_L2 32,32,10,0
- KERNEL1x1_L2 32,32,11,0
- KERNEL1x1_L2 32,32,12,0
- KERNEL1x1_L2 32,32,13,0
- KERNEL1x1_L2 32,32,14,0
- KERNEL1x1_L2 32,32,15,1
- bdnz ZGEMM_L1x1_LOOP
- MY_ALIGN
-
-
- ZGEMM_L1x1_LOOP_END:
- /*----------------------------------------*/
- END1x1_2
- blr
- MY_ALIGN
-
-
- ZGEMM_1x1_L16_SUB:
- /*----------------------------------------*/
- LOAD1x1_2
- KERNEL1x1_L2 32,32,0,0
- KERNEL1x1_L2 32,32,1,0
- KERNEL1x1_L2 32,32,2,0
- KERNEL1x1_L2 32,32,3,0
- KERNEL1x1_L2 32,32,4,0
- KERNEL1x1_L2 32,32,5,0
- KERNEL1x1_L2 32,32,6,0
- KERNEL1x1_E2 32,32,7,1
- blr
- MY_ALIGN
-
-
- ZGEMM_1x1_L8_SUB:
- /*----------------------------------------*/
- LOAD1x1_2
- KERNEL1x1_L2 32,32,0,0
- KERNEL1x1_L2 32,32,1,0
- KERNEL1x1_L2 32,32,2,0
- KERNEL1x1_E2 32,32,3,1
- blr
-
-
- /*----------------------N1 BEGINS---------*/
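- /* Driver for the final single column when N is odd; same row-tile structure
-    (8/4/2/1) and K splitting as the ZGEMM_L2 path above. */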
- ZGEMM_L1:
- /*----------------------------------------*/
- andi. T1, N, 1
- ble ZGEMM_L1_END
-
- ZGEMM_L1_BEGIN:
- /*----------------------------------------*/
- mr CO, C
-
- add T2,C,LDC
- mr AO, A
- add C, C, T1
- #if defined(TRMMKERNEL) && defined(LEFT)
- mr TEMP_REG, OFFSET /*off = offset;*/
- #endif
- srawi. I, M, 3
- ble ZGEMM_L1x8_END
- dcbt CO,r0 /*just prefetch*/
- dcbt T2,r0
-
-
- ZGEMM_L1x8_BEGIN:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,8,1
- #else
- mr BO, B
- dcbt B, r0
- #endif
- dcbt AO, r0
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,8,1
- mr T1, T6
- /* TEMPS FOR PREFETCH */
- li T2, 1024
- li T3, 1024+512
- addi T1,T1, -2
- /* TEMPS FOR PREFETCH */
- li T4, 2048
- li T5, 2048+512
- srawi. T8, T1, 7 /* T8 = (T6-2) / 128 */
- #else
- mr T1, K
- /* TEMPS FOR PREFETCH */
- li T2, 1024
- li T3, 1024+512
- addi T1,T1, -2
- /* TEMPS FOR PREFETCH */
- li T4, 2048
- li T5, 2048+512
- srawi. T8, T1, 7 /* T8 = (K-2) / 128 */
- #endif
- ZERO1x8
- ble ZGEMM_L1x8_SUB0
- bl ZGEMM_L1x8_LMAIN_SUB
- andi. L, T1, 127
- ble ZGEMM_L1x8_SAVE
- b ZGEMM_L1x8_SUB2
-
-
- ZGEMM_L1x8_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 255
- cmpwi T6,129
- #else
- andi. L, K, 255
- cmpwi K,129
- #endif
- li T8,1
- bne CMP1x8_128K
- addi BO,BO,-16
- addi AO,AO,-128
- LOAD1x8O 128,16
- END1x8_WITHOUT_ADD
- LOAD1x8_2O 256, 32
- mtctr T8
- bl ZGEMM_L1x8_K128
- b ZGEMM_L1x8_SAVE
- CMP1x8_128K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,128
- #else
- cmpwi K,128
- #endif
- bne ZGEMM_L1x8_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-32
- addi AO,AO,-256
- LOAD1x8_2O 256,32
- bl ZGEMM_L1x8_K128
- b ZGEMM_L1x8_SAVE
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 64
- ble ZGEMM_L1x8_SUB2_32
- bl ZGEMM_1x8_L64_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2_32:
- /*----------------------------------------*/
- andi. T1,L, 32
- ble ZGEMM_L1x8_SUB2_16
- bl ZGEMM_1x8_L32_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2_16:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L1x8_SUB2_8
- bl ZGEMM_1x8_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L1x8_SUB2_4
- LOAD1x8_2
- KERNEL1x8_L2 256,32, 0,0
- KERNEL1x8_L2 256,32, 1,0
- KERNEL1x8_L2 256,32, 2,0
- KERNEL1x8_E2 256,32, 3,1
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L1x8_SUB2_2
- LOAD1x8_2
- KERNEL1x8_L2 256,32, 0,0
- KERNEL1x8_E2 256,32, 1,1
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L1x8_SUB2_1
- LOAD1x8_2
- KERNEL1x8_E2 256,32, 0,1
- MY_ALIGN
-
-
- ZGEMM_L1x8_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L1x8_SAVE
- KERNEL1x8
-
-
- ZGEMM_L1x8_SAVE:
- /*----------------------------------------*/
- addic. I, I, -1
- SAVE1x8
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,8,1
- #endif
- bgt ZGEMM_L1x8_BEGIN
- andi. T2, M, 7
- ble ZGEMM_L1x1_END
- andi. T1, M, 4
- ble ZGEMM_L1x4_END
- b ZGEMM_L1x4_BEGIN
- MY_ALIGN
-
-
- ZGEMM_L1x8_END:
- /*----------------------------------------*/
-
-
- ZGEMM_L1x4_BEGIN:
- /*----------------------------------------*/
- andi. T2, M, 7
- ble ZGEMM_L1x1_END
- andi. T1, M, 4
- ble ZGEMM_L1x4_END
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,4,1
- #else
- mr BO, B
- #endif
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,4,1
- mr T1, T6
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (T6-2) / 32 */
- #else
- mr T1, K
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (K-2) / 32 */
- #endif
- ZERO1x4
- ble ZGEMM_L1x4_SUB0
- bl ZGEMM_1x4_LMAIN_SUB
- andi. L, T1, 31
- ble ZGEMM_L1x4_SAVE
- b ZGEMM_L1x4_SUB2
-
-
- ZGEMM_L1x4_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 63
- cmpwi T6,33
- #else
- andi. L, K, 63
- cmpwi K,33
- #endif
- li T8,1
- bne CMP1x4_32K
- addi BO,BO,-16
- addi AO,AO,-64
- LOAD1x4O 64,16
- END1x4_WITHOUT_ADD
- LOAD1x4_2O 128, 32
- mtctr T8
- bl ZGEMM_L1x4_K32
- b ZGEMM_L1x4_SAVE
- CMP1x4_32K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,32
- #else
- cmpwi K,32
- #endif
- bne ZGEMM_L1x4_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-32
- addi AO,AO,-128
- LOAD1x4_2O 128,32
- bl ZGEMM_L1x4_K32
- b ZGEMM_L1x4_SAVE
- MY_ALIGN
-
-
- ZGEMM_L1x4_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L1x4_SUB2_8
- bl ZGEMM_1x4_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x4_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L1x4_SUB2_4
- bl ZGEMM_1x4_L8_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x4_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L1x4_SUB2_2
- LOAD1x4_2
- KERNEL1x4_L2 128,32, 0,0
- KERNEL1x4_E2 128,32, 1,1
- MY_ALIGN
-
-
- ZGEMM_L1x4_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L1x4_SUB2_1
- LOAD1x4_2
- KERNEL1x4_E2 128,32, 0,1
- MY_ALIGN
-
-
- ZGEMM_L1x4_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L1x4_SAVE
- KERNEL1x4
-
-
- ZGEMM_L1x4_SAVE:
- /*----------------------------------------*/
- SAVE1x4
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,4,1
- #endif
-
-
- ZGEMM_L1x4_END:
- /*----------------------------------------*/
-
-
- ZGEMM_L1x2_BEGIN:
- /*----------------------------------------*/
- andi. T1, M, 2
- ble ZGEMM_L1x2_END
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,2,1
- #else
- mr BO, B
- #endif
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,2,1
- mr T1, T6
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (T6-2) / 32 */
- #else
- mr T1, K
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (K-2) / 32 */
- #endif
- ZERO1x2
- ble ZGEMM_L1x2_SUB0
- bl ZGEMM_1x2_LMAIN_SUB
- andi. L, T1, 31
- ble ZGEMM_L1x2_SAVE
- b ZGEMM_L1x2_SUB2
-
-
- ZGEMM_L1x2_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 63
- cmpwi T6,33
- #else
- andi. L, K, 63
- cmpwi K,33
- #endif
- li T8,1
- bne CMP1x2_32K
- addi BO,BO,-16
- addi AO,AO,-32
- LOAD1x2O 32,16
- END1x2_WITHOUT_ADD
- LOAD1x2_2O 64, 32
- mtctr T8
- bl ZGEMM_L1x2_K32
- b ZGEMM_L1x2_SAVE
- CMP1x2_32K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,32
- #else
- cmpwi K,32
- #endif
- bne ZGEMM_L1x2_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-32
- addi AO,AO,-64
- LOAD1x2_2O 64,32
- bl ZGEMM_L1x2_K32
- b ZGEMM_L1x2_SAVE
- MY_ALIGN
-
-
- ZGEMM_L1x2_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L1x2_SUB2_8
- bl ZGEMM_1x2_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x2_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L1x2_SUB2_4
- bl ZGEMM_1x2_L8_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x2_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L1x2_SUB2_2
- LOAD1x2_2
- KERNEL1x2_L2 64,32, 0,0
- KERNEL1x2_E2 64,32, 1,1
- MY_ALIGN
-
-
- ZGEMM_L1x2_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L1x2_SUB2_1
- LOAD1x2_2
- KERNEL1x2_E2 64,32, 0,1
- MY_ALIGN
-
-
- ZGEMM_L1x2_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L1x2_SAVE
- KERNEL1x2
-
-
- ZGEMM_L1x2_SAVE:
- /*----------------------------------------*/
- SAVE1x2
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,2,1
- #endif
-
-
- ZGEMM_L1x2_END:
- /*----------------------------------------*/
-
-
- ZGEMM_L1x1_BEGIN:
- /*----------------------------------------*/
- andi. T1, M, 1
- ble ZGEMM_L1x1_END
- #if defined(TRMMKERNEL)
- REFRESH_POINTERS AO,BO,TEMP_REG,B,1,1
- #else
- mr BO, B
- #endif
- #if defined(TRMMKERNEL)
- REFRESH_TEMP_BK T6,K,TEMP_REG,1,1
- mr T1, T6
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (T6-2) / 32 */
- #else
- mr T1, K
- addi T1,T1, -2
- srawi. T8, T1, 5 /* T8 = (K-2) / 32 */
- #endif
- ZERO1x1
- ble ZGEMM_L1x1_SUB0
- bl ZGEMM_1x1_LMAIN_SUB
- andi. L, T1, 31
- ble ZGEMM_L1x1_SAVE
- b ZGEMM_L1x1_SUB2
-
-
- ZGEMM_L1x1_SUB0:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- andi. L, T6, 63
- cmpwi T6,33
- #else
- andi. L, K, 63
- cmpwi K,33
- #endif
- li T8,1
- bne CMP1x1_32K
- addi BO,BO,-16
- addi AO,AO,-16
- LOAD1x1O 16,16
- END1x1_WITHOUT_ADD
- LOAD1x1_2O 32, 32
- mtctr T8
- bl ZGEMM_L1x1_K32
- b ZGEMM_L1x1_SAVE
- CMP1x1_32K:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL)
- cmpwi T6,32
- #else
- cmpwi K,32
- #endif
- bne ZGEMM_L1x1_SUB2
- MY_ALIGN
- mtctr T8
- addi BO,BO,-32
- addi AO,AO,-32
- LOAD1x1_2O 32,32
- bl ZGEMM_L1x1_K32
- b ZGEMM_L1x1_SAVE
- MY_ALIGN
-
-
- ZGEMM_L1x1_SUB2:
- /*----------------------------------------*/
- andi. T1,L, 16
- ble ZGEMM_L1x1_SUB2_8
- bl ZGEMM_1x1_L16_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x1_SUB2_8:
- /*----------------------------------------*/
- andi. T1,L, 8
- ble ZGEMM_L1x1_SUB2_4
- bl ZGEMM_1x1_L8_SUB
- MY_ALIGN
-
-
- ZGEMM_L1x1_SUB2_4:
- /*----------------------------------------*/
- andi. T1,L, 4
- ble ZGEMM_L1x1_SUB2_2
- LOAD1x1_2
- KERNEL1x1_L2 32,32, 0,0
- KERNEL1x1_E2 32,32, 1,1
- MY_ALIGN
-
-
- ZGEMM_L1x1_SUB2_2:
- /*----------------------------------------*/
- andi. T1,L, 2
- ble ZGEMM_L1x1_SUB2_1
- LOAD1x1_2
- KERNEL1x1_E2 32,32, 0,1
- MY_ALIGN
-
-
- ZGEMM_L1x1_SUB2_1:
- /*----------------------------------------*/
- andi. T1,L, 1
- ble ZGEMM_L1x1_SAVE
- KERNEL1x1
-
-
- ZGEMM_L1x1_SAVE:
- /*----------------------------------------*/
- SAVE1x1
- #if defined(TRMMKERNEL)
- REFRESH_AFTER_SAVE T6,K,TEMP_REG,BO,AO,1,1
- #endif
-
-
- ZGEMM_L1x1_END:
- /*----------------------------------------*/
- #if defined(TRMMKERNEL) && !defined(LEFT)
- addi TEMP_REG, TEMP_REG, 1
- #endif
-
-
- ZGEMM_L1_END:
- /*----------------------------------------*/
-