
zgemm_kernel_altivec_cell.S 35 kB

  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #ifndef __64BIT__
  41. #define LOAD lwz
  42. #else
  43. #define LOAD ld
  44. #endif
  45. #ifdef __64BIT__
  46. #define STACKSIZE 360
  47. #else
  48. #define STACKSIZE 272
  49. #endif
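/* Byte offsets into the scratch area carved out below the stack pointer in   */
/* the prologue: the real/imaginary swap mask, the sign mask, the splatted    */
/* alpha values, and a zero word used to clear the scalar accumulators.       */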
  50. #define ALIGN_SIZE 0xffff
  51. #define SWAP 0
  52. #define NEG 16
  53. #define ALPHA_R 32
  54. #define ALPHA_I 48
  55. #define FZERO 64
  56. #define M r3
  57. #define N r4
  58. #define K r5
  59. #if defined(linux) || defined(__FreeBSD__)
  60. #ifndef __64BIT__
  61. #define A r6
  62. #define B r7
  63. #define C r8
  64. #define LDC r9
  65. #else
  66. #define A r8
  67. #define B r9
  68. #define C r10
  69. #define LDC r6
  70. #endif
  71. #endif
  72. #if defined(_AIX) || defined(__APPLE__)
  73. #if !defined(__64BIT__) && defined(DOUBLE)
  74. #define A r10
  75. #define B r6
  76. #define C r7
  77. #define LDC r8
  78. #else
  79. #define A r8
  80. #define B r9
  81. #define C r10
  82. #define LDC r6
  83. #endif
  84. #endif
  85. #define STACK r11
  86. #define I r21
  87. #define J r22
  88. #define AO r23
  89. #define BO r24
  90. #define CO1 r25
  91. #define CO2 r26
  92. #define PREA r29
  93. #define PREB r29
  94. #define PREC r30
  95. #define VREG r31
  96. #define LOAD_A lvx
  97. #define LOAD_B lvx
  98. #define OFFSET_0 0
  99. #define OFFSET_1 r14
  100. #define OFFSET_2 r15
  101. #define OFFSET_3 r16
  102. #define OFFSET_4 r17
  103. #define OFFSET_5 r18
  104. #define OFFSET_6 r19
  105. #define OFFSET_7 r20
  106. #define c01 v0
  107. #define c02 v1
  108. #define c03 v2
  109. #define c04 v3
  110. #define c05 v4
  111. #define c06 v5
  112. #define c07 v6
  113. #define c08 v7
  114. #define c09 v8
  115. #define c10 v9
  116. #define c11 v10
  117. #define c12 v11
  118. #define c13 v12
  119. #define c14 v13
  120. #define c15 v14
  121. #define c16 v15
  122. #define a1 v16
  123. #define a2 v17
  124. #define a3 v18
  125. #define a4 v19
  126. #define a5 v20
  127. #define a6 v21
  128. #define a7 v22
  129. #define a8 v23
  130. #define b1 v24
  131. #define b2 v25
  132. #define bp1 v26
  133. #define bp2 v27
  134. #define C1 v16
  135. #define C2 v17
  136. #define C3 v18
  137. #define C4 v19
  138. #define C5 v20
  139. #define c00 v24
  140. #define VZERO v25
  141. #define PERMRSHIFT1 v26
  142. #define PERMRSHIFT2 v27
  143. #define swap v28
  144. #define neg v29
  145. #define alpha_r v30
  146. #define alpha_i v31
  147. #ifndef NEEDPARAM
  148. #ifndef DOUBLE
  149. #include "../cparam.h"
  150. #else
  151. #include "../zparam.h"
  152. #endif
  153. PROLOGUE
  154. PROFCODE
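/* Prologue: reserve the stack frame and save the non-volatile vector         */
/* registers v20-v31 and the integer registers r14-r31.                       */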
  155. addi SP, SP, -STACKSIZE
  156. mr STACK, SP
  157. li r0, 0 * 16
  158. stvx v20, SP, r0
  159. li r0, 1 * 16
  160. stvx v21, SP, r0
  161. li r0, 2 * 16
  162. stvx v22, SP, r0
  163. li r0, 3 * 16
  164. stvx v23, SP, r0
  165. li r0, 4 * 16
  166. stvx v24, SP, r0
  167. li r0, 5 * 16
  168. stvx v25, SP, r0
  169. li r0, 6 * 16
  170. stvx v26, SP, r0
  171. li r0, 7 * 16
  172. stvx v27, SP, r0
  173. li r0, 8 * 16
  174. stvx v28, SP, r0
  175. li r0, 9 * 16
  176. stvx v29, SP, r0
  177. li r0, 10 * 16
  178. stvx v30, SP, r0
  179. li r0, 11 * 16
  180. stvx v31, SP, r0
  181. #ifdef __64BIT__
  182. std r31, 192(SP)
  183. std r30, 200(SP)
  184. std r29, 208(SP)
  185. std r28, 216(SP)
  186. std r27, 224(SP)
  187. std r26, 232(SP)
  188. std r25, 240(SP)
  189. std r24, 248(SP)
  190. std r23, 256(SP)
  191. std r22, 264(SP)
  192. std r21, 272(SP)
  193. std r20, 280(SP)
  194. std r19, 288(SP)
  195. std r18, 296(SP)
  196. std r17, 304(SP)
  197. std r16, 312(SP)
  198. std r15, 320(SP)
  199. std r14, 328(SP)
  200. #else
  201. stw r31, 192(SP)
  202. stw r30, 196(SP)
  203. stw r29, 200(SP)
  204. stw r28, 204(SP)
  205. stw r27, 208(SP)
  206. stw r26, 212(SP)
  207. stw r25, 216(SP)
  208. stw r24, 220(SP)
  209. stw r23, 224(SP)
  210. stw r22, 228(SP)
  211. stw r21, 232(SP)
  212. stw r20, 236(SP)
  213. stw r19, 240(SP)
  214. stw r18, 244(SP)
  215. stw r17, 248(SP)
  216. stw r16, 252(SP)
  217. stw r15, 256(SP)
  218. stw r14, 260(SP)
  219. #endif
  220. #if defined(linux) || defined(__FreeBSD__)
  221. #ifdef __64BIT__
  222. ld LDC, FRAMESLOT(0) + STACKSIZE(SP)
  223. #endif
  224. #endif
  225. #if defined(_AIX) || defined(__APPLE__)
  226. #ifdef __64BIT__
  227. ld LDC, FRAMESLOT(0) + STACKSIZE(SP)
  228. #else
  229. #ifdef DOUBLE
  230. lwz B, FRAMESLOT(0) + STACKSIZE(SP)
  231. lwz C, FRAMESLOT(1) + STACKSIZE(SP)
  232. lwz LDC, FRAMESLOT(2) + STACKSIZE(SP)
  233. #else
  234. lwz LDC, FRAMESLOT(0) + STACKSIZE(SP)
  235. #endif
  236. #endif
  237. #endif
  238. #ifndef PREFETCHTEST
  239. #ifdef PPC970
  240. li PREC, 16 * SIZE
  241. #endif
  242. #else
  243. #if defined(linux) || defined(__FreeBSD__)
  244. #ifndef __64BIT__
  245. lwz PREB, FRAMESLOT(2) + STACKSIZE(SP)
  246. lwz PREC, FRAMESLOT(3) + STACKSIZE(SP)
  247. #else
  248. ld PREB, FRAMESLOT(3) + STACKSIZE(SP)
  249. ld PREC, FRAMESLOT(4) + STACKSIZE(SP)
  250. #endif
  251. #endif
  252. #if defined(_AIX) || defined(__APPLE__)
  253. #ifdef __64BIT__
  254. ld PREB, FRAMESLOT(3) + STACKSIZE(SP)
  255. ld PREC, FRAMESLOT(4) + STACKSIZE(SP)
  256. #else
  257. #ifdef DOUBLE
  258. lwz PREB, FRAMESLOT(4) + STACKSIZE(SP)
  259. lwz PREC, FRAMESLOT(5) + STACKSIZE(SP)
  260. #else
  261. lwz PREB, FRAMESLOT(3) + STACKSIZE(SP)
  262. lwz PREC, FRAMESLOT(4) + STACKSIZE(SP)
  263. #endif
  264. #endif
  265. #endif
  266. #endif
  267. #ifndef PREFETCHTEST
  268. #ifdef CELL
  269. li PREB, (3 * 32 * SIZE)
  270. #else
  271. li PREB, (5 * 32 * SIZE)
  272. #endif
  273. #endif
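/* Save VRsave and mark every vector register live, then carve an aligned     */
/* scratch area out of the stack for the constants built below.               */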
  274. li r0, -1
  275. mfspr VREG, VRsave
  276. mtspr VRsave, r0
  277. addi SP, SP, -128
  278. li r0, -8192
  279. and SP, SP, r0
  280. fneg f3, f1
  281. fneg f4, f2
  282. #if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
  283. defined(NC) || defined(TC) || defined(NR) || defined(TR)
  284. stfs f1, ALPHA_R + 0(SP)
  285. stfs f1, ALPHA_R + 4(SP)
  286. stfs f1, ALPHA_R + 8(SP)
  287. stfs f1, ALPHA_R + 12(SP)
  288. stfs f4, ALPHA_I + 0(SP)
  289. stfs f2, ALPHA_I + 4(SP)
  290. stfs f4, ALPHA_I + 8(SP)
  291. stfs f2, ALPHA_I + 12(SP)
  292. #else
  293. stfs f1, ALPHA_R + 0(SP)
  294. stfs f3, ALPHA_R + 4(SP)
  295. stfs f1, ALPHA_R + 8(SP)
  296. stfs f3, ALPHA_R + 12(SP)
  297. stfs f2, ALPHA_I + 0(SP)
  298. stfs f2, ALPHA_I + 4(SP)
  299. stfs f2, ALPHA_I + 8(SP)
  300. stfs f2, ALPHA_I + 12(SP)
  301. #endif
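/* Build the byte-permute mask that exchanges the real and imaginary words    */
/* of each complex pair.                                                      */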
  302. li I, Address_L(0x04050607)
  303. addis I, I, Address_H(0x04050607)
  304. stw I, SWAP + 0(SP)
  305. li I, Address_L(0x00010203)
  306. addis I, I, Address_H(0x00010203)
  307. stw I, SWAP + 4(SP)
  308. li I, Address_L(0x0c0d0e0f)
  309. addis I, I, Address_H(0x0c0d0e0f)
  310. stw I, SWAP + 8(SP)
  311. li I, Address_L(0x08090a0b)
  312. addis I, I, Address_H(0x08090a0b)
  313. stw I, SWAP + 12(SP)
  314. #if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
  315. defined(RR) || defined(RC) || defined(CR) || defined(CC)
  316. lis I, 0x8000
  317. stw I, NEG + 0(SP)
  318. stw I, NEG + 8(SP)
  319. li I, 0
  320. stw I, NEG + 4(SP)
  321. stw I, NEG + 12(SP)
  322. #else
  323. li I, 0
  324. stw I, NEG + 0(SP)
  325. stw I, NEG + 8(SP)
  326. lis I, 0x8000
  327. stw I, NEG + 4(SP)
  328. stw I, NEG + 12(SP)
  329. #endif
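/* Store a zero word for clearing the scalar accumulators, convert LDC from   */
/* complex elements to a byte stride, and preload the constant offsets used   */
/* by the vector loads and stores.                                            */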
  330. li r0, 0
  331. stw r0, FZERO(SP)
  332. slwi LDC, LDC, ZBASE_SHIFT
  333. li OFFSET_1, 4 * SIZE
  334. li OFFSET_2, 8 * SIZE
  335. li OFFSET_3, 12 * SIZE
  336. li OFFSET_4, 16 * SIZE
  337. li OFFSET_5, 20 * SIZE
  338. li OFFSET_6, 24 * SIZE
  339. li OFFSET_7, 28 * SIZE
  340. cmpwi cr0, M, 0
  341. ble LL(999)
  342. cmpwi cr0, N, 0
  343. ble LL(999)
  344. cmpwi cr0, K, 0
  345. ble LL(999)
  346. srawi. J, N, 1
  347. ble LL(50)
  348. .align 4
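/* Outer loop over pairs of columns of C (J = N / 2). */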
  349. LL(01):
  350. mr CO1, C
  351. add CO2, C, LDC
  352. add C, CO2, LDC
  353. mr AO, A
  354. srawi. I, M, 3
  355. ble LL(20)
  356. .align 4
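/* 8x2 micro-kernel: each vector register holds two complex values; c01-c08   */
/* accumulate the partial products for the first column, c09-c16 for the      */
/* second.                                                                     */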
  357. LL(11):
  358. vxor c01, c01, c01
  359. LOAD_B b1, OFFSET_0, B
  360. vxor c02, c02, c02
  361. LOAD_A a1, OFFSET_0, AO
  362. vxor c03, c03, c03
  363. LOAD_A a2, OFFSET_1, AO
  364. vxor c04, c04, c04
  365. LOAD_A a3, OFFSET_2, AO
  366. vxor c04, c04, c04
  367. vxor c05, c05, c05
  368. vxor c06, c06, c06
  369. vxor c07, c07, c07
  370. vxor c08, c08, c08
  371. vxor c09, c09, c09
  372. dcbtst CO1, PREC
  373. vxor c10, c10, c10
  374. dcbtst CO2, PREC
  375. vxor c11, c11, c11
  376. vxor c12, c12, c12
  377. vxor c13, c13, c13
  378. mr BO, B
  379. vxor c14, c14, c14
  380. srawi. r0, K, 2
  381. vxor c15, c15, c15
  382. mtspr CTR, r0
  383. vxor c16, c16, c16
  384. vspltw bp1, b1, 0
  385. ble LL(13)
  386. .align 4
  387. #define NOP1 mr r3, r3
  388. #define NOP2 mr r4, r4
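/* Main inner loop, unrolled four times over K. NOP1/NOP2 are do-nothing      */
/* register moves, presumably kept as scheduling padding for the Cell PPU.    */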
  389. LL(12):
  390. vmaddfp c01, a1, bp1, c01
  391. vspltw bp2, b1, 1
  392. vmaddfp c02, a2, bp1, c02
  393. LOAD_A a4, OFFSET_3, AO
  394. vmaddfp c03, a3, bp1, c03
  395. dcbt AO, PREA
  396. vmaddfp c04, a4, bp1, c04
  397. NOP2
  398. vmaddfp c05, a1, bp2, c05
  399. vspltw bp1, b1, 2
  400. vmaddfp c06, a2, bp2, c06
  401. NOP2
  402. vmaddfp c07, a3, bp2, c07
  403. NOP1
  404. vmaddfp c08, a4, bp2, c08
  405. dcbt BO, PREB
  406. vmaddfp c09, a1, bp1, c09
  407. vspltw bp2, b1, 3
  408. vmaddfp c10, a2, bp1, c10
  409. LOAD_B b2, OFFSET_1, BO
  410. vmaddfp c11, a3, bp1, c11
  411. addi BO, BO, 8 * SIZE
  412. vmaddfp c12, a4, bp1, c12
  413. NOP1
  414. vmaddfp c13, a1, bp2, c13
  415. vspltw bp1, b2, 0
  416. vmaddfp c14, a2, bp2, c14
  417. LOAD_A a5, OFFSET_4, AO
  418. vmaddfp c15, a3, bp2, c15
  419. LOAD_A a6, OFFSET_5, AO
  420. vmaddfp c16, a4, bp2, c16
  421. vspltw bp2, b2, 1
  422. vmaddfp c01, a5, bp1, c01
  423. LOAD_A a7, OFFSET_6, AO
  424. vmaddfp c02, a6, bp1, c02
  425. LOAD_A a8, OFFSET_7, AO
  426. vmaddfp c03, a7, bp1, c03
  427. NOP1
  428. vmaddfp c04, a8, bp1, c04
  429. NOP2
  430. vmaddfp c05, a5, bp2, c05
  431. vspltw bp1, b2, 2
  432. vmaddfp c06, a6, bp2, c06
  433. addi AO, AO, 32 * SIZE
  434. vmaddfp c07, a7, bp2, c07
  435. LOAD_B b1, OFFSET_0, BO
  436. vmaddfp c08, a8, bp2, c08
  437. NOP1
  438. vmaddfp c09, a5, bp1, c09
  439. vspltw bp2, b2, 3
  440. vmaddfp c10, a6, bp1, c10
  441. NOP2
  442. vmaddfp c11, a7, bp1, c11
  443. NOP1
  444. vmaddfp c12, a8, bp1, c12
  445. dcbt AO, PREA
  446. vmaddfp c13, a5, bp2, c13
  447. vspltw bp1, b1, 0
  448. vmaddfp c14, a6, bp2, c14
  449. LOAD_A a1, OFFSET_0, AO //
  450. vmaddfp c15, a7, bp2, c15
  451. LOAD_A a2, OFFSET_1, AO
  452. vmaddfp c16, a8, bp2, c16
  453. vspltw bp2, b1, 1
  454. vmaddfp c01, a1, bp1, c01
  455. LOAD_A a3, OFFSET_2, AO
  456. vmaddfp c02, a2, bp1, c02
  457. LOAD_A a4, OFFSET_3, AO
  458. vmaddfp c03, a3, bp1, c03
  459. NOP1
  460. vmaddfp c04, a4, bp1, c04
  461. NOP2
  462. vmaddfp c05, a1, bp2, c05
  463. vspltw bp1, b1, 2
  464. vmaddfp c06, a2, bp2, c06
  465. NOP2
  466. vmaddfp c07, a3, bp2, c07
  467. NOP1
  468. vmaddfp c08, a4, bp2, c08
  469. LOAD_B b2, OFFSET_1, BO
  470. vmaddfp c09, a1, bp1, c09
  471. vspltw bp2, b1, 3
  472. vmaddfp c10, a2, bp1, c10
  473. NOP2
  474. vmaddfp c11, a3, bp1, c11
  475. NOP1
  476. vmaddfp c12, a4, bp1, c12
  477. addi BO, BO, 8 * SIZE
  478. vmaddfp c13, a1, bp2, c13
  479. vspltw bp1, b2, 0
  480. vmaddfp c14, a2, bp2, c14
  481. LOAD_A a5, OFFSET_4, AO
  482. vmaddfp c15, a3, bp2, c15
  483. LOAD_A a6, OFFSET_5, AO
  484. vmaddfp c16, a4, bp2, c16
  485. vspltw bp2, b2, 1
  486. vmaddfp c01, a5, bp1, c01
  487. LOAD_A a7, OFFSET_6, AO
  488. vmaddfp c02, a6, bp1, c02
  489. LOAD_A a8, OFFSET_7, AO
  490. vmaddfp c03, a7, bp1, c03
  491. addi AO, AO, 32 * SIZE
  492. vmaddfp c04, a8, bp1, c04
  493. NOP2
  494. vmaddfp c05, a5, bp2, c05
  495. vspltw bp1, b2, 2
  496. vmaddfp c06, a6, bp2, c06
  497. NOP2
  498. vmaddfp c07, a7, bp2, c07
  499. NOP1
  500. vmaddfp c08, a8, bp2, c08
  501. LOAD_B b1, OFFSET_0, BO
  502. vmaddfp c09, a5, bp1, c09
  503. vspltw bp2, b2, 3
  504. vmaddfp c10, a6, bp1, c10
  505. LOAD_A a1, OFFSET_0, AO //
  506. vmaddfp c11, a7, bp1, c11
  507. NOP2
  508. vmaddfp c12, a8, bp1, c12
  509. vspltw bp1, b1, 0
  510. vmaddfp c13, a5, bp2, c13
  511. LOAD_A a2, OFFSET_1, AO
  512. vmaddfp c14, a6, bp2, c14
  513. LOAD_A a3, OFFSET_2, AO
  514. vmaddfp c15, a7, bp2, c15
  515. NOP1
  516. vmaddfp c16, a8, bp2, c16
  517. bdnz+ LL(12)
  518. .align 4
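/* Tail of the K loop: two leftover iterations are handled here, a single     */
/* remaining iteration at LL(15).                                              */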
  519. LL(13):
  520. andi. r0, K, 2
  521. nop
  522. nop
  523. ble+ LL(15)
  524. .align 4
  525. vmaddfp c01, a1, bp1, c01
  526. vspltw bp2, b1, 1
  527. vmaddfp c02, a2, bp1, c02
  528. LOAD_A a4, OFFSET_3, AO
  529. vmaddfp c03, a3, bp1, c03
  530. NOP1
  531. vmaddfp c04, a4, bp1, c04
  532. NOP2
  533. vmaddfp c05, a1, bp2, c05
  534. vspltw bp1, b1, 2
  535. vmaddfp c06, a2, bp2, c06
  536. NOP2
  537. vmaddfp c07, a3, bp2, c07
  538. NOP1
  539. vmaddfp c08, a4, bp2, c08
  540. LOAD_B b2, OFFSET_1, BO
  541. vmaddfp c09, a1, bp1, c09
  542. vspltw bp2, b1, 3
  543. vmaddfp c10, a2, bp1, c10
  544. LOAD_A a5, OFFSET_4, AO
  545. vmaddfp c11, a3, bp1, c11
  546. LOAD_A a6, OFFSET_5, AO
  547. vmaddfp c12, a4, bp1, c12
  548. addi BO, BO, 8 * SIZE
  549. vmaddfp c13, a1, bp2, c13
  550. vspltw bp1, b2, 0
  551. vmaddfp c14, a2, bp2, c14
  552. LOAD_A a7, OFFSET_6, AO
  553. vmaddfp c15, a3, bp2, c15
  554. LOAD_A a8, OFFSET_7, AO
  555. vmaddfp c16, a4, bp2, c16
  556. addi AO, AO, 32 * SIZE
  557. vmaddfp c01, a5, bp1, c01
  558. vspltw bp2, b2, 1
  559. vmaddfp c02, a6, bp1, c02
  560. NOP2
  561. vmaddfp c03, a7, bp1, c03
  562. NOP1
  563. vmaddfp c04, a8, bp1, c04
  564. NOP2
  565. vmaddfp c05, a5, bp2, c05
  566. vspltw bp1, b2, 2
  567. vmaddfp c06, a6, bp2, c06
  568. NOP2
  569. vmaddfp c07, a7, bp2, c07
  570. NOP1
  571. vmaddfp c08, a8, bp2, c08
  572. LOAD_B b1, OFFSET_0, BO
  573. vmaddfp c09, a5, bp1, c09
  574. vspltw bp2, b2, 3
  575. vmaddfp c10, a6, bp1, c10
  576. LOAD_A a1, OFFSET_0, AO
  577. vmaddfp c11, a7, bp1, c11
  578. LOAD_A a2, OFFSET_1, AO
  579. vmaddfp c12, a8, bp1, c12
  580. NOP2
  581. vmaddfp c13, a5, bp2, c13
  582. vspltw bp1, b1, 0
  583. vmaddfp c14, a6, bp2, c14
  584. LOAD_A a3, OFFSET_2, AO
  585. vmaddfp c15, a7, bp2, c15
  586. vmaddfp c16, a8, bp2, c16
  587. .align 4
  588. LL(15):
  589. andi. r0, K, 1
  590. vxor VZERO, VZERO, VZERO
  591. ble+ LL(18)
  592. .align 4
  593. vmaddfp c01, a1, bp1, c01
  594. vspltw bp2, b1, 1
  595. vmaddfp c02, a2, bp1, c02
  596. LOAD_A a4, OFFSET_3, AO
  597. vmaddfp c03, a3, bp1, c03
  598. nop
  599. vmaddfp c04, a4, bp1, c04
  600. nop
  601. vmaddfp c05, a1, bp2, c05
  602. vspltw bp1, b1, 2
  603. vmaddfp c06, a2, bp2, c06
  604. nop
  605. vmaddfp c07, a3, bp2, c07
  606. nop
  607. vmaddfp c08, a4, bp2, c08
  608. nop
  609. vmaddfp c09, a1, bp1, c09
  610. vspltw bp2, b1, 3
  611. vmaddfp c10, a2, bp1, c10
  612. addi AO, AO, 16 * SIZE
  613. vmaddfp c11, a3, bp1, c11
  614. addi BO, BO, 4 * SIZE
  615. vmaddfp c12, a4, bp1, c12
  616. nop
  617. vmaddfp c13, a1, bp2, c13
  618. vmaddfp c14, a2, bp2, c14
  619. vmaddfp c15, a3, bp2, c15
  620. vmaddfp c16, a4, bp2, c16
  621. .align 4
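/* Merge the real/imaginary partial products, scale by alpha, and accumulate  */
/* into C; the lvsr-generated permutes cope with unaligned C pointers.        */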
  622. LL(18):
  623. lvx swap, OFFSET_0, SP
  624. lvx neg, OFFSET_1, SP
  625. lvx alpha_r, OFFSET_2, SP
  626. lvx alpha_i, OFFSET_3, SP
  627. vxor VZERO, VZERO, VZERO
  628. vperm c05, c05, c05, swap
  629. vperm c06, c06, c06, swap
  630. vperm c07, c07, c07, swap
  631. vperm c08, c08, c08, swap
  632. vperm c13, c13, c13, swap
  633. vperm c14, c14, c14, swap
  634. vperm c15, c15, c15, swap
  635. vperm c16, c16, c16, swap
  636. vxor c05, c05, neg
  637. vxor c06, c06, neg
  638. vxor c07, c07, neg
  639. vxor c08, c08, neg
  640. vxor c13, c13, neg
  641. vxor c14, c14, neg
  642. vxor c15, c15, neg
  643. vxor c16, c16, neg
  644. vaddfp c01, c01, c05
  645. vaddfp c02, c02, c06
  646. vaddfp c03, c03, c07
  647. vaddfp c04, c04, c08
  648. vaddfp c09, c09, c13
  649. vaddfp c10, c10, c14
  650. vaddfp c11, c11, c15
  651. vaddfp c12, c12, c16
  652. vperm c05, c01, c01, swap
  653. vperm c06, c02, c02, swap
  654. vperm c07, c03, c03, swap
  655. vperm c08, c04, c04, swap
  656. vperm c13, c09, c09, swap
  657. vperm c14, c10, c10, swap
  658. vperm c15, c11, c11, swap
  659. vperm c16, c12, c12, swap
  660. vmaddfp c01, alpha_r, c01, VZERO
  661. vmaddfp c02, alpha_r, c02, VZERO
  662. vmaddfp c03, alpha_r, c03, VZERO
  663. vmaddfp c04, alpha_r, c04, VZERO
  664. vmaddfp c01, alpha_i, c05, c01
  665. vmaddfp c02, alpha_i, c06, c02
  666. vmaddfp c03, alpha_i, c07, c03
  667. vmaddfp c04, alpha_i, c08, c04
  668. vmaddfp c09, alpha_r, c09, VZERO
  669. vmaddfp c10, alpha_r, c10, VZERO
  670. vmaddfp c11, alpha_r, c11, VZERO
  671. vmaddfp c12, alpha_r, c12, VZERO
  672. vmaddfp c09, alpha_i, c13, c09
  673. vmaddfp c10, alpha_i, c14, c10
  674. vmaddfp c11, alpha_i, c15, c11
  675. vmaddfp c12, alpha_i, c16, c12
  676. lvx C1, OFFSET_0, CO1
  677. lvx C2, OFFSET_1, CO1
  678. lvx C3, OFFSET_2, CO1
  679. lvx C4, OFFSET_3, CO1
  680. lvx C5, OFFSET_4, CO1
  681. lvsr PERMRSHIFT1, 0, CO1
  682. lvsr PERMRSHIFT2, 0, CO2
  683. vperm c00, VZERO, c01, PERMRSHIFT1
  684. vperm c01, c01, c02, PERMRSHIFT1
  685. vperm c02, c02, c03, PERMRSHIFT1
  686. vperm c03, c03, c04, PERMRSHIFT1
  687. vperm c04, c04, VZERO, PERMRSHIFT1
  688. vaddfp c00, c00, C1
  689. vaddfp c01, c01, C2
  690. vaddfp c02, c02, C3
  691. vaddfp c03, c03, C4
  692. vaddfp c04, c04, C5
  693. stvx c00, OFFSET_0, CO1
  694. stvx c01, OFFSET_1, CO1
  695. stvx c02, OFFSET_2, CO1
  696. stvx c03, OFFSET_3, CO1
  697. stvx c04, OFFSET_4, CO1
  698. lvx C1, OFFSET_0, CO2
  699. lvx C2, OFFSET_1, CO2
  700. lvx C3, OFFSET_2, CO2
  701. lvx C4, OFFSET_3, CO2
  702. lvx C5, OFFSET_4, CO2
  703. vperm c00, VZERO, c09, PERMRSHIFT2
  704. vperm c09, c09, c10, PERMRSHIFT2
  705. vperm c10, c10, c11, PERMRSHIFT2
  706. vperm c11, c11, c12, PERMRSHIFT2
  707. vperm c12, c12, VZERO, PERMRSHIFT2
  708. vaddfp c00, c00, C1
  709. vaddfp c09, c09, C2
  710. vaddfp c10, c10, C3
  711. vaddfp c11, c11, C4
  712. vaddfp c12, c12, C5
  713. stvx c00, OFFSET_0, CO2
  714. stvx c09, OFFSET_1, CO2
  715. stvx c10, OFFSET_2, CO2
  716. stvx c11, OFFSET_3, CO2
  717. stvx c12, OFFSET_4, CO2
  718. addi CO1, CO1, 16 * SIZE
  719. addi CO2, CO2, 16 * SIZE
  720. addic. I, I, -1
  721. bgt+ LL(11)
  722. .align 4
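/* Remaining M % 8 rows: the 4-row, 2-row, and 1-row cases follow. */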
  723. LL(20):
  724. andi. I, M, 4
  725. ble LL(30)
  726. vxor c01, c01, c01
  727. LOAD_A a1, OFFSET_0, AO
  728. vxor c02, c02, c02
  729. LOAD_A a2, OFFSET_1, AO
  730. vxor c05, c05, c05
  731. LOAD_A a3, OFFSET_2, AO
  732. vxor c06, c06, c06
  733. LOAD_A a4, OFFSET_3, AO
  734. vxor c09, c09, c09
  735. LOAD_B b1, OFFSET_0, B
  736. vxor c10, c10, c10
  737. LOAD_B b2, OFFSET_1, B
  738. vxor c13, c13, c13
  739. vxor c14, c14, c14
  740. mr BO, B
  741. vspltw bp1, b1, 0
  742. srawi. r0, K, 1
  743. mtspr CTR, r0
  744. ble LL(25)
  745. .align 4
  746. LL(22):
  747. vmaddfp c01, a1, bp1, c01
  748. vspltw bp2, b1, 1
  749. addi AO, AO, 16 * SIZE
  750. vmaddfp c02, a2, bp1, c02
  751. addi BO, BO, 8 * SIZE
  752. vmaddfp c05, a1, bp2, c05
  753. vspltw bp1, b1, 2
  754. vmaddfp c06, a2, bp2, c06
  755. vmaddfp c09, a1, bp1, c09
  756. vspltw bp2, b1, 3
  757. LOAD_B b1, OFFSET_0, BO
  758. vmaddfp c10, a2, bp1, c10
  759. vmaddfp c13, a1, bp2, c13
  760. LOAD_A a1, OFFSET_0, AO
  761. vspltw bp1, b2, 0
  762. vmaddfp c14, a2, bp2, c14
  763. LOAD_A a2, OFFSET_1, AO
  764. vmaddfp c01, a3, bp1, c01
  765. vspltw bp2, b2, 1
  766. vmaddfp c02, a4, bp1, c02
  767. vmaddfp c05, a3, bp2, c05
  768. vspltw bp1, b2, 2
  769. vmaddfp c06, a4, bp2, c06
  770. vmaddfp c09, a3, bp1, c09
  771. vspltw bp2, b2, 3
  772. LOAD_B b2, OFFSET_1, BO
  773. vmaddfp c10, a4, bp1, c10
  774. vmaddfp c13, a3, bp2, c13
  775. LOAD_A a3, OFFSET_2, AO
  776. vmaddfp c14, a4, bp2, c14
  777. LOAD_A a4, OFFSET_3, AO
  778. vspltw bp1, b1, 0
  779. bdnz LL(22)
  780. .align 4
  781. LL(25):
  782. andi. r0, K, 1
  783. ble+ LL(28)
  784. .align 4
  785. LL(26):
  786. vmaddfp c01, a1, bp1, c01
  787. vspltw bp2, b1, 1
  788. vmaddfp c02, a2, bp1, c02
  789. nop
  790. vmaddfp c05, a1, bp2, c05
  791. vspltw bp1, b1, 2
  792. vmaddfp c06, a2, bp2, c06
  793. nop
  794. vmaddfp c09, a1, bp1, c09
  795. vspltw bp2, b1, 3
  796. vmaddfp c10, a2, bp1, c10
  797. addi AO, AO, 8 * SIZE
  798. vmaddfp c13, a1, bp2, c13
  799. addi BO, BO, 4 * SIZE
  800. vmaddfp c14, a2, bp2, c14
  801. nop
  802. .align 4
  803. LL(28):
  804. vxor VZERO, VZERO, VZERO
  805. lvx swap, OFFSET_0, SP
  806. lvx neg, OFFSET_1, SP
  807. lvx alpha_r, OFFSET_2, SP
  808. lvx alpha_i, OFFSET_3, SP
  809. vperm c05, c05, c05, swap
  810. vperm c06, c06, c06, swap
  811. vperm c13, c13, c13, swap
  812. vperm c14, c14, c14, swap
  813. vxor c05, c05, neg
  814. vxor c06, c06, neg
  815. vxor c13, c13, neg
  816. vxor c14, c14, neg
  817. vaddfp c01, c01, c05
  818. vaddfp c02, c02, c06
  819. vaddfp c09, c09, c13
  820. vaddfp c10, c10, c14
  821. vperm c05, c01, c01, swap
  822. vperm c06, c02, c02, swap
  823. vperm c13, c09, c09, swap
  824. vperm c14, c10, c10, swap
  825. vmaddfp c01, alpha_r, c01, VZERO
  826. vmaddfp c02, alpha_r, c02, VZERO
  827. vmaddfp c01, alpha_i, c05, c01
  828. vmaddfp c02, alpha_i, c06, c02
  829. vmaddfp c09, alpha_r, c09, VZERO
  830. vmaddfp c10, alpha_r, c10, VZERO
  831. vmaddfp c09, alpha_i, c13, c09
  832. vmaddfp c10, alpha_i, c14, c10
  833. lvx C1, OFFSET_0, CO1
  834. lvx C2, OFFSET_1, CO1
  835. lvx C3, OFFSET_2, CO1
  836. lvsr PERMRSHIFT1, 0, CO1
  837. lvsr PERMRSHIFT2, 0, CO2
  838. vperm c00, VZERO, c01, PERMRSHIFT1
  839. vperm c01, c01, c02, PERMRSHIFT1
  840. vperm c02, c02, VZERO, PERMRSHIFT1
  841. vaddfp c00, c00, C1
  842. vaddfp c01, c01, C2
  843. vaddfp c02, c02, C3
  844. stvx c00, OFFSET_0, CO1
  845. stvx c01, OFFSET_1, CO1
  846. stvx c02, OFFSET_2, CO1
  847. lvx C1, OFFSET_0, CO2
  848. lvx C2, OFFSET_1, CO2
  849. lvx C3, OFFSET_2, CO2
  850. vperm c00, VZERO, c09, PERMRSHIFT2
  851. vperm c09, c09, c10, PERMRSHIFT2
  852. vperm c10, c10, VZERO, PERMRSHIFT2
  853. vaddfp c00, c00, C1
  854. vaddfp c09, c09, C2
  855. vaddfp c10, c10, C3
  856. stvx c00, OFFSET_0, CO2
  857. stvx c09, OFFSET_1, CO2
  858. stvx c10, OFFSET_2, CO2
  859. addi CO1, CO1, 8 * SIZE
  860. addi CO2, CO2, 8 * SIZE
  861. .align 4
  862. LL(30):
  863. andi. I, M, 2
  864. ble LL(40)
  865. vxor c01, c01, c01
  866. LOAD_A a1, OFFSET_0, AO
  867. vxor c02, c02, c02
  868. LOAD_A a2, OFFSET_1, AO
  869. vxor c05, c05, c05
  870. LOAD_B b1, OFFSET_0, B
  871. vxor c06, c06, c06
  872. LOAD_B b2, OFFSET_1, B
  873. vxor c09, c09, c09
  874. vxor c10, c10, c10
  875. vxor c13, c13, c13
  876. vxor c14, c14, c14
  877. vspltw bp1, b1, 0
  878. mr BO, B
  879. srawi. r0, K, 1
  880. mtspr CTR, r0
  881. ble LL(35)
  882. .align 4
  883. LL(32):
  884. vmaddfp c01, a1, bp1, c01
  885. addi AO, AO, 8 * SIZE
  886. vspltw bp2, b1, 1
  887. vmaddfp c05, a1, bp2, c05
  888. addi BO, BO, 8 * SIZE
  889. vspltw bp1, b1, 2
  890. vmaddfp c09, a1, bp1, c09
  891. vspltw bp2, b1, 3
  892. vmaddfp c13, a1, bp2, c13
  893. LOAD_A a1, OFFSET_0, AO
  894. vspltw bp1, b2, 0
  895. LOAD_B b1, OFFSET_0, BO
  896. vmaddfp c02, a2, bp1, c02
  897. vspltw bp2, b2, 1
  898. vmaddfp c06, a2, bp2, c06
  899. vspltw bp1, b2, 2
  900. vmaddfp c10, a2, bp1, c10
  901. vspltw bp2, b2, 3
  902. LOAD_B b2, OFFSET_1, BO
  903. vmaddfp c14, a2, bp2, c14
  904. LOAD_A a2, OFFSET_1, AO
  905. vspltw bp1, b1, 0
  906. bdnz LL(32)
  907. .align 4
  908. LL(35):
  909. andi. r0, K, 1
  910. ble+ LL(38)
  911. .align 4
  912. LL(36):
  913. vmaddfp c01, a1, bp1, c01
  914. vspltw bp2, b1, 1
  915. vmaddfp c05, a1, bp2, c05
  916. vspltw bp1, b1, 2
  917. vmaddfp c09, a1, bp1, c09
  918. vspltw bp2, b1, 3
  919. vmaddfp c13, a1, bp2, c13
  920. addi AO, AO, 4 * SIZE
  921. addi BO, BO, 4 * SIZE
  922. .align 4
  923. LL(38):
  924. vaddfp c01, c01, c02
  925. vaddfp c05, c05, c06
  926. vaddfp c09, c09, c10
  927. vaddfp c13, c13, c14
  928. vxor VZERO, VZERO, VZERO
  929. lvx swap, OFFSET_0, SP
  930. lvx neg, OFFSET_1, SP
  931. lvx alpha_r, OFFSET_2, SP
  932. lvx alpha_i, OFFSET_3, SP
  933. vperm c05, c05, c05, swap
  934. vperm c13, c13, c13, swap
  935. vxor c05, c05, neg
  936. vxor c13, c13, neg
  937. vaddfp c01, c01, c05
  938. vaddfp c09, c09, c13
  939. vperm c05, c01, c01, swap
  940. vperm c13, c09, c09, swap
  941. vmaddfp c01, alpha_r, c01, VZERO
  942. vmaddfp c01, alpha_i, c05, c01
  943. vmaddfp c09, alpha_r, c09, VZERO
  944. vmaddfp c09, alpha_i, c13, c09
  945. lvx C1, OFFSET_0, CO1
  946. lvx C2, OFFSET_1, CO1
  947. lvsr PERMRSHIFT1, 0, CO1
  948. lvsr PERMRSHIFT2, 0, CO2
  949. vperm c00, VZERO, c01, PERMRSHIFT1
  950. vperm c01, c01, VZERO, PERMRSHIFT1
  951. vaddfp c00, c00, C1
  952. vaddfp c01, c01, C2
  953. stvx c00, OFFSET_0, CO1
  954. stvx c01, OFFSET_1, CO1
  955. lvx C1, OFFSET_0, CO2
  956. lvx C2, OFFSET_1, CO2
  957. vperm c00, VZERO, c09, PERMRSHIFT2
  958. vperm c09, c09, VZERO, PERMRSHIFT2
  959. vaddfp c00, c00, C1
  960. vaddfp c09, c09, C2
  961. stvx c00, OFFSET_0, CO2
  962. stvx c09, OFFSET_1, CO2
  963. addi CO1, CO1, 4 * SIZE
  964. addi CO2, CO2, 4 * SIZE
  965. .align 4
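/* Final odd row: computed with the scalar FPU rather than AltiVec. */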
  966. LL(40):
  967. andi. I, M, 1
  968. ble LL(49)
  969. mr BO, B
  970. LFD f8, 0 * SIZE(AO)
  971. LFD f9, 1 * SIZE(AO)
  972. LFD f10, 0 * SIZE(BO)
  973. LFD f11, 1 * SIZE(BO)
  974. LFD f12, 2 * SIZE(BO)
  975. LFD f13, 3 * SIZE(BO)
  976. lfs f0, FZERO(SP)
  977. fmr f1, f0
  978. fmr f2, f0
  979. fmr f3, f0
  980. fmr f4, f0
  981. fmr f5, f0
  982. fmr f6, f0
  983. fmr f7, f0
  984. srawi. r0, K, 1
  985. mtspr CTR, r0
  986. ble LL(45)
  987. .align 4
  988. LL(42):
  989. fmadd f0, f8, f10, f0
  990. fmadd f2, f8, f11, f2
  991. fmadd f4, f8, f12, f4
  992. fmadd f6, f8, f13, f6
  993. fmadd f1, f9, f10, f1
  994. fmadd f3, f9, f11, f3
  995. fmadd f5, f9, f12, f5
  996. fmadd f7, f9, f13, f7
  997. LFD f8, 2 * SIZE(AO)
  998. LFD f9, 3 * SIZE(AO)
  999. LFD f10, 4 * SIZE(BO)
  1000. LFD f11, 5 * SIZE(BO)
  1001. LFD f12, 6 * SIZE(BO)
  1002. LFD f13, 7 * SIZE(BO)
  1003. fmadd f0, f8, f10, f0
  1004. fmadd f2, f8, f11, f2
  1005. fmadd f4, f8, f12, f4
  1006. fmadd f6, f8, f13, f6
  1007. fmadd f1, f9, f10, f1
  1008. fmadd f3, f9, f11, f3
  1009. fmadd f5, f9, f12, f5
  1010. fmadd f7, f9, f13, f7
  1011. LFD f8, 4 * SIZE(AO)
  1012. LFD f9, 5 * SIZE(AO)
  1013. LFD f10, 8 * SIZE(BO)
  1014. LFD f11, 9 * SIZE(BO)
  1015. LFD f12, 10 * SIZE(BO)
  1016. LFD f13, 11 * SIZE(BO)
  1017. addi AO, AO, 4 * SIZE
  1018. addi BO, BO, 8 * SIZE
  1019. bdnz LL(42)
  1020. .align 4
  1021. LL(45):
  1022. andi. r0, K, 1
  1023. ble LL(48)
  1024. .align 4
  1025. LL(46):
  1026. fmadd f0, f8, f10, f0
  1027. fmadd f2, f8, f11, f2
  1028. fmadd f4, f8, f12, f4
  1029. fmadd f6, f8, f13, f6
  1030. fmadd f1, f9, f10, f1
  1031. fmadd f3, f9, f11, f3
  1032. fmadd f5, f9, f12, f5
  1033. fmadd f7, f9, f13, f7
  1034. addi AO, AO, 2 * SIZE
  1035. addi BO, BO, 4 * SIZE
  1036. .align 4
  1037. LL(48):
  1038. #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
  1039. fsub f0, f0, f3
  1040. fadd f1, f1, f2
  1041. fsub f4, f4, f7
  1042. fadd f5, f5, f6
  1043. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  1044. fadd f0, f0, f3
  1045. fsub f1, f1, f2
  1046. fadd f4, f4, f7
  1047. fsub f5, f5, f6
  1048. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  1049. fadd f0, f0, f3
  1050. fsub f1, f2, f1
  1051. fadd f4, f4, f7
  1052. fsub f5, f6, f5
  1053. #else /* RR, RC, CR, CC */
  1054. fsub f0, f0, f3
  1055. fadd f1, f1, f2
  1056. fsub f4, f4, f7
  1057. fadd f5, f5, f6
  1058. #endif
  1059. LFD f8, 0 * SIZE(CO1)
  1060. LFD f9, 1 * SIZE(CO1)
  1061. LFD f10, 0 * SIZE(CO2)
  1062. LFD f11, 1 * SIZE(CO2)
  1063. lfs f12, ALPHA_R + 0(SP)
  1064. lfs f13, ALPHA_I + 4(SP)
  1065. #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
  1066. fmadd f8, f12, f0, f8
  1067. fnmsub f9, f12, f1, f9
  1068. fmadd f10, f12, f4, f10
  1069. fnmsub f11, f12, f5, f11
  1070. fmadd f8, f13, f1, f8
  1071. fmadd f9, f13, f0, f9
  1072. fmadd f10, f13, f5, f10
  1073. fmadd f11, f13, f4, f11
  1074. #else
  1075. fmadd f8, f12, f0, f8
  1076. fmadd f9, f12, f1, f9
  1077. fmadd f10, f12, f4, f10
  1078. fmadd f11, f12, f5, f11
  1079. fnmsub f8, f13, f1, f8
  1080. fmadd f9, f13, f0, f9
  1081. fnmsub f10, f13, f5, f10
  1082. fmadd f11, f13, f4, f11
  1083. #endif
  1084. STFD f8, 0 * SIZE(CO1)
  1085. STFD f9, 1 * SIZE(CO1)
  1086. STFD f10, 0 * SIZE(CO2)
  1087. STFD f11, 1 * SIZE(CO2)
  1088. LL(49):
  1089. mr B, BO
  1090. addic. J, J, -1
  1091. bgt LL(01)
  1092. .align 4
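/* Trailing single column of C when N is odd; same row blocking as the        */
/* two-column path above.                                                      */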
  1093. LL(50):
  1094. andi. J, N, 1
  1095. ble LL(999)
  1096. mr CO1, C
  1097. mr AO, A
  1098. srawi. I, M, 3
  1099. ble LL(70)
  1100. .align 4
  1101. LL(61):
  1102. vxor c01, c01, c01
  1103. LOAD_B b1, OFFSET_0, B
  1104. vxor c02, c02, c02
  1105. vxor c03, c03, c03
  1106. LOAD_A a1, OFFSET_0, AO
  1107. vxor c04, c04, c04
  1108. LOAD_A a2, OFFSET_1, AO
  1109. vxor c05, c05, c05
  1110. LOAD_A a3, OFFSET_2, AO
  1111. vxor c06, c06, c06
  1112. LOAD_A a4, OFFSET_3, AO
  1113. vxor c07, c07, c07
  1114. vxor c08, c08, c08
  1115. mr BO, B
  1116. dcbtst CO1, PREC
  1117. dcbtst CO2, PREC
  1118. vspltw bp1, b1, 0
  1119. srawi. r0, K, 1
  1120. mtspr CTR, r0
  1121. ble LL(65)
  1122. .align 4
  1123. LL(62):
  1124. LOAD_A a5, OFFSET_4, AO
  1125. LOAD_A a6, OFFSET_5, AO
  1126. LOAD_A a7, OFFSET_6, AO
  1127. LOAD_A a8, OFFSET_7, AO
  1128. vmaddfp c01, a1, bp1, c01
  1129. vspltw bp2, b1, 1
  1130. vmaddfp c02, a2, bp1, c02
  1131. vmaddfp c03, a3, bp1, c03
  1132. vmaddfp c04, a4, bp1, c04
  1133. vmaddfp c05, a1, bp2, c05
  1134. vspltw bp1, b1, 2
  1135. vmaddfp c06, a2, bp2, c06
  1136. vmaddfp c07, a3, bp2, c07
  1137. vmaddfp c08, a4, bp2, c08
  1138. vmaddfp c01, a5, bp1, c01
  1139. vspltw bp2, b1, 3
  1140. vmaddfp c02, a6, bp1, c02
  1141. vmaddfp c03, a7, bp1, c03
  1142. vmaddfp c04, a8, bp1, c04
  1143. LOAD_B b1, OFFSET_1, BO
  1144. vspltw bp1, b1, 0
  1145. vmaddfp c05, a5, bp2, c05
  1146. vmaddfp c06, a6, bp2, c06
  1147. vmaddfp c07, a7, bp2, c07
  1148. vmaddfp c08, a8, bp2, c08
  1149. addi AO, AO, 32 * SIZE
  1150. addi BO, BO, 4 * SIZE
  1151. LOAD_A a1, OFFSET_0, AO
  1152. LOAD_A a2, OFFSET_1, AO
  1153. LOAD_A a3, OFFSET_2, AO
  1154. LOAD_A a4, OFFSET_3, AO
  1155. bdnz LL(62)
  1156. .align 4
  1157. LL(65):
  1158. andi. r0, K, 1
  1159. ble+ LL(68)
  1160. .align 4
  1161. LL(66):
  1162. vmaddfp c01, a1, bp1, c01
  1163. vspltw bp2, b1, 1
  1164. vmaddfp c02, a2, bp1, c02
  1165. addi AO, AO, 16 * SIZE
  1166. vmaddfp c03, a3, bp1, c03
  1167. addi BO, BO, 2 * SIZE
  1168. vmaddfp c04, a4, bp1, c04
  1169. nop
  1170. vmaddfp c05, a1, bp2, c05
  1171. vmaddfp c06, a2, bp2, c06
  1172. vmaddfp c07, a3, bp2, c07
  1173. vmaddfp c08, a4, bp2, c08
  1174. .align 4
  1175. LL(68):
  1176. vxor VZERO, VZERO, VZERO
  1177. lvx swap, OFFSET_0, SP
  1178. lvx neg, OFFSET_1, SP
  1179. lvx alpha_r, OFFSET_2, SP
  1180. lvx alpha_i, OFFSET_3, SP
  1181. vperm c05, c05, c05, swap
  1182. vperm c06, c06, c06, swap
  1183. vperm c07, c07, c07, swap
  1184. vperm c08, c08, c08, swap
  1185. vxor c05, c05, neg
  1186. vxor c06, c06, neg
  1187. vxor c07, c07, neg
  1188. vxor c08, c08, neg
  1189. vaddfp c01, c01, c05
  1190. vaddfp c02, c02, c06
  1191. vaddfp c03, c03, c07
  1192. vaddfp c04, c04, c08
  1193. vperm c05, c01, c01, swap
  1194. vperm c06, c02, c02, swap
  1195. vperm c07, c03, c03, swap
  1196. vperm c08, c04, c04, swap
  1197. vmaddfp c01, alpha_r, c01, VZERO
  1198. vmaddfp c02, alpha_r, c02, VZERO
  1199. vmaddfp c03, alpha_r, c03, VZERO
  1200. vmaddfp c04, alpha_r, c04, VZERO
  1201. vmaddfp c01, alpha_i, c05, c01
  1202. vmaddfp c02, alpha_i, c06, c02
  1203. vmaddfp c03, alpha_i, c07, c03
  1204. vmaddfp c04, alpha_i, c08, c04
  1205. lvx C1, OFFSET_0, CO1
  1206. lvx C2, OFFSET_1, CO1
  1207. lvx C3, OFFSET_2, CO1
  1208. lvx C4, OFFSET_3, CO1
  1209. lvx C5, OFFSET_4, CO1
  1210. lvsr PERMRSHIFT1, 0, CO1
  1211. vperm c00, VZERO, c01, PERMRSHIFT1
  1212. vperm c01, c01, c02, PERMRSHIFT1
  1213. vperm c02, c02, c03, PERMRSHIFT1
  1214. vperm c03, c03, c04, PERMRSHIFT1
  1215. vperm c04, c04, VZERO, PERMRSHIFT1
  1216. vaddfp c00, c00, C1
  1217. vaddfp c01, c01, C2
  1218. vaddfp c02, c02, C3
  1219. vaddfp c03, c03, C4
  1220. vaddfp c04, c04, C5
  1221. stvx c00, OFFSET_0, CO1
  1222. stvx c01, OFFSET_1, CO1
  1223. stvx c02, OFFSET_2, CO1
  1224. stvx c03, OFFSET_3, CO1
  1225. stvx c04, OFFSET_4, CO1
  1226. addi CO1, CO1, 16 * SIZE
  1227. addic. I, I, -1
  1228. bgt+ LL(61)
  1229. .align 4
  1230. LL(70):
  1231. andi. I, M, 4
  1232. ble LL(80)
  1233. vxor c01, c01, c01
  1234. LOAD_B b1, OFFSET_0, B
  1235. vxor c02, c02, c02
  1236. vxor c03, c03, c03
  1237. LOAD_A a1, OFFSET_0, AO
  1238. vxor c04, c04, c04
  1239. LOAD_A a2, OFFSET_1, AO
  1240. vxor c05, c05, c05
  1241. LOAD_A a3, OFFSET_2, AO
  1242. vxor c06, c06, c06
  1243. LOAD_A a4, OFFSET_3, AO
  1244. vxor c07, c07, c07
  1245. vxor c08, c08, c08
  1246. mr BO, B
  1247. vspltw bp1, b1, 0
  1248. srawi. r0, K, 1
  1249. mtspr CTR, r0
  1250. ble LL(75)
  1251. .align 4
  1252. LL(72):
  1253. vmaddfp c01, a1, bp1, c01
  1254. vspltw bp2, b1, 1
  1255. vmaddfp c02, a2, bp1, c02
  1256. vmaddfp c05, a1, bp2, c05
  1257. vspltw bp1, b1, 2
  1258. vmaddfp c06, a2, bp2, c06
  1259. vmaddfp c03, a3, bp1, c03
  1260. vspltw bp2, b1, 3
  1261. vmaddfp c04, a4, bp1, c04
  1262. LOAD_B b1, OFFSET_1, BO
  1263. vspltw bp1, b1, 0
  1264. vmaddfp c07, a3, bp2, c07
  1265. vmaddfp c08, a4, bp2, c08
  1266. addi AO, AO, 16 * SIZE
  1267. addi BO, BO, 4 * SIZE
  1268. LOAD_A a1, OFFSET_0, AO
  1269. LOAD_A a2, OFFSET_1, AO
  1270. LOAD_A a3, OFFSET_2, AO
  1271. LOAD_A a4, OFFSET_3, AO
  1272. bdnz LL(72)
  1273. .align 4
  1274. LL(75):
  1275. andi. r0, K, 1
  1276. ble+ LL(78)
  1277. .align 4
  1278. LL(76):
  1279. vmaddfp c01, a1, bp1, c01
  1280. vspltw bp2, b1, 1
  1281. vmaddfp c02, a2, bp1, c02
  1282. addi AO, AO, 8 * SIZE
  1283. vmaddfp c05, a1, bp2, c05
  1284. addi BO, BO, 2 * SIZE
  1285. vmaddfp c06, a2, bp2, c06
  1286. .align 4
  1287. LL(78):
  1288. vaddfp c01, c01, c03
  1289. vaddfp c02, c02, c04
  1290. vaddfp c05, c05, c07
  1291. vaddfp c06, c06, c08
  1292. vxor VZERO, VZERO, VZERO
  1293. lvx swap, OFFSET_0, SP
  1294. lvx neg, OFFSET_1, SP
  1295. lvx alpha_r, OFFSET_2, SP
  1296. lvx alpha_i, OFFSET_3, SP
  1297. vperm c05, c05, c05, swap
  1298. vperm c06, c06, c06, swap
  1299. vxor c05, c05, neg
  1300. vxor c06, c06, neg
  1301. vaddfp c01, c01, c05
  1302. vaddfp c02, c02, c06
  1303. vperm c05, c01, c01, swap
  1304. vperm c06, c02, c02, swap
  1305. vmaddfp c01, alpha_r, c01, VZERO
  1306. vmaddfp c02, alpha_r, c02, VZERO
  1307. vmaddfp c01, alpha_i, c05, c01
  1308. vmaddfp c02, alpha_i, c06, c02
  1309. lvx C1, OFFSET_0, CO1
  1310. lvx C2, OFFSET_1, CO1
  1311. lvx C3, OFFSET_2, CO1
  1312. lvsr PERMRSHIFT1, 0, CO1
  1313. vperm c00, VZERO, c01, PERMRSHIFT1
  1314. vperm c01, c01, c02, PERMRSHIFT1
  1315. vperm c02, c02, VZERO, PERMRSHIFT1
  1316. vaddfp c00, c00, C1
  1317. vaddfp c01, c01, C2
  1318. vaddfp c02, c02, C3
  1319. stvx c00, OFFSET_0, CO1
  1320. stvx c01, OFFSET_1, CO1
  1321. stvx c02, OFFSET_2, CO1
  1322. addi CO1, CO1, 8 * SIZE
  1323. .align 4
  1324. LL(80):
  1325. andi. I, M, 2
  1326. ble LL(90)
  1327. vxor c01, c01, c01
  1328. LOAD_B b1, OFFSET_0, B
  1329. vxor c02, c02, c02
  1330. LOAD_A a1, OFFSET_0, AO
  1331. LOAD_A a2, OFFSET_1, AO
  1332. vxor c05, c05, c05
  1333. vxor c06, c06, c06
  1334. mr BO, B
  1335. vspltw bp1, b1, 0
  1336. srawi. r0, K, 1
  1337. mtspr CTR, r0
  1338. ble LL(85)
  1339. .align 4
  1340. LL(82):
  1341. vmaddfp c01, a1, bp1, c01
  1342. vspltw bp2, b1, 1
  1343. vmaddfp c05, a1, bp2, c05
  1344. vspltw bp1, b1, 2
  1345. vmaddfp c02, a2, bp1, c02
  1346. vspltw bp2, b1, 3
  1347. LOAD_B b1, OFFSET_1, BO
  1348. vspltw bp1, b1, 0
  1349. vmaddfp c06, a2, bp2, c06
  1350. addi AO, AO, 8 * SIZE
  1351. addi BO, BO, 4 * SIZE
  1352. LOAD_A a1, OFFSET_0, AO
  1353. LOAD_A a2, OFFSET_1, AO
  1354. bdnz LL(82)
  1355. .align 4
  1356. LL(85):
  1357. andi. r0, K, 1
  1358. ble+ LL(88)
  1359. .align 4
  1360. LL(86):
  1361. vspltw bp2, b1, 1
  1362. vmaddfp c01, a1, bp1, c01
  1363. vmaddfp c05, a1, bp2, c05
  1364. addi AO, AO, 4 * SIZE
  1365. addi BO, BO, 2 * SIZE
  1366. .align 4
  1367. LL(88):
  1368. vaddfp c01, c01, c02
  1369. vaddfp c05, c05, c06
  1370. vaddfp c09, c09, c10
  1371. vaddfp c13, c13, c14
  1372. vxor VZERO, VZERO, VZERO
  1373. lvx swap, OFFSET_0, SP
  1374. lvx neg, OFFSET_1, SP
  1375. lvx alpha_r, OFFSET_2, SP
  1376. lvx alpha_i, OFFSET_3, SP
  1377. vperm c05, c05, c05, swap
  1378. vxor c05, c05, neg
  1379. vaddfp c01, c01, c05
  1380. vperm c05, c01, c01, swap
  1381. vmaddfp c01, alpha_r, c01, VZERO
  1382. vmaddfp c01, alpha_i, c05, c01
  1383. lvx C1, OFFSET_0, CO1
  1384. lvx C2, OFFSET_1, CO1
  1385. lvsr PERMRSHIFT1, 0, CO1
  1386. vperm c00, VZERO, c01, PERMRSHIFT1
  1387. vperm c01, c01, VZERO, PERMRSHIFT1
  1388. vaddfp c00, c00, C1
  1389. vaddfp c01, c01, C2
  1390. stvx c00, OFFSET_0, CO1
  1391. stvx c01, OFFSET_1, CO1
  1392. addi CO1, CO1, 4 * SIZE
  1393. .align 4
  1394. LL(90):
  1395. andi. I, M, 1
  1396. ble LL(999)
  1397. mr BO, B
  1398. LFD f8, 0 * SIZE(AO)
  1399. LFD f9, 1 * SIZE(AO)
  1400. LFD f10, 0 * SIZE(BO)
  1401. LFD f11, 1 * SIZE(BO)
  1402. LFD f12, 2 * SIZE(BO)
  1403. LFD f13, 3 * SIZE(BO)
  1404. lfs f0, FZERO(SP)
  1405. fmr f1, f0
  1406. fmr f2, f0
  1407. fmr f3, f0
  1408. srawi. r0, K, 1
  1409. mtspr CTR, r0
  1410. ble LL(95)
  1411. .align 4
  1412. LL(92):
  1413. fmadd f0, f8, f10, f0
  1414. fmadd f2, f8, f11, f2
  1415. fmadd f1, f9, f10, f1
  1416. fmadd f3, f9, f11, f3
  1417. LFD f8, 2 * SIZE(AO)
  1418. LFD f9, 3 * SIZE(AO)
  1419. LFD f10, 4 * SIZE(BO)
  1420. LFD f11, 5 * SIZE(BO)
  1421. fmadd f0, f8, f12, f0
  1422. fmadd f2, f8, f13, f2
  1423. fmadd f1, f9, f12, f1
  1424. fmadd f3, f9, f13, f3
  1425. LFD f8, 4 * SIZE(AO)
  1426. LFD f9, 5 * SIZE(AO)
  1427. LFD f12, 6 * SIZE(BO)
  1428. LFD f13, 7 * SIZE(BO)
  1429. addi AO, AO, 4 * SIZE
  1430. addi BO, BO, 4 * SIZE
  1431. bdnz LL(92)
  1432. .align 4
  1433. LL(95):
  1434. andi. r0, K, 1
  1435. ble LL(98)
  1436. .align 4
  1437. LL(96):
  1438. fmadd f0, f8, f10, f0
  1439. fmadd f2, f8, f11, f2
  1440. fmadd f1, f9, f10, f1
  1441. fmadd f3, f9, f11, f3
  1442. .align 4
  1443. LL(98):
  1444. #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
  1445. fsub f0, f0, f3
  1446. fadd f1, f1, f2
  1447. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  1448. fadd f0, f0, f3
  1449. fsub f1, f1, f2
  1450. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  1451. fadd f0, f0, f3
  1452. fsub f1, f2, f1
  1453. #else /* RR, RC, CR, CC */
  1454. fsub f0, f0, f3
  1455. fadd f1, f1, f2
  1456. #endif
  1457. LFD f8, 0 * SIZE(CO1)
  1458. LFD f9, 1 * SIZE(CO1)
  1459. lfs f12, ALPHA_R + 0(SP)
  1460. lfs f13, ALPHA_I + 4(SP)
  1461. #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
  1462. fmadd f8, f12, f0, f8
  1463. fnmsub f9, f12, f1, f9
  1464. fmadd f8, f13, f1, f8
  1465. fmadd f9, f13, f0, f9
  1466. #else
  1467. fmadd f8, f12, f0, f8
  1468. fmadd f9, f12, f1, f9
  1469. fnmsub f8, f13, f1, f8
  1470. fmadd f9, f13, f0, f9
  1471. #endif
  1472. STFD f8, 0 * SIZE(CO1)
  1473. STFD f9, 1 * SIZE(CO1)
  1474. .align 4
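/* Epilogue: restore VRsave, the saved vector and integer registers, and      */
/* release the stack frame.                                                    */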
  1475. LL(999):
  1476. mr SP, STACK
  1477. li r0, 0 * 16
  1478. lvx v20, SP, r0
  1479. li r0, 1 * 16
  1480. lvx v21, SP, r0
  1481. li r0, 2 * 16
  1482. lvx v22, SP, r0
  1483. li r0, 3 * 16
  1484. lvx v23, SP, r0
  1485. li r0, 4 * 16
  1486. lvx v24, SP, r0
  1487. li r0, 5 * 16
  1488. lvx v25, SP, r0
  1489. li r0, 6 * 16
  1490. lvx v26, SP, r0
  1491. li r0, 7 * 16
  1492. lvx v27, SP, r0
  1493. li r0, 8 * 16
  1494. lvx v28, SP, r0
  1495. li r0, 9 * 16
  1496. lvx v29, SP, r0
  1497. li r0, 10 * 16
  1498. lvx v30, SP, r0
  1499. li r0, 11 * 16
  1500. lvx v31, SP, r0
  1501. mtspr VRsave, VREG
  1502. #ifdef __64BIT__
  1503. ld r31, 192(SP)
  1504. ld r30, 200(SP)
  1505. ld r29, 208(SP)
  1506. ld r28, 216(SP)
  1507. ld r27, 224(SP)
  1508. ld r26, 232(SP)
  1509. ld r25, 240(SP)
  1510. ld r24, 248(SP)
  1511. ld r23, 256(SP)
  1512. ld r22, 264(SP)
  1513. ld r21, 272(SP)
  1514. ld r20, 280(SP)
  1515. ld r19, 288(SP)
  1516. ld r18, 296(SP)
  1517. ld r17, 304(SP)
  1518. ld r16, 312(SP)
  1519. ld r15, 320(SP)
  1520. ld r14, 328(SP)
  1521. #else
  1522. lwz r31, 192(SP)
  1523. lwz r30, 196(SP)
  1524. lwz r29, 200(SP)
  1525. lwz r28, 204(SP)
  1526. lwz r27, 208(SP)
  1527. lwz r26, 212(SP)
  1528. lwz r25, 216(SP)
  1529. lwz r24, 220(SP)
  1530. lwz r23, 224(SP)
  1531. lwz r22, 228(SP)
  1532. lwz r21, 232(SP)
  1533. lwz r20, 236(SP)
  1534. lwz r19, 240(SP)
  1535. lwz r18, 244(SP)
  1536. lwz r17, 248(SP)
  1537. lwz r16, 252(SP)
  1538. lwz r15, 256(SP)
  1539. lwz r14, 260(SP)
  1540. #endif
  1541. addi SP, SP, STACKSIZE
  1542. blr
  1543. EPILOGUE
  1544. #endif