You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

zgemm_kernel_altivec.S 32 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703
  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
/* ------------------------------------------------------------------ */
/* Preprocessor definitions for the single-precision complex GEMM     */
/* (ZGEMM/CGEMM-style) AltiVec kernel.  This section only names       */
/* registers, stack offsets and load mnemonics; the kernel body       */
/* follows the PROLOGUE below.                                        */
/* ------------------------------------------------------------------ */
  38. #define ASSEMBLER
  39. #include "common.h"
/* LOAD: GPR load matching the pointer width of the target ABI        */
/* (lwz = 32-bit word load, ld = 64-bit doubleword load).             */
  40. #ifndef __64BIT__
  41. #define LOAD lwz
  42. #else
  43. #define LOAD ld
  44. #endif
/* STACKSIZE: bytes reserved on entry (addi SP, SP, -STACKSIZE);      */
/* holds v20-v31 (12 x 16 bytes) plus the saved GPRs r14-r31.         */
  45. #ifdef __64BIT__
  46. #define STACKSIZE 360
  47. #else
  48. #define STACKSIZE 272
  49. #endif
/* ALIGN_SIZE: apparently unused in this kernel (the stack is         */
/* realigned with a literal -8192 mask below) -- TODO confirm.        */
  50. #define ALIGN_SIZE 0xffff
/* Byte offsets into the 8K-aligned scratch area addressed from SP:   */
/*   SWAP    - vperm control vector that swaps real/imag word pairs   */
/*   NEG     - sign-mask vector (0x80000000 in selected lanes)        */
/*   ALPHA_R - four splatted copies of alpha real part (+/- sign)     */
/*   ALPHA_I - four copies of alpha imaginary part (+/- sign)         */
/*   FZERO   - a single 0.0f word used to clear scalar FPRs           */
  51. #define SWAP 0
  52. #define NEG 16
  53. #define ALPHA_R 32
  54. #define ALPHA_I 48
  55. #define FZERO 64
/* GEMM problem dimensions, passed in the first argument registers.   */
  56. #define M r3
  57. #define N r4
  58. #define K r5
/* A, B, C, LDC argument registers depend on OS ABI, pointer width    */
/* and (on AIX/Darwin) whether alpha is passed as double: the FP      */
/* alpha arguments shift the GPR slots used for the pointers.         */
  59. #ifdef linux
  60. #ifndef __64BIT__
  61. #define A r6
  62. #define B r7
  63. #define C r8
  64. #define LDC r9
  65. #else
  66. #define A r8
  67. #define B r9
  68. #define C r10
  69. #define LDC r6
  70. #endif
  71. #endif
  72. #if defined(_AIX) || defined(__APPLE__)
  73. #if !defined(__64BIT__) && defined(DOUBLE)
  74. #define A r10
  75. #define B r6
  76. #define C r7
  77. #define LDC r8
  78. #else
  79. #define A r8
  80. #define B r9
  81. #define C r10
  82. #define LDC r6
  83. #endif
  84. #endif
/* STACK: copy of the original SP taken before the 8K realignment.    */
  85. #define STACK r11
/* I, J: column/row loop counters.  AO/BO: running pointers into the  */
/* packed A and B panels.  CO1/CO2: pointers to the two C columns     */
/* updated per outer-loop iteration.                                  */
  86. #define I r21
  87. #define J r22
  88. #define AO r23
  89. #define BO r24
  90. #define CO1 r25
  91. #define CO2 r26
/* Prefetch distances (dcbt/dcbtst offsets).  NOTE: PREA and PREB     */
/* share r29 -- the kernel never uses both names live at once.        */
  92. #define PREA r29
  93. #define PREB r29
  94. #define PREC r30
/* VREG: saves the caller's VRsave SPR while all 32 VRs are in use.   */
  95. #define VREG r31
/* AltiVec 16-byte-aligned indexed vector loads for both panels.      */
  96. #define LOAD_A lvx
  97. #define LOAD_B lvx
/* OFFSET_n: index operands for lvx/stvx.  OFFSET_0 is the literal 0  */
/* (lvx accepts r0-as-zero); OFFSET_1..7 are GPRs preloaded with      */
/* 4*SIZE .. 28*SIZE byte offsets.                                    */
  98. #define OFFSET_0 0
  99. #define OFFSET_1 r14
  100. #define OFFSET_2 r15
  101. #define OFFSET_3 r16
  102. #define OFFSET_4 r17
  103. #define OFFSET_5 r18
  104. #define OFFSET_6 r19
  105. #define OFFSET_7 r20
/* c01..c16: the 4x4-vector accumulator tile (v0-v15), cleared with   */
/* vxor at the top of each micro-kernel and combined at LL(18).       */
  106. #define c01 v0
  107. #define c02 v1
  108. #define c03 v2
  109. #define c04 v3
  110. #define c05 v4
  111. #define c06 v5
  112. #define c07 v6
  113. #define c08 v7
  114. #define c09 v8
  115. #define c10 v9
  116. #define c11 v10
  117. #define c12 v11
  118. #define c13 v12
  119. #define c14 v13
  120. #define c15 v14
  121. #define c16 v15
/* a1..a8: vectors streamed from the A panel; b1/b2 from the B panel; */
/* bp1/bp2 hold one B element splatted across all lanes (vspltw).     */
  122. #define a1 v16
  123. #define a2 v17
  124. #define a3 v18
  125. #define a4 v19
  126. #define a5 v20
  127. #define a6 v21
  128. #define a7 v22
  129. #define a8 v23
  130. #define b1 v24
  131. #define b2 v25
  132. #define bp1 v26
  133. #define bp2 v27
/* C1..C5 alias v16-v20 (the a-registers are dead during writeback)   */
/* and hold the existing C values loaded for the update.  c00 and     */
/* the lvsr-derived PERMRSHIFT vectors handle unaligned C stores.     */
  134. #define C1 v16
  135. #define C2 v17
  136. #define C3 v18
  137. #define C4 v19
  138. #define C5 v20
  139. #define c00 v24
  140. #define VZERO v25
  141. #define PERMRSHIFT1 v26
  142. #define PERMRSHIFT2 v27
/* Constant vectors reloaded from the SP scratch area at LL(15) etc.: */
/* swap/neg implement the complex-multiply sign fixups, alpha_r and   */
/* alpha_i scale the accumulated tile.                                */
  143. #define swap v28
  144. #define neg v29
  145. #define alpha_r v30
  146. #define alpha_i v31
  147. #ifndef NEEDPARAM
  148. PROLOGUE
  149. PROFCODE
  150. addi SP, SP, -STACKSIZE
  151. mr STACK, SP
  152. li r0, 0 * 16
  153. stvx v20, SP, r0
  154. li r0, 1 * 16
  155. stvx v21, SP, r0
  156. li r0, 2 * 16
  157. stvx v22, SP, r0
  158. li r0, 3 * 16
  159. stvx v23, SP, r0
  160. li r0, 4 * 16
  161. stvx v24, SP, r0
  162. li r0, 5 * 16
  163. stvx v25, SP, r0
  164. li r0, 6 * 16
  165. stvx v26, SP, r0
  166. li r0, 7 * 16
  167. stvx v27, SP, r0
  168. li r0, 8 * 16
  169. stvx v28, SP, r0
  170. li r0, 9 * 16
  171. stvx v29, SP, r0
  172. li r0, 10 * 16
  173. stvx v30, SP, r0
  174. li r0, 11 * 16
  175. stvx v31, SP, r0
  176. #ifdef __64BIT__
  177. std r31, 192(SP)
  178. std r30, 200(SP)
  179. std r29, 208(SP)
  180. std r28, 216(SP)
  181. std r27, 224(SP)
  182. std r26, 232(SP)
  183. std r25, 240(SP)
  184. std r24, 248(SP)
  185. std r23, 256(SP)
  186. std r22, 264(SP)
  187. std r21, 272(SP)
  188. std r20, 280(SP)
  189. std r19, 288(SP)
  190. std r18, 296(SP)
  191. std r17, 304(SP)
  192. std r16, 312(SP)
  193. std r15, 320(SP)
  194. std r14, 328(SP)
  195. #else
  196. stw r31, 192(SP)
  197. stw r30, 196(SP)
  198. stw r29, 200(SP)
  199. stw r28, 204(SP)
  200. stw r27, 208(SP)
  201. stw r26, 212(SP)
  202. stw r25, 216(SP)
  203. stw r24, 220(SP)
  204. stw r23, 224(SP)
  205. stw r22, 228(SP)
  206. stw r21, 232(SP)
  207. stw r20, 236(SP)
  208. stw r19, 240(SP)
  209. stw r18, 244(SP)
  210. stw r17, 248(SP)
  211. stw r16, 252(SP)
  212. stw r15, 256(SP)
  213. stw r14, 260(SP)
  214. #endif
  215. #ifdef linux
  216. #ifdef __64BIT__
  217. ld LDC, 112 + STACKSIZE(SP)
  218. #endif
  219. #endif
  220. #if defined(_AIX) || defined(__APPLE__)
  221. #ifdef __64BIT__
  222. ld LDC, 112 + STACKSIZE(SP)
  223. #else
  224. #ifdef DOUBLE
  225. lwz B, 56 + STACKSIZE(SP)
  226. lwz C, 60 + STACKSIZE(SP)
  227. lwz LDC, 64 + STACKSIZE(SP)
  228. #else
  229. lwz LDC, 56 + STACKSIZE(SP)
  230. #endif
  231. #endif
  232. #endif
  233. #ifndef PREFETCHTEST
  234. #ifdef PPC970
  235. li PREC, 16 * SIZE
  236. #endif
  237. #else
  238. #ifdef linux
  239. #ifndef __64BIT__
  240. lwz PREB, 16 + STACKSIZE(SP)
  241. lwz PREC, 20 + STACKSIZE(SP)
  242. #else
  243. ld PREB, 136 + STACKSIZE(SP)
  244. ld PREC, 144 + STACKSIZE(SP)
  245. #endif
  246. #endif
  247. #if defined(_AIX) || defined(__APPLE__)
  248. #ifdef __64BIT__
  249. ld PREB, 136 + STACKSIZE(SP)
  250. ld PREC, 144 + STACKSIZE(SP)
  251. #else
  252. #ifdef DOUBLE
  253. lwz PREB, 72 + STACKSIZE(SP)
  254. lwz PREC, 76 + STACKSIZE(SP)
  255. #else
  256. lwz PREB, 68 + STACKSIZE(SP)
  257. lwz PREC, 72 + STACKSIZE(SP)
  258. #endif
  259. #endif
  260. #endif
  261. #endif
  262. #ifndef PREFETCHTEST
  263. #ifdef CELL
  264. li PREB, (3 * 32 * SIZE)
  265. #else
  266. li PREB, (5 * 32 * SIZE)
  267. #endif
  268. #endif
  269. li r0, -1
  270. mfspr VREG, VRsave
  271. mtspr VRsave, r0
  272. addi SP, SP, -128
  273. li r0, -8192
  274. and SP, SP, r0
  275. fneg f3, f1
  276. fneg f4, f2
  277. #if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
  278. defined(NC) || defined(TC) || defined(NR) || defined(TR)
  279. stfs f1, ALPHA_R + 0(SP)
  280. stfs f1, ALPHA_R + 4(SP)
  281. stfs f1, ALPHA_R + 8(SP)
  282. stfs f1, ALPHA_R + 12(SP)
  283. stfs f4, ALPHA_I + 0(SP)
  284. stfs f2, ALPHA_I + 4(SP)
  285. stfs f4, ALPHA_I + 8(SP)
  286. stfs f2, ALPHA_I + 12(SP)
  287. #else
  288. stfs f1, ALPHA_R + 0(SP)
  289. stfs f3, ALPHA_R + 4(SP)
  290. stfs f1, ALPHA_R + 8(SP)
  291. stfs f3, ALPHA_R + 12(SP)
  292. stfs f2, ALPHA_I + 0(SP)
  293. stfs f2, ALPHA_I + 4(SP)
  294. stfs f2, ALPHA_I + 8(SP)
  295. stfs f2, ALPHA_I + 12(SP)
  296. #endif
  297. li I, Address_L(0x04050607)
  298. addis I, I, Address_H(0x04050607)
  299. stw I, SWAP + 0(SP)
  300. li I, Address_L(0x00010203)
  301. addis I, I, Address_H(0x00010203)
  302. stw I, SWAP + 4(SP)
  303. li I, Address_L(0x0c0d0e0f)
  304. addis I, I, Address_H(0x0c0d0e0f)
  305. stw I, SWAP + 8(SP)
  306. li I, Address_L(0x08090a0b)
  307. addis I, I, Address_H(0x08090a0b)
  308. stw I, SWAP + 12(SP)
  309. #if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
  310. defined(RR) || defined(RC) || defined(CR) || defined(CC)
  311. lis I, 0x8000
  312. stw I, NEG + 0(SP)
  313. stw I, NEG + 8(SP)
  314. li I, 0
  315. stw I, NEG + 4(SP)
  316. stw I, NEG + 12(SP)
  317. #else
  318. li I, 0
  319. stw I, NEG + 0(SP)
  320. stw I, NEG + 8(SP)
  321. lis I, 0x8000
  322. stw I, NEG + 4(SP)
  323. stw I, NEG + 12(SP)
  324. #endif
  325. li r0, 0
  326. stw r0, FZERO(SP)
  327. slwi LDC, LDC, ZBASE_SHIFT
  328. li OFFSET_1, 4 * SIZE
  329. li OFFSET_2, 8 * SIZE
  330. li OFFSET_3, 12 * SIZE
  331. li OFFSET_4, 16 * SIZE
  332. li OFFSET_5, 20 * SIZE
  333. li OFFSET_6, 24 * SIZE
  334. li OFFSET_7, 28 * SIZE
  335. cmpwi cr0, M, 0
  336. ble LL(999)
  337. cmpwi cr0, N, 0
  338. ble LL(999)
  339. cmpwi cr0, K, 0
  340. ble LL(999)
  341. srawi. J, N, 1
  342. ble LL(50)
  343. .align 4
  344. LL(01):
  345. mr CO1, C
  346. add CO2, C, LDC
  347. add C, CO2, LDC
  348. mr AO, A
  349. srawi. I, M, 3
  350. ble LL(20)
  351. .align 4
  352. LL(11):
  353. vxor c01, c01, c01
  354. LOAD_B b1, OFFSET_0, B
  355. vxor c02, c02, c02
  356. LOAD_B b2, OFFSET_1, B
  357. vxor c03, c03, c03
  358. LOAD_A a1, OFFSET_0, AO
  359. vxor c04, c04, c04
  360. LOAD_A a2, OFFSET_1, AO
  361. vxor c05, c05, c05
  362. LOAD_A a3, OFFSET_2, AO
  363. vxor c06, c06, c06
  364. LOAD_A a4, OFFSET_3, AO
  365. vxor c07, c07, c07
  366. LOAD_A a5, OFFSET_4, AO
  367. vxor c08, c08, c08
  368. vxor c09, c09, c09
  369. dcbtst CO1, PREC
  370. vxor c10, c10, c10
  371. dcbtst CO2, PREC
  372. vxor c11, c11, c11
  373. vxor c12, c12, c12
  374. vxor c13, c13, c13
  375. mr BO, B
  376. vxor c14, c14, c14
  377. srawi. r0, K, 1
  378. vxor c15, c15, c15
  379. mtspr CTR, r0
  380. vxor c16, c16, c16
  381. vspltw bp1, b1, 0
  382. ble LL(15)
  383. .align 4
  384. LL(12):
  385. vmaddfp c01, a1, bp1, c01
  386. vspltw bp2, b1, 1
  387. vmaddfp c02, a2, bp1, c02
  388. DCBT(BO, PREB)
  389. vmaddfp c03, a3, bp1, c03
  390. nop
  391. vmaddfp c04, a4, bp1, c04
  392. LOAD_A a6, OFFSET_5, AO
  393. vmaddfp c05, a1, bp2, c05
  394. vspltw bp1, b1, 2
  395. vmaddfp c06, a2, bp2, c06
  396. #ifdef CELL
  397. DCBT(AO, PREA)
  398. #else
  399. nop
  400. #endif
  401. vmaddfp c07, a3, bp2, c07
  402. nop
  403. vmaddfp c08, a4, bp2, c08
  404. LOAD_A a7, OFFSET_6, AO
  405. vmaddfp c09, a1, bp1, c09
  406. vspltw bp2, b1, 3
  407. vmaddfp c10, a2, bp1, c10
  408. LOAD_B b1, OFFSET_2, BO
  409. vmaddfp c11, a3, bp1, c11
  410. nop
  411. vmaddfp c12, a4, bp1, c12
  412. LOAD_A a8, OFFSET_7, AO
  413. vmaddfp c13, a1, bp2, c13
  414. vspltw bp1, b2, 0
  415. vmaddfp c14, a2, bp2, c14
  416. addi AO, AO, 32 * SIZE
  417. vmaddfp c15, a3, bp2, c15
  418. nop
  419. vmaddfp c16, a4, bp2, c16
  420. LOAD_A a1, OFFSET_0, AO
  421. vmaddfp c01, a5, bp1, c01
  422. vspltw bp2, b2, 1
  423. vmaddfp c02, a6, bp1, c02
  424. nop
  425. vmaddfp c03, a7, bp1, c03
  426. nop
  427. vmaddfp c04, a8, bp1, c04
  428. LOAD_A a2, OFFSET_1, AO
  429. vmaddfp c05, a5, bp2, c05
  430. vspltw bp1, b2, 2
  431. vmaddfp c06, a6, bp2, c06
  432. nop
  433. vmaddfp c07, a7, bp2, c07
  434. nop
  435. vmaddfp c08, a8, bp2, c08
  436. LOAD_A a3, OFFSET_2, AO
  437. vmaddfp c09, a5, bp1, c09
  438. vspltw bp2, b2, 3
  439. vmaddfp c10, a6, bp1, c10
  440. LOAD_B b2, OFFSET_3, BO
  441. vmaddfp c11, a7, bp1, c11
  442. nop
  443. vmaddfp c12, a8, bp1, c12
  444. LOAD_A a4, OFFSET_3, AO
  445. vmaddfp c13, a5, bp2, c13
  446. vspltw bp1, b1, 0
  447. vmaddfp c14, a6, bp2, c14
  448. addi BO, BO, 8 * SIZE
  449. vmaddfp c15, a7, bp2, c15
  450. LOAD_A a5, OFFSET_4, AO
  451. vmaddfp c16, a8, bp2, c16
  452. bdnz+ LL(12)
  453. .align 4
  454. LL(15):
  455. lvx swap, OFFSET_0, SP
  456. lvx neg, OFFSET_1, SP
  457. lvx alpha_r, OFFSET_2, SP
  458. lvx alpha_i, OFFSET_3, SP
  459. andi. r0, K, 1
  460. ble+ LL(18)
  461. .align 4
  462. LL(16):
  463. vmaddfp c01, a1, bp1, c01
  464. vspltw bp2, b1, 1
  465. vmaddfp c02, a2, bp1, c02
  466. nop
  467. vmaddfp c03, a3, bp1, c03
  468. nop
  469. vmaddfp c04, a4, bp1, c04
  470. nop
  471. vmaddfp c05, a1, bp2, c05
  472. vspltw bp1, b1, 2
  473. vmaddfp c06, a2, bp2, c06
  474. nop
  475. vmaddfp c07, a3, bp2, c07
  476. nop
  477. vmaddfp c08, a4, bp2, c08
  478. nop
  479. vmaddfp c09, a1, bp1, c09
  480. vspltw bp2, b1, 3
  481. vmaddfp c10, a2, bp1, c10
  482. addi AO, AO, 16 * SIZE
  483. vmaddfp c11, a3, bp1, c11
  484. addi BO, BO, 4 * SIZE
  485. vmaddfp c12, a4, bp1, c12
  486. nop
  487. vmaddfp c13, a1, bp2, c13
  488. vmaddfp c14, a2, bp2, c14
  489. vmaddfp c15, a3, bp2, c15
  490. vmaddfp c16, a4, bp2, c16
  491. .align 4
  492. LL(18):
  493. vxor VZERO, VZERO, VZERO
  494. vperm c05, c05, c05, swap
  495. vperm c06, c06, c06, swap
  496. vperm c07, c07, c07, swap
  497. vperm c08, c08, c08, swap
  498. vperm c13, c13, c13, swap
  499. vperm c14, c14, c14, swap
  500. vperm c15, c15, c15, swap
  501. vperm c16, c16, c16, swap
  502. vxor c05, c05, neg
  503. vxor c06, c06, neg
  504. vxor c07, c07, neg
  505. vxor c08, c08, neg
  506. vxor c13, c13, neg
  507. vxor c14, c14, neg
  508. vxor c15, c15, neg
  509. vxor c16, c16, neg
  510. vaddfp c01, c01, c05
  511. vaddfp c02, c02, c06
  512. vaddfp c03, c03, c07
  513. vaddfp c04, c04, c08
  514. vaddfp c09, c09, c13
  515. vaddfp c10, c10, c14
  516. vaddfp c11, c11, c15
  517. vaddfp c12, c12, c16
  518. vperm c05, c01, c01, swap
  519. vperm c06, c02, c02, swap
  520. vperm c07, c03, c03, swap
  521. vperm c08, c04, c04, swap
  522. vperm c13, c09, c09, swap
  523. vperm c14, c10, c10, swap
  524. vperm c15, c11, c11, swap
  525. vperm c16, c12, c12, swap
  526. vmaddfp c01, alpha_r, c01, VZERO
  527. vmaddfp c02, alpha_r, c02, VZERO
  528. vmaddfp c03, alpha_r, c03, VZERO
  529. vmaddfp c04, alpha_r, c04, VZERO
  530. vmaddfp c01, alpha_i, c05, c01
  531. vmaddfp c02, alpha_i, c06, c02
  532. vmaddfp c03, alpha_i, c07, c03
  533. vmaddfp c04, alpha_i, c08, c04
  534. vmaddfp c09, alpha_r, c09, VZERO
  535. vmaddfp c10, alpha_r, c10, VZERO
  536. vmaddfp c11, alpha_r, c11, VZERO
  537. vmaddfp c12, alpha_r, c12, VZERO
  538. vmaddfp c09, alpha_i, c13, c09
  539. vmaddfp c10, alpha_i, c14, c10
  540. vmaddfp c11, alpha_i, c15, c11
  541. vmaddfp c12, alpha_i, c16, c12
  542. lvx C1, OFFSET_0, CO1
  543. lvx C2, OFFSET_1, CO1
  544. lvx C3, OFFSET_2, CO1
  545. lvx C4, OFFSET_3, CO1
  546. lvx C5, OFFSET_4, CO1
  547. lvsr PERMRSHIFT1, 0, CO1
  548. lvsr PERMRSHIFT2, 0, CO2
  549. vperm c00, VZERO, c01, PERMRSHIFT1
  550. vperm c01, c01, c02, PERMRSHIFT1
  551. vperm c02, c02, c03, PERMRSHIFT1
  552. vperm c03, c03, c04, PERMRSHIFT1
  553. vperm c04, c04, VZERO, PERMRSHIFT1
  554. vaddfp c00, c00, C1
  555. vaddfp c01, c01, C2
  556. vaddfp c02, c02, C3
  557. vaddfp c03, c03, C4
  558. vaddfp c04, c04, C5
  559. stvx c00, OFFSET_0, CO1
  560. stvx c01, OFFSET_1, CO1
  561. stvx c02, OFFSET_2, CO1
  562. stvx c03, OFFSET_3, CO1
  563. stvx c04, OFFSET_4, CO1
  564. lvx C1, OFFSET_0, CO2
  565. lvx C2, OFFSET_1, CO2
  566. lvx C3, OFFSET_2, CO2
  567. lvx C4, OFFSET_3, CO2
  568. lvx C5, OFFSET_4, CO2
  569. vperm c00, VZERO, c09, PERMRSHIFT2
  570. vperm c09, c09, c10, PERMRSHIFT2
  571. vperm c10, c10, c11, PERMRSHIFT2
  572. vperm c11, c11, c12, PERMRSHIFT2
  573. vperm c12, c12, VZERO, PERMRSHIFT2
  574. vaddfp c00, c00, C1
  575. vaddfp c09, c09, C2
  576. vaddfp c10, c10, C3
  577. vaddfp c11, c11, C4
  578. vaddfp c12, c12, C5
  579. stvx c00, OFFSET_0, CO2
  580. stvx c09, OFFSET_1, CO2
  581. stvx c10, OFFSET_2, CO2
  582. stvx c11, OFFSET_3, CO2
  583. stvx c12, OFFSET_4, CO2
  584. addi CO1, CO1, 16 * SIZE
  585. addi CO2, CO2, 16 * SIZE
  586. addic. I, I, -1
  587. bgt+ LL(11)
  588. .align 4
  589. LL(20):
  590. andi. I, M, 4
  591. ble LL(30)
  592. vxor c01, c01, c01
  593. LOAD_A a1, OFFSET_0, AO
  594. vxor c02, c02, c02
  595. LOAD_A a2, OFFSET_1, AO
  596. vxor c05, c05, c05
  597. LOAD_A a3, OFFSET_2, AO
  598. vxor c06, c06, c06
  599. LOAD_A a4, OFFSET_3, AO
  600. vxor c09, c09, c09
  601. LOAD_B b1, OFFSET_0, B
  602. vxor c10, c10, c10
  603. LOAD_B b2, OFFSET_1, B
  604. vxor c13, c13, c13
  605. vxor c14, c14, c14
  606. mr BO, B
  607. vspltw bp1, b1, 0
  608. srawi. r0, K, 1
  609. mtspr CTR, r0
  610. ble LL(25)
  611. .align 4
  612. LL(22):
  613. vmaddfp c01, a1, bp1, c01
  614. vspltw bp2, b1, 1
  615. addi AO, AO, 16 * SIZE
  616. vmaddfp c02, a2, bp1, c02
  617. addi BO, BO, 8 * SIZE
  618. vmaddfp c05, a1, bp2, c05
  619. vspltw bp1, b1, 2
  620. vmaddfp c06, a2, bp2, c06
  621. vmaddfp c09, a1, bp1, c09
  622. vspltw bp2, b1, 3
  623. LOAD_B b1, OFFSET_0, BO
  624. vmaddfp c10, a2, bp1, c10
  625. vmaddfp c13, a1, bp2, c13
  626. LOAD_A a1, OFFSET_0, AO
  627. vspltw bp1, b2, 0
  628. vmaddfp c14, a2, bp2, c14
  629. LOAD_A a2, OFFSET_1, AO
  630. vmaddfp c01, a3, bp1, c01
  631. vspltw bp2, b2, 1
  632. vmaddfp c02, a4, bp1, c02
  633. vmaddfp c05, a3, bp2, c05
  634. vspltw bp1, b2, 2
  635. vmaddfp c06, a4, bp2, c06
  636. vmaddfp c09, a3, bp1, c09
  637. vspltw bp2, b2, 3
  638. LOAD_B b2, OFFSET_1, BO
  639. vmaddfp c10, a4, bp1, c10
  640. vmaddfp c13, a3, bp2, c13
  641. LOAD_A a3, OFFSET_2, AO
  642. vmaddfp c14, a4, bp2, c14
  643. LOAD_A a4, OFFSET_3, AO
  644. vspltw bp1, b1, 0
  645. bdnz LL(22)
  646. .align 4
  647. LL(25):
  648. andi. r0, K, 1
  649. ble+ LL(28)
  650. .align 4
  651. LL(26):
  652. vmaddfp c01, a1, bp1, c01
  653. vspltw bp2, b1, 1
  654. vmaddfp c02, a2, bp1, c02
  655. nop
  656. vmaddfp c05, a1, bp2, c05
  657. vspltw bp1, b1, 2
  658. vmaddfp c06, a2, bp2, c06
  659. nop
  660. vmaddfp c09, a1, bp1, c09
  661. vspltw bp2, b1, 3
  662. vmaddfp c10, a2, bp1, c10
  663. addi AO, AO, 8 * SIZE
  664. vmaddfp c13, a1, bp2, c13
  665. addi BO, BO, 4 * SIZE
  666. vmaddfp c14, a2, bp2, c14
  667. nop
  668. .align 4
  669. LL(28):
  670. vxor VZERO, VZERO, VZERO
  671. lvx swap, OFFSET_0, SP
  672. lvx neg, OFFSET_1, SP
  673. lvx alpha_r, OFFSET_2, SP
  674. lvx alpha_i, OFFSET_3, SP
  675. vperm c05, c05, c05, swap
  676. vperm c06, c06, c06, swap
  677. vperm c13, c13, c13, swap
  678. vperm c14, c14, c14, swap
  679. vxor c05, c05, neg
  680. vxor c06, c06, neg
  681. vxor c13, c13, neg
  682. vxor c14, c14, neg
  683. vaddfp c01, c01, c05
  684. vaddfp c02, c02, c06
  685. vaddfp c09, c09, c13
  686. vaddfp c10, c10, c14
  687. vperm c05, c01, c01, swap
  688. vperm c06, c02, c02, swap
  689. vperm c13, c09, c09, swap
  690. vperm c14, c10, c10, swap
  691. vmaddfp c01, alpha_r, c01, VZERO
  692. vmaddfp c02, alpha_r, c02, VZERO
  693. vmaddfp c01, alpha_i, c05, c01
  694. vmaddfp c02, alpha_i, c06, c02
  695. vmaddfp c09, alpha_r, c09, VZERO
  696. vmaddfp c10, alpha_r, c10, VZERO
  697. vmaddfp c09, alpha_i, c13, c09
  698. vmaddfp c10, alpha_i, c14, c10
  699. lvx C1, OFFSET_0, CO1
  700. lvx C2, OFFSET_1, CO1
  701. lvx C3, OFFSET_2, CO1
  702. lvsr PERMRSHIFT1, 0, CO1
  703. lvsr PERMRSHIFT2, 0, CO2
  704. vperm c00, VZERO, c01, PERMRSHIFT1
  705. vperm c01, c01, c02, PERMRSHIFT1
  706. vperm c02, c02, VZERO, PERMRSHIFT1
  707. vaddfp c00, c00, C1
  708. vaddfp c01, c01, C2
  709. vaddfp c02, c02, C3
  710. stvx c00, OFFSET_0, CO1
  711. stvx c01, OFFSET_1, CO1
  712. stvx c02, OFFSET_2, CO1
  713. lvx C1, OFFSET_0, CO2
  714. lvx C2, OFFSET_1, CO2
  715. lvx C3, OFFSET_2, CO2
  716. vperm c00, VZERO, c09, PERMRSHIFT2
  717. vperm c09, c09, c10, PERMRSHIFT2
  718. vperm c10, c10, VZERO, PERMRSHIFT2
  719. vaddfp c00, c00, C1
  720. vaddfp c09, c09, C2
  721. vaddfp c10, c10, C3
  722. stvx c00, OFFSET_0, CO2
  723. stvx c09, OFFSET_1, CO2
  724. stvx c10, OFFSET_2, CO2
  725. addi CO1, CO1, 8 * SIZE
  726. addi CO2, CO2, 8 * SIZE
  727. .align 4
  728. LL(30):
  729. andi. I, M, 2
  730. ble LL(40)
  731. vxor c01, c01, c01
  732. LOAD_A a1, OFFSET_0, AO
  733. vxor c02, c02, c02
  734. LOAD_A a2, OFFSET_1, AO
  735. vxor c05, c05, c05
  736. LOAD_B b1, OFFSET_0, B
  737. vxor c06, c06, c06
  738. LOAD_B b2, OFFSET_1, B
  739. vxor c09, c09, c09
  740. vxor c10, c10, c10
  741. vxor c13, c13, c13
  742. vxor c14, c14, c14
  743. vspltw bp1, b1, 0
  744. mr BO, B
  745. srawi. r0, K, 1
  746. mtspr CTR, r0
  747. ble LL(35)
  748. .align 4
  749. LL(32):
  750. vmaddfp c01, a1, bp1, c01
  751. addi AO, AO, 8 * SIZE
  752. vspltw bp2, b1, 1
  753. vmaddfp c05, a1, bp2, c05
  754. addi BO, BO, 8 * SIZE
  755. vspltw bp1, b1, 2
  756. vmaddfp c09, a1, bp1, c09
  757. vspltw bp2, b1, 3
  758. vmaddfp c13, a1, bp2, c13
  759. LOAD_A a1, OFFSET_0, AO
  760. vspltw bp1, b2, 0
  761. LOAD_B b1, OFFSET_0, BO
  762. vmaddfp c02, a2, bp1, c02
  763. vspltw bp2, b2, 1
  764. vmaddfp c06, a2, bp2, c06
  765. vspltw bp1, b2, 2
  766. vmaddfp c10, a2, bp1, c10
  767. vspltw bp2, b2, 3
  768. LOAD_B b2, OFFSET_1, BO
  769. vmaddfp c14, a2, bp2, c14
  770. LOAD_A a2, OFFSET_1, AO
  771. vspltw bp1, b1, 0
  772. bdnz LL(32)
  773. .align 4
  774. LL(35):
  775. andi. r0, K, 1
  776. ble+ LL(38)
  777. .align 4
  778. LL(36):
  779. vmaddfp c01, a1, bp1, c01
  780. vspltw bp2, b1, 1
  781. vmaddfp c05, a1, bp2, c05
  782. vspltw bp1, b1, 2
  783. vmaddfp c09, a1, bp1, c09
  784. vspltw bp2, b1, 3
  785. vmaddfp c13, a1, bp2, c13
  786. addi AO, AO, 4 * SIZE
  787. addi BO, BO, 4 * SIZE
  788. .align 4
  789. LL(38):
  790. vaddfp c01, c01, c02
  791. vaddfp c05, c05, c06
  792. vaddfp c09, c09, c10
  793. vaddfp c13, c13, c14
  794. vxor VZERO, VZERO, VZERO
  795. lvx swap, OFFSET_0, SP
  796. lvx neg, OFFSET_1, SP
  797. lvx alpha_r, OFFSET_2, SP
  798. lvx alpha_i, OFFSET_3, SP
  799. vperm c05, c05, c05, swap
  800. vperm c13, c13, c13, swap
  801. vxor c05, c05, neg
  802. vxor c13, c13, neg
  803. vaddfp c01, c01, c05
  804. vaddfp c09, c09, c13
  805. vperm c05, c01, c01, swap
  806. vperm c13, c09, c09, swap
  807. vmaddfp c01, alpha_r, c01, VZERO
  808. vmaddfp c01, alpha_i, c05, c01
  809. vmaddfp c09, alpha_r, c09, VZERO
  810. vmaddfp c09, alpha_i, c13, c09
  811. lvx C1, OFFSET_0, CO1
  812. lvx C2, OFFSET_1, CO1
  813. lvsr PERMRSHIFT1, 0, CO1
  814. lvsr PERMRSHIFT2, 0, CO2
  815. vperm c00, VZERO, c01, PERMRSHIFT1
  816. vperm c01, c01, VZERO, PERMRSHIFT1
  817. vaddfp c00, c00, C1
  818. vaddfp c01, c01, C2
  819. stvx c00, OFFSET_0, CO1
  820. stvx c01, OFFSET_1, CO1
  821. lvx C1, OFFSET_0, CO2
  822. lvx C2, OFFSET_1, CO2
  823. vperm c00, VZERO, c09, PERMRSHIFT2
  824. vperm c09, c09, VZERO, PERMRSHIFT2
  825. vaddfp c00, c00, C1
  826. vaddfp c09, c09, C2
  827. stvx c00, OFFSET_0, CO2
  828. stvx c09, OFFSET_1, CO2
  829. addi CO1, CO1, 4 * SIZE
  830. addi CO2, CO2, 4 * SIZE
  831. .align 4
  832. LL(40):
  833. andi. I, M, 1
  834. ble LL(49)
  835. mr BO, B
  836. LFD f8, 0 * SIZE(AO)
  837. LFD f9, 1 * SIZE(AO)
  838. LFD f10, 0 * SIZE(BO)
  839. LFD f11, 1 * SIZE(BO)
  840. LFD f12, 2 * SIZE(BO)
  841. LFD f13, 3 * SIZE(BO)
  842. lfs f0, FZERO(SP)
  843. fmr f1, f0
  844. fmr f2, f0
  845. fmr f3, f0
  846. fmr f4, f0
  847. fmr f5, f0
  848. fmr f6, f0
  849. fmr f7, f0
  850. srawi. r0, K, 1
  851. mtspr CTR, r0
  852. ble LL(45)
  853. .align 4
  854. LL(42):
  855. fmadd f0, f8, f10, f0
  856. fmadd f2, f8, f11, f2
  857. fmadd f4, f8, f12, f4
  858. fmadd f6, f8, f13, f6
  859. fmadd f1, f9, f10, f1
  860. fmadd f3, f9, f11, f3
  861. fmadd f5, f9, f12, f5
  862. fmadd f7, f9, f13, f7
  863. LFD f8, 2 * SIZE(AO)
  864. LFD f9, 3 * SIZE(AO)
  865. LFD f10, 4 * SIZE(BO)
  866. LFD f11, 5 * SIZE(BO)
  867. LFD f12, 6 * SIZE(BO)
  868. LFD f13, 7 * SIZE(BO)
  869. fmadd f0, f8, f10, f0
  870. fmadd f2, f8, f11, f2
  871. fmadd f4, f8, f12, f4
  872. fmadd f6, f8, f13, f6
  873. fmadd f1, f9, f10, f1
  874. fmadd f3, f9, f11, f3
  875. fmadd f5, f9, f12, f5
  876. fmadd f7, f9, f13, f7
  877. LFD f8, 4 * SIZE(AO)
  878. LFD f9, 5 * SIZE(AO)
  879. LFD f10, 8 * SIZE(BO)
  880. LFD f11, 9 * SIZE(BO)
  881. LFD f12, 10 * SIZE(BO)
  882. LFD f13, 11 * SIZE(BO)
  883. addi AO, AO, 4 * SIZE
  884. addi BO, BO, 8 * SIZE
  885. bdnz LL(42)
  886. .align 4
  887. LL(45):
  888. andi. r0, K, 1
  889. ble LL(48)
  890. .align 4
  891. LL(46):
  892. fmadd f0, f8, f10, f0
  893. fmadd f2, f8, f11, f2
  894. fmadd f4, f8, f12, f4
  895. fmadd f6, f8, f13, f6
  896. fmadd f1, f9, f10, f1
  897. fmadd f3, f9, f11, f3
  898. fmadd f5, f9, f12, f5
  899. fmadd f7, f9, f13, f7
  900. addi AO, AO, 2 * SIZE
  901. addi BO, BO, 4 * SIZE
  902. .align 4
  903. LL(48):
  904. #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
  905. fsub f0, f0, f3
  906. fadd f1, f1, f2
  907. fsub f4, f4, f7
  908. fadd f5, f5, f6
  909. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  910. fadd f0, f0, f3
  911. fsub f1, f1, f2
  912. fadd f4, f4, f7
  913. fsub f5, f5, f6
  914. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  915. fadd f0, f0, f3
  916. fsub f1, f2, f1
  917. fadd f4, f4, f7
  918. fsub f5, f6, f5
  919. #else /* RR, RC, CR, CC */
  920. fsub f0, f0, f3
  921. fadd f1, f1, f2
  922. fsub f4, f4, f7
  923. fadd f5, f5, f6
  924. #endif
  925. LFD f8, 0 * SIZE(CO1)
  926. LFD f9, 1 * SIZE(CO1)
  927. LFD f10, 0 * SIZE(CO2)
  928. LFD f11, 1 * SIZE(CO2)
  929. lfs f12, ALPHA_R + 0(SP)
  930. lfs f13, ALPHA_I + 4(SP)
  931. #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
  932. fmadd f8, f12, f0, f8
  933. fnmsub f9, f12, f1, f9
  934. fmadd f10, f12, f4, f10
  935. fnmsub f11, f12, f5, f11
  936. fmadd f8, f13, f1, f8
  937. fmadd f9, f13, f0, f9
  938. fmadd f10, f13, f5, f10
  939. fmadd f11, f13, f4, f11
  940. #else
  941. fmadd f8, f12, f0, f8
  942. fmadd f9, f12, f1, f9
  943. fmadd f10, f12, f4, f10
  944. fmadd f11, f12, f5, f11
  945. fnmsub f8, f13, f1, f8
  946. fmadd f9, f13, f0, f9
  947. fnmsub f10, f13, f5, f10
  948. fmadd f11, f13, f4, f11
  949. #endif
  950. STFD f8, 0 * SIZE(CO1)
  951. STFD f9, 1 * SIZE(CO1)
  952. STFD f10, 0 * SIZE(CO2)
  953. STFD f11, 1 * SIZE(CO2)
  954. LL(49):
  955. mr B, BO
  956. addic. J, J, -1
  957. bgt LL(01)
  958. .align 4
  959. LL(50):
  960. andi. J, N, 1
  961. ble LL(999)
  962. mr CO1, C
  963. mr AO, A
  964. srawi. I, M, 3
  965. ble LL(70)
  966. .align 4
  967. LL(61):
  968. vxor c01, c01, c01
  969. LOAD_B b1, OFFSET_0, B
  970. vxor c02, c02, c02
  971. vxor c03, c03, c03
  972. LOAD_A a1, OFFSET_0, AO
  973. vxor c04, c04, c04
  974. LOAD_A a2, OFFSET_1, AO
  975. vxor c05, c05, c05
  976. LOAD_A a3, OFFSET_2, AO
  977. vxor c06, c06, c06
  978. LOAD_A a4, OFFSET_3, AO
  979. vxor c07, c07, c07
  980. vxor c08, c08, c08
  981. mr BO, B
  982. dcbtst CO1, PREC
  983. dcbtst CO2, PREC
  984. vspltw bp1, b1, 0
  985. srawi. r0, K, 1
  986. mtspr CTR, r0
  987. ble LL(65)
  988. .align 4
  989. LL(62):
  990. LOAD_A a5, OFFSET_4, AO
  991. LOAD_A a6, OFFSET_5, AO
  992. LOAD_A a7, OFFSET_6, AO
  993. LOAD_A a8, OFFSET_7, AO
  994. vmaddfp c01, a1, bp1, c01
  995. vspltw bp2, b1, 1
  996. vmaddfp c02, a2, bp1, c02
  997. vmaddfp c03, a3, bp1, c03
  998. vmaddfp c04, a4, bp1, c04
  999. vmaddfp c05, a1, bp2, c05
  1000. vspltw bp1, b1, 2
  1001. vmaddfp c06, a2, bp2, c06
  1002. vmaddfp c07, a3, bp2, c07
  1003. vmaddfp c08, a4, bp2, c08
  1004. vmaddfp c01, a5, bp1, c01
  1005. vspltw bp2, b1, 3
  1006. vmaddfp c02, a6, bp1, c02
  1007. vmaddfp c03, a7, bp1, c03
  1008. vmaddfp c04, a8, bp1, c04
  1009. LOAD_B b1, OFFSET_1, BO
  1010. vspltw bp1, b1, 0
  1011. vmaddfp c05, a5, bp2, c05
  1012. vmaddfp c06, a6, bp2, c06
  1013. vmaddfp c07, a7, bp2, c07
  1014. vmaddfp c08, a8, bp2, c08
  1015. addi AO, AO, 32 * SIZE
  1016. addi BO, BO, 4 * SIZE
  1017. LOAD_A a1, OFFSET_0, AO
  1018. LOAD_A a2, OFFSET_1, AO
  1019. LOAD_A a3, OFFSET_2, AO
  1020. LOAD_A a4, OFFSET_3, AO
  1021. bdnz LL(62)
  1022. .align 4
  1023. LL(65):
  1024. andi. r0, K, 1
  1025. ble+ LL(68)
  1026. .align 4
  1027. LL(66):
  1028. vmaddfp c01, a1, bp1, c01
  1029. vspltw bp2, b1, 1
  1030. vmaddfp c02, a2, bp1, c02
  1031. addi AO, AO, 16 * SIZE
  1032. vmaddfp c03, a3, bp1, c03
  1033. addi BO, BO, 2 * SIZE
  1034. vmaddfp c04, a4, bp1, c04
  1035. nop
  1036. vmaddfp c05, a1, bp2, c05
  1037. vmaddfp c06, a2, bp2, c06
  1038. vmaddfp c07, a3, bp2, c07
  1039. vmaddfp c08, a4, bp2, c08
  1040. .align 4
  1041. LL(68):
  1042. vxor VZERO, VZERO, VZERO
  1043. lvx swap, OFFSET_0, SP
  1044. lvx neg, OFFSET_1, SP
  1045. lvx alpha_r, OFFSET_2, SP
  1046. lvx alpha_i, OFFSET_3, SP
  1047. vperm c05, c05, c05, swap
  1048. vperm c06, c06, c06, swap
  1049. vperm c07, c07, c07, swap
  1050. vperm c08, c08, c08, swap
  1051. vxor c05, c05, neg
  1052. vxor c06, c06, neg
  1053. vxor c07, c07, neg
  1054. vxor c08, c08, neg
  1055. vaddfp c01, c01, c05
  1056. vaddfp c02, c02, c06
  1057. vaddfp c03, c03, c07
  1058. vaddfp c04, c04, c08
  1059. vperm c05, c01, c01, swap
  1060. vperm c06, c02, c02, swap
  1061. vperm c07, c03, c03, swap
  1062. vperm c08, c04, c04, swap
  1063. vmaddfp c01, alpha_r, c01, VZERO
  1064. vmaddfp c02, alpha_r, c02, VZERO
  1065. vmaddfp c03, alpha_r, c03, VZERO
  1066. vmaddfp c04, alpha_r, c04, VZERO
  1067. vmaddfp c01, alpha_i, c05, c01
  1068. vmaddfp c02, alpha_i, c06, c02
  1069. vmaddfp c03, alpha_i, c07, c03
  1070. vmaddfp c04, alpha_i, c08, c04
  1071. lvx C1, OFFSET_0, CO1
  1072. lvx C2, OFFSET_1, CO1
  1073. lvx C3, OFFSET_2, CO1
  1074. lvx C4, OFFSET_3, CO1
  1075. lvx C5, OFFSET_4, CO1
  1076. lvsr PERMRSHIFT1, 0, CO1
  1077. vperm c00, VZERO, c01, PERMRSHIFT1
  1078. vperm c01, c01, c02, PERMRSHIFT1
  1079. vperm c02, c02, c03, PERMRSHIFT1
  1080. vperm c03, c03, c04, PERMRSHIFT1
  1081. vperm c04, c04, VZERO, PERMRSHIFT1
  1082. vaddfp c00, c00, C1
  1083. vaddfp c01, c01, C2
  1084. vaddfp c02, c02, C3
  1085. vaddfp c03, c03, C4
  1086. vaddfp c04, c04, C5
  1087. stvx c00, OFFSET_0, CO1
  1088. stvx c01, OFFSET_1, CO1
  1089. stvx c02, OFFSET_2, CO1
  1090. stvx c03, OFFSET_3, CO1
  1091. stvx c04, OFFSET_4, CO1
  1092. addi CO1, CO1, 16 * SIZE
  1093. addic. I, I, -1
  1094. bgt+ LL(61)
  1095. .align 4
  1096. LL(70):
  1097. andi. I, M, 4
  1098. ble LL(80)
  1099. vxor c01, c01, c01
  1100. LOAD_B b1, OFFSET_0, B
  1101. vxor c02, c02, c02
  1102. vxor c03, c03, c03
  1103. LOAD_A a1, OFFSET_0, AO
  1104. vxor c04, c04, c04
  1105. LOAD_A a2, OFFSET_1, AO
  1106. vxor c05, c05, c05
  1107. LOAD_A a3, OFFSET_2, AO
  1108. vxor c06, c06, c06
  1109. LOAD_A a4, OFFSET_3, AO
  1110. vxor c07, c07, c07
  1111. vxor c08, c08, c08
  1112. mr BO, B
  1113. vspltw bp1, b1, 0
  1114. srawi. r0, K, 1
  1115. mtspr CTR, r0
  1116. ble LL(75)
  1117. .align 4
  1118. LL(72):
  1119. vmaddfp c01, a1, bp1, c01
  1120. vspltw bp2, b1, 1
  1121. vmaddfp c02, a2, bp1, c02
  1122. vmaddfp c05, a1, bp2, c05
  1123. vspltw bp1, b1, 2
  1124. vmaddfp c06, a2, bp2, c06
  1125. vmaddfp c03, a3, bp1, c03
  1126. vspltw bp2, b1, 3
  1127. vmaddfp c04, a4, bp1, c04
  1128. LOAD_B b1, OFFSET_1, BO
  1129. vspltw bp1, b1, 0
  1130. vmaddfp c07, a3, bp2, c07
  1131. vmaddfp c08, a4, bp2, c08
  1132. addi AO, AO, 16 * SIZE
  1133. addi BO, BO, 4 * SIZE
  1134. LOAD_A a1, OFFSET_0, AO
  1135. LOAD_A a2, OFFSET_1, AO
  1136. LOAD_A a3, OFFSET_2, AO
  1137. LOAD_A a4, OFFSET_3, AO
  1138. bdnz LL(72)
  1139. .align 4
  1140. LL(75):
  1141. andi. r0, K, 1
  1142. ble+ LL(78)
  1143. .align 4
  1144. LL(76):
  1145. vmaddfp c01, a1, bp1, c01
  1146. vspltw bp2, b1, 1
  1147. vmaddfp c02, a2, bp1, c02
  1148. addi AO, AO, 8 * SIZE
  1149. vmaddfp c05, a1, bp2, c05
  1150. addi BO, BO, 2 * SIZE
  1151. vmaddfp c06, a2, bp2, c06
  1152. .align 4
  1153. LL(78):
  1154. vaddfp c01, c01, c03
  1155. vaddfp c02, c02, c04
  1156. vaddfp c05, c05, c07
  1157. vaddfp c06, c06, c08
  1158. vxor VZERO, VZERO, VZERO
  1159. lvx swap, OFFSET_0, SP
  1160. lvx neg, OFFSET_1, SP
  1161. lvx alpha_r, OFFSET_2, SP
  1162. lvx alpha_i, OFFSET_3, SP
  1163. vperm c05, c05, c05, swap
  1164. vperm c06, c06, c06, swap
  1165. vxor c05, c05, neg
  1166. vxor c06, c06, neg
  1167. vaddfp c01, c01, c05
  1168. vaddfp c02, c02, c06
  1169. vperm c05, c01, c01, swap
  1170. vperm c06, c02, c02, swap
  1171. vmaddfp c01, alpha_r, c01, VZERO
  1172. vmaddfp c02, alpha_r, c02, VZERO
  1173. vmaddfp c01, alpha_i, c05, c01
  1174. vmaddfp c02, alpha_i, c06, c02
  1175. lvx C1, OFFSET_0, CO1
  1176. lvx C2, OFFSET_1, CO1
  1177. lvx C3, OFFSET_2, CO1
  1178. lvsr PERMRSHIFT1, 0, CO1
  1179. vperm c00, VZERO, c01, PERMRSHIFT1
  1180. vperm c01, c01, c02, PERMRSHIFT1
  1181. vperm c02, c02, VZERO, PERMRSHIFT1
  1182. vaddfp c00, c00, C1
  1183. vaddfp c01, c01, C2
  1184. vaddfp c02, c02, C3
  1185. stvx c00, OFFSET_0, CO1
  1186. stvx c01, OFFSET_1, CO1
  1187. stvx c02, OFFSET_2, CO1
  1188. addi CO1, CO1, 8 * SIZE
  1189. .align 4
  1190. LL(80):
  1191. andi. I, M, 2
  1192. ble LL(90)
  1193. vxor c01, c01, c01
  1194. LOAD_B b1, OFFSET_0, B
  1195. vxor c02, c02, c02
  1196. LOAD_A a1, OFFSET_0, AO
  1197. LOAD_A a2, OFFSET_1, AO
  1198. vxor c05, c05, c05
  1199. vxor c06, c06, c06
  1200. mr BO, B
  1201. vspltw bp1, b1, 0
  1202. srawi. r0, K, 1
  1203. mtspr CTR, r0
  1204. ble LL(85)
  1205. .align 4
  1206. LL(82):
  1207. vmaddfp c01, a1, bp1, c01
  1208. vspltw bp2, b1, 1
  1209. vmaddfp c05, a1, bp2, c05
  1210. vspltw bp1, b1, 2
  1211. vmaddfp c02, a2, bp1, c02
  1212. vspltw bp2, b1, 3
  1213. LOAD_B b1, OFFSET_1, BO
  1214. vspltw bp1, b1, 0
  1215. vmaddfp c06, a2, bp2, c06
  1216. addi AO, AO, 8 * SIZE
  1217. addi BO, BO, 4 * SIZE
  1218. LOAD_A a1, OFFSET_0, AO
  1219. LOAD_A a2, OFFSET_1, AO
  1220. bdnz LL(82)
  1221. .align 4
  1222. LL(85):
  1223. andi. r0, K, 1
  1224. ble+ LL(88)
  1225. .align 4
  1226. LL(86):
  1227. vspltw bp2, b1, 1
  1228. vmaddfp c01, a1, bp1, c01
  1229. vmaddfp c05, a1, bp2, c05
  1230. addi AO, AO, 4 * SIZE
  1231. addi BO, BO, 2 * SIZE
  1232. .align 4
  1233. LL(88):
  1234. vaddfp c01, c01, c02
  1235. vaddfp c05, c05, c06
  1236. vaddfp c09, c09, c10
  1237. vaddfp c13, c13, c14
  1238. vxor VZERO, VZERO, VZERO
  1239. lvx swap, OFFSET_0, SP
  1240. lvx neg, OFFSET_1, SP
  1241. lvx alpha_r, OFFSET_2, SP
  1242. lvx alpha_i, OFFSET_3, SP
  1243. vperm c05, c05, c05, swap
  1244. vxor c05, c05, neg
  1245. vaddfp c01, c01, c05
  1246. vperm c05, c01, c01, swap
  1247. vmaddfp c01, alpha_r, c01, VZERO
  1248. vmaddfp c01, alpha_i, c05, c01
  1249. lvx C1, OFFSET_0, CO1
  1250. lvx C2, OFFSET_1, CO1
  1251. lvsr PERMRSHIFT1, 0, CO1
  1252. vperm c00, VZERO, c01, PERMRSHIFT1
  1253. vperm c01, c01, VZERO, PERMRSHIFT1
  1254. vaddfp c00, c00, C1
  1255. vaddfp c01, c01, C2
  1256. stvx c00, OFFSET_0, CO1
  1257. stvx c01, OFFSET_1, CO1
  1258. addi CO1, CO1, 4 * SIZE
  1259. .align 4
  1260. LL(90):
  1261. andi. I, M, 1
  1262. ble LL(999)
  1263. mr BO, B
  1264. LFD f8, 0 * SIZE(AO)
  1265. LFD f9, 1 * SIZE(AO)
  1266. LFD f10, 0 * SIZE(BO)
  1267. LFD f11, 1 * SIZE(BO)
  1268. LFD f12, 2 * SIZE(BO)
  1269. LFD f13, 3 * SIZE(BO)
  1270. lfs f0, FZERO(SP)
  1271. fmr f1, f0
  1272. fmr f2, f0
  1273. fmr f3, f0
  1274. srawi. r0, K, 1
  1275. mtspr CTR, r0
  1276. ble LL(95)
  1277. .align 4
  1278. LL(92):
  1279. fmadd f0, f8, f10, f0
  1280. fmadd f2, f8, f11, f2
  1281. fmadd f1, f9, f10, f1
  1282. fmadd f3, f9, f11, f3
  1283. LFD f8, 2 * SIZE(AO)
  1284. LFD f9, 3 * SIZE(AO)
  1285. LFD f10, 4 * SIZE(BO)
  1286. LFD f11, 5 * SIZE(BO)
  1287. fmadd f0, f8, f12, f0
  1288. fmadd f2, f8, f13, f2
  1289. fmadd f1, f9, f12, f1
  1290. fmadd f3, f9, f13, f3
  1291. LFD f8, 4 * SIZE(AO)
  1292. LFD f9, 5 * SIZE(AO)
  1293. LFD f12, 6 * SIZE(BO)
  1294. LFD f13, 7 * SIZE(BO)
  1295. addi AO, AO, 4 * SIZE
  1296. addi BO, BO, 4 * SIZE
  1297. bdnz LL(92)
  1298. .align 4
  1299. LL(95):
  1300. andi. r0, K, 1
  1301. ble LL(98)
  1302. .align 4
  1303. LL(96):
  1304. fmadd f0, f8, f10, f0
  1305. fmadd f2, f8, f11, f2
  1306. fmadd f1, f9, f10, f1
  1307. fmadd f3, f9, f11, f3
  1308. .align 4
  1309. LL(98):
  1310. #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
  1311. fsub f0, f0, f3
  1312. fadd f1, f1, f2
  1313. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  1314. fadd f0, f0, f3
  1315. fsub f1, f1, f2
  1316. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  1317. fadd f0, f0, f3
  1318. fsub f1, f2, f1
  1319. #else /* RR, RC, CR, CC */
  1320. fsub f0, f0, f3
  1321. fadd f1, f1, f2
  1322. #endif
  1323. LFD f8, 0 * SIZE(CO1)
  1324. LFD f9, 1 * SIZE(CO1)
  1325. lfs f12, ALPHA_R + 0(SP)
  1326. lfs f13, ALPHA_I + 4(SP)
  1327. #if defined(RR) || defined(RC) || defined(CR) || defined(CC)
  1328. fmadd f8, f12, f0, f8
  1329. fnmsub f9, f12, f1, f9
  1330. fmadd f8, f13, f1, f8
  1331. fmadd f9, f13, f0, f9
  1332. #else
  1333. fmadd f8, f12, f0, f8
  1334. fmadd f9, f12, f1, f9
  1335. fnmsub f8, f13, f1, f8
  1336. fmadd f9, f13, f0, f9
  1337. #endif
  1338. STFD f8, 0 * SIZE(CO1)
  1339. STFD f9, 1 * SIZE(CO1)
  1340. .align 4
  1341. LL(999):
  1342. mr SP, STACK
  1343. li r0, 0 * 16
  1344. lvx v20, SP, r0
  1345. li r0, 1 * 16
  1346. lvx v21, SP, r0
  1347. li r0, 2 * 16
  1348. lvx v22, SP, r0
  1349. li r0, 3 * 16
  1350. lvx v23, SP, r0
  1351. li r0, 4 * 16
  1352. lvx v24, SP, r0
  1353. li r0, 5 * 16
  1354. lvx v25, SP, r0
  1355. li r0, 6 * 16
  1356. lvx v26, SP, r0
  1357. li r0, 7 * 16
  1358. lvx v27, SP, r0
  1359. li r0, 8 * 16
  1360. lvx v28, SP, r0
  1361. li r0, 9 * 16
  1362. lvx v29, SP, r0
  1363. li r0, 10 * 16
  1364. lvx v30, SP, r0
  1365. li r0, 11 * 16
  1366. lvx v31, SP, r0
  1367. mtspr VRsave, VREG
  1368. #ifdef __64BIT__
  1369. ld r31, 192(SP)
  1370. ld r30, 200(SP)
  1371. ld r29, 208(SP)
  1372. ld r28, 216(SP)
  1373. ld r27, 224(SP)
  1374. ld r26, 232(SP)
  1375. ld r25, 240(SP)
  1376. ld r24, 248(SP)
  1377. ld r23, 256(SP)
  1378. ld r22, 264(SP)
  1379. ld r21, 272(SP)
  1380. ld r20, 280(SP)
  1381. ld r19, 288(SP)
  1382. ld r18, 296(SP)
  1383. ld r17, 304(SP)
  1384. ld r16, 312(SP)
  1385. ld r15, 320(SP)
  1386. ld r14, 328(SP)
  1387. #else
  1388. lwz r31, 192(SP)
  1389. lwz r30, 196(SP)
  1390. lwz r29, 200(SP)
  1391. lwz r28, 204(SP)
  1392. lwz r27, 208(SP)
  1393. lwz r26, 212(SP)
  1394. lwz r25, 216(SP)
  1395. lwz r24, 220(SP)
  1396. lwz r23, 224(SP)
  1397. lwz r22, 228(SP)
  1398. lwz r21, 232(SP)
  1399. lwz r20, 236(SP)
  1400. lwz r19, 240(SP)
  1401. lwz r18, 244(SP)
  1402. lwz r17, 248(SP)
  1403. lwz r16, 252(SP)
  1404. lwz r15, 256(SP)
  1405. lwz r14, 260(SP)
  1406. #endif
  1407. addi SP, SP, STACKSIZE
  1408. blr
  1409. EPILOGUE
  1410. #endif