You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

zgemm_kernel_1x2_penryn.S 15 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701
  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
/* ZGEMM kernel (double complex), 1x2 register blocking, tuned for Penryn.
   32-bit x86, cdecl-style args on the stack. SIZE / ZBASE_SHIFT / PROLOGUE
   etc. come from common.h -- TODO confirm against the including build. */
/* STACK (16) covers the four callee-saved register pushes done in the
   prologue; the extra +4 in each argument offset skips the return address.
   ARGS (16) is local scratch allocated with `subl $ARGS, %esp`. */
  40. #define STACK 16
  41. #define ARGS 16
/* Incoming arguments, addressed off %esp after the full prologue. */
  42. #define M 4 + STACK + ARGS(%esp)
  43. #define N 8 + STACK + ARGS(%esp)
  44. #define K 12 + STACK + ARGS(%esp)
  45. #define ALPHA_R 16 + STACK + ARGS(%esp)
  46. #define ALPHA_I 24 + STACK + ARGS(%esp)
  47. #define A 32 + STACK + ARGS(%esp)
  48. #define ARG_B 36 + STACK + ARGS(%esp)
  49. #define C 40 + STACK + ARGS(%esp)
  50. #define ARG_LDC 44 + STACK + ARGS(%esp)
  51. #define OFFSET 48 + STACK + ARGS(%esp)
/* Local spill slots inside the ARGS scratch area:
   J   = remaining column-pair count, BX = rolling B prefetch pointer,
   KK / KKK = TRMM panel offsets (only used when TRMMKERNEL is defined). */
  52. #define J 0 + STACK(%esp)
  53. #define BX 4 + STACK(%esp)
  54. #define KK 8 + STACK(%esp)
  55. #define KKK 12 + STACK(%esp)
/* Per-microarchitecture prefetch distance/instruction overrides;
   generic defaults follow for any target that sets none of these. */
  56. #ifdef NANO
  57. #define PREFETCHSIZE (8 * 3 + 4)
  58. #define PREFETCHW prefetcht0
  59. #define PREFETCHB prefetcht0
  60. #endif
  61. #if defined(NEHALEM) || defined(SANDYBRIDGE)
  62. #define PREFETCHSIZE (8 * 1 - 4)
  63. #define PREFETCHW prefetcht0
  64. #define PREFETCHB prefetcht0
  65. #endif
  66. #ifndef PREFETCH
  67. #define PREFETCH prefetcht0
  68. #endif
  69. #ifndef PREFETCHW
  70. #define PREFETCHW prefetcht0
  71. #endif
  72. #ifndef PREFETCHB
  73. #define PREFETCHB prefetcht0
  74. #endif
  75. #ifndef PREFETCHSIZE
  76. #define PREFETCHSIZE (8 * 13 + 4)
  77. #endif
/* Register roles for the whole kernel:
   AA = current A panel pointer, BB = current B panel pointer,
   LDC = row stride of C in bytes (after the sall below),
   B   = base of the current B panel, C1 = current C column pointer,
   I   = declared as %ebx; %ebx is used directly as the M counter. */
  78. #define AA %edx
  79. #define BB %ecx
  80. #define LDC %ebp
  81. #define B %edi
  82. #define C1 %esi
  83. #define I %ebx
/* ADD1/ADD2 pick add vs. subtract for the real/cross product accumulators
   per conjugation variant (N/T/R/C on each side). NOTE(review): the first
   three cases are identical (addpd/addpd); only the RR/RC/CR/CC family
   differs (ADD2 = subpd). The redundant cases appear to be kept for
   symmetry with sibling kernels -- confirm before collapsing them. */
  84. #if defined(NN) || defined(NT) || defined(TN) || defined(TT)
  85. #define ADD1 addpd
  86. #define ADD2 addpd
  87. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  88. #define ADD1 addpd
  89. #define ADD2 addpd
  90. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  91. #define ADD1 addpd
  92. #define ADD2 addpd
  93. #else
  94. #define ADD1 addpd
  95. #define ADD2 subpd
  96. #endif
  97. PROLOGUE
/* Entry: allocate scratch, save callee-saved registers (x86-32 cdecl:
   %ebp/%edi/%esi/%ebx must be preserved). */
  98. subl $ARGS, %esp # Generate Stack Frame
  99. pushl %ebp
  100. pushl %edi
  101. pushl %esi
  102. pushl %ebx
  103. PROFCODE
/* Load pointer/stride arguments into their dedicated registers. */
  104. movl ARG_B, B
  105. movl ARG_LDC, LDC
/* TRMM only: initialize the KK panel offset from OFFSET
   (negated for the right-side variant). */
  106. #ifdef TRMMKERNEL
  107. movl OFFSET, %eax
  108. #ifndef LEFT
  109. negl %eax
  110. #endif
  111. movl %eax, KK
  112. #endif
/* Early exit when there are no rows to compute. */
  113. movl M, %ebx
  114. testl %ebx, %ebx
  115. jle .L999
/* Bias A and B by +16*SIZE so the hot loops can address their operands
   with displacements starting at -16*SIZE (subl of a negative = addl). */
  116. subl $-16 * SIZE, A
  117. subl $-16 * SIZE, B
/* Convert LDC from complex elements to bytes (ZBASE_SHIFT from common.h). */
  118. sall $ZBASE_SHIFT, LDC
/* J = N / 2 column pairs; if N < 2 fall through to the single-column path. */
  119. movl N, %eax
  120. sarl $1, %eax
  121. movl %eax, J
  122. jle .L20
  123. ALIGN_2
/* .L01: top of the loop over column pairs of C / panels of B. */
  124. .L01:
  125. #if defined(TRMMKERNEL) && defined(LEFT)
  126. movl OFFSET, %eax
  127. movl %eax, KK
  128. #endif
  129. movl B, BX
  130. movl C, C1 # coffset = c
  131. movl A, AA # aoffset = a
  132. movl M, %ebx
  133. ALIGN_4
/* .L10: per-row body of the 1x2 micro-kernel. For the current row of A
   and two columns of B it accumulates four partial complex dot products:
   xmm4/xmm5 for column 0 and xmm6/xmm7 for column 1 (straight and
   half-swapped products; they are merged by haddpd/addsubpd at .L18). */
  134. .L10:
/* TRMM: skip already-processed K entries by advancing AA (1 complex =
   2*SIZE) and BB (2 complex = 4*SIZE) by KK elements; plain GEMM and the
   triangular-start variants begin at the panel base. */
  135. #if !defined(TRMMKERNEL) || \
  136. (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
  137. (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
  138. movl B, BB
  139. #else
  140. movl B, BB
  141. movl KK, %eax
  142. leal (, %eax, SIZE), %eax
  143. leal (AA, %eax, 2), AA
  144. leal (BB, %eax, 4), BB
  145. #endif
/* Rolling prefetch of the shared B panel; BX advances 8*SIZE per row. */
  146. movl BX, %eax
  147. PREFETCHB -16 * SIZE(%eax)
  148. subl $-8 * SIZE, %eax
  149. movl %eax, BX
/* Preload first A element (xmm0 = [re, im]) and first B element (xmm1);
   zero all four accumulators and the scratch regs xmm2/xmm3, and warm
   the two destination C lines for the coming stores. */
  150. movaps -16 * SIZE(AA), %xmm0
  151. pxor %xmm2, %xmm2
  152. movaps -16 * SIZE(BB), %xmm1
  153. pxor %xmm3, %xmm3
  154. xorps %xmm4, %xmm4
  155. PREFETCHW 1 * SIZE(C1)
  156. xorps %xmm5, %xmm5
  157. PREFETCHW 3 * SIZE(C1, LDC)
  158. xorps %xmm6, %xmm6
  159. xorps %xmm7, %xmm7
/* Trip count: K for GEMM; for TRMM the effective KKK depends on
   LEFT/TRANSA (either K-KK, or KK plus the 1x2 block dimensions). */
  160. #ifndef TRMMKERNEL
  161. movl K, %eax
  162. #elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
  163. movl K, %eax
  164. subl KK, %eax
  165. movl %eax, KKK
  166. #else
  167. movl KK, %eax
  168. #ifdef LEFT
  169. addl $1, %eax
  170. #else
  171. addl $2, %eax
  172. #endif
  173. movl %eax, KKK
  174. #endif
/* Main loop runs floor(K/8) iterations; remainder handled at .L15. */
  175. sarl $3, %eax
  176. je .L15
  177. ALIGN_4
/* .L12: K loop unrolled 8x (16*SIZE of A, 32*SIZE of B per pass).
   Each step: pshufd $0x4e swaps the two doubles of a B element so xmm2
   holds [im, re]; both halves are multiplied by the A element and folded
   into the straight (ADD1) and swapped (ADD2) accumulators. The
   accumulate of step i is software-pipelined into step i+1, so the loop
   enters with xmm1/xmm3 preloaded and leaves one ADD pair pending. */
  178. .L12:
  179. PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
  180. ADD1 %xmm3, %xmm6
  181. movaps -14 * SIZE(BB), %xmm3
  182. ADD2 %xmm2, %xmm7
  183. pshufd $0x4e, %xmm1, %xmm2
  184. mulpd %xmm0, %xmm1
  185. mulpd %xmm0, %xmm2
  186. ADD1 %xmm1, %xmm4
  187. movaps -12 * SIZE(BB), %xmm1
  188. ADD2 %xmm2, %xmm5
  189. pshufd $0x4e, %xmm3, %xmm2
  190. mulpd %xmm0, %xmm3
  191. mulpd %xmm0, %xmm2
  192. movaps -14 * SIZE(AA), %xmm0
  193. ADD1 %xmm3, %xmm6
  194. movaps -10 * SIZE(BB), %xmm3
  195. ADD2 %xmm2, %xmm7
  196. pshufd $0x4e, %xmm1, %xmm2
  197. mulpd %xmm0, %xmm1
  198. mulpd %xmm0, %xmm2
  199. ADD1 %xmm1, %xmm4
  200. movaps -8 * SIZE(BB), %xmm1
  201. ADD2 %xmm2, %xmm5
  202. pshufd $0x4e, %xmm3, %xmm2
  203. mulpd %xmm0, %xmm3
  204. mulpd %xmm0, %xmm2
  205. movaps -12 * SIZE(AA), %xmm0
  206. ADD1 %xmm3, %xmm6
  207. movaps -6 * SIZE(BB), %xmm3
  208. ADD2 %xmm2, %xmm7
  209. pshufd $0x4e, %xmm1, %xmm2
  210. mulpd %xmm0, %xmm1
  211. mulpd %xmm0, %xmm2
  212. ADD1 %xmm1, %xmm4
  213. movaps -4 * SIZE(BB), %xmm1
  214. ADD2 %xmm2, %xmm5
  215. pshufd $0x4e, %xmm3, %xmm2
  216. mulpd %xmm0, %xmm3
  217. mulpd %xmm0, %xmm2
  218. movaps -10 * SIZE(AA), %xmm0
  219. ADD1 %xmm3, %xmm6
  220. movaps -2 * SIZE(BB), %xmm3
  221. ADD2 %xmm2, %xmm7
  222. pshufd $0x4e, %xmm1, %xmm2
  223. mulpd %xmm0, %xmm1
  224. mulpd %xmm0, %xmm2
  225. ADD1 %xmm1, %xmm4
  226. movaps 0 * SIZE(BB), %xmm1
  227. ADD2 %xmm2, %xmm5
  228. pshufd $0x4e, %xmm3, %xmm2
  229. mulpd %xmm0, %xmm3
  230. mulpd %xmm0, %xmm2
  231. movaps -8 * SIZE(AA), %xmm0
  232. ADD1 %xmm3, %xmm6
/* Second A prefetch covers the back half of this 8-iteration pass. */
  233. PREFETCH (PREFETCHSIZE + 8) * SIZE(AA)
  234. movaps 2 * SIZE(BB), %xmm3
  235. ADD2 %xmm2, %xmm7
  236. pshufd $0x4e, %xmm1, %xmm2
  237. mulpd %xmm0, %xmm1
  238. mulpd %xmm0, %xmm2
  239. ADD1 %xmm1, %xmm4
  240. movaps 4 * SIZE(BB), %xmm1
  241. ADD2 %xmm2, %xmm5
  242. pshufd $0x4e, %xmm3, %xmm2
  243. mulpd %xmm0, %xmm3
  244. mulpd %xmm0, %xmm2
  245. movaps -6 * SIZE(AA), %xmm0
  246. ADD1 %xmm3, %xmm6
  247. movaps 6 * SIZE(BB), %xmm3
  248. ADD2 %xmm2, %xmm7
  249. pshufd $0x4e, %xmm1, %xmm2
  250. mulpd %xmm0, %xmm1
  251. mulpd %xmm0, %xmm2
  252. ADD1 %xmm1, %xmm4
  253. movaps 8 * SIZE(BB), %xmm1
  254. ADD2 %xmm2, %xmm5
  255. pshufd $0x4e, %xmm3, %xmm2
  256. mulpd %xmm0, %xmm3
  257. mulpd %xmm0, %xmm2
  258. movaps -4 * SIZE(AA), %xmm0
  259. ADD1 %xmm3, %xmm6
  260. movaps 10 * SIZE(BB), %xmm3
  261. ADD2 %xmm2, %xmm7
  262. pshufd $0x4e, %xmm1, %xmm2
  263. mulpd %xmm0, %xmm1
  264. mulpd %xmm0, %xmm2
  265. ADD1 %xmm1, %xmm4
  266. movaps 12 * SIZE(BB), %xmm1
  267. ADD2 %xmm2, %xmm5
  268. pshufd $0x4e, %xmm3, %xmm2
  269. mulpd %xmm0, %xmm3
  270. mulpd %xmm0, %xmm2
  271. movaps -2 * SIZE(AA), %xmm0
  272. ADD1 %xmm3, %xmm6
  273. movaps 14 * SIZE(BB), %xmm3
  274. ADD2 %xmm2, %xmm7
  275. pshufd $0x4e, %xmm1, %xmm2
  276. mulpd %xmm0, %xmm1
  277. mulpd %xmm0, %xmm2
  278. ADD1 %xmm1, %xmm4
  279. movaps 16 * SIZE(BB), %xmm1
  280. ADD2 %xmm2, %xmm5
/* Advance both panel pointers (subl of negative = addl; this encoding
   is the file's convention) and loop. xmm2/xmm3 carry the pending
   products for the next pass or for .L15/.L18. */
  281. subl $-32 * SIZE, BB
  282. pshufd $0x4e, %xmm3, %xmm2
  283. mulpd %xmm0, %xmm3
  284. mulpd %xmm0, %xmm2
  285. movaps 0 * SIZE(AA), %xmm0
  286. subl $-16 * SIZE, AA
  287. subl $1, %eax
  288. jne .L12
  289. ALIGN_4
/* .L15: handle the K % 8 leftover iterations one at a time.
   Entered with xmm2/xmm3 holding the pipelined products from .L12
   (or zeros when the unrolled loop was skipped). */
  290. .L15:
  291. #ifndef TRMMKERNEL
  292. movl K, %eax
  293. #else
  294. movl KKK, %eax
  295. #endif
  296. andl $7, %eax
  297. BRANCH
  298. je .L18
  299. ALIGN_4
/* .L16: one K step per pass -- same accumulate pattern as .L12:
   retire the pending ADD pair, then form the straight and swapped
   products of the current A element against both B columns. */
  300. .L16:
  301. ADD1 %xmm3, %xmm6
  302. movaps -14 * SIZE(BB), %xmm3
  303. ADD2 %xmm2, %xmm7
  304. pshufd $0x4e, %xmm1, %xmm2
  305. mulpd %xmm0, %xmm1
  306. mulpd %xmm0, %xmm2
  307. ADD1 %xmm1, %xmm4
  308. movaps -12 * SIZE(BB), %xmm1
  309. ADD2 %xmm2, %xmm5
  310. pshufd $0x4e, %xmm3, %xmm2
  311. mulpd %xmm0, %xmm3
  312. mulpd %xmm0, %xmm2
  313. movaps -14 * SIZE(AA), %xmm0
/* Advance one complex element of A and one 1x2 row of B. */
  314. addl $2 * SIZE, AA
  315. addl $4 * SIZE, BB
  316. decl %eax
  317. jg .L16
  318. ALIGN_4
/* .L18: retire the last pipelined pair, apply the conjugation sign
   mask, scale by alpha, and write the 1x2 result block to C. */
  319. .L18:
  320. ADD1 %xmm3, %xmm6
/* Build an all-ones register, then shift to get 0x8000000000000000 in
   each 64-bit lane (sign-bit-only mask). */
  321. pcmpeqb %xmm0, %xmm0
  322. ADD2 %xmm2, %xmm7
  323. psllq $63, %xmm0
/* Broadcast alpha_re / alpha_im across both doubles of xmm2 / xmm3. */
  324. movddup ALPHA_R, %xmm2
  325. movddup ALPHA_I, %xmm3
/* Narrow the mask to one lane per conjugation family -- shufps $0x40
   keeps the sign bit only in the high (imaginary) double, $0x04 only in
   the low (real) double -- then flip the matching accumulators' signs. */
  326. #if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
  327. defined(RR) || defined(RC) || defined(CR) || defined(CC)
  328. shufps $0x40, %xmm0, %xmm0
  329. pxor %xmm0, %xmm4
  330. pxor %xmm0, %xmm6
  331. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  332. shufps $0x04, %xmm0, %xmm0
  333. pxor %xmm0, %xmm5
  334. pxor %xmm0, %xmm7
  335. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  336. shufps $0x40, %xmm0, %xmm0
  337. pxor %xmm0, %xmm5
  338. pxor %xmm0, %xmm7
  339. #endif
/* GEMM: load the existing C values (both columns) for the accumulate.
   TRMM overwrites C, so the load is skipped. */
  340. #ifndef TRMMKERNEL
  341. movsd 0 * SIZE(C1), %xmm0
  342. movhpd 1 * SIZE(C1), %xmm0
  343. movsd 0 * SIZE(C1, LDC), %xmm1
  344. movhpd 1 * SIZE(C1, LDC), %xmm1
  345. #endif
/* Horizontally reduce the straight/swapped partial sums into
   [re, im] per column, then apply complex alpha:
   result = alpha_re*x +/- alpha_im*swap(x) via addsubpd. */
  346. haddpd %xmm5, %xmm4
  347. haddpd %xmm7, %xmm6
  348. pshufd $0x4e, %xmm4, %xmm5
  349. pshufd $0x4e, %xmm6, %xmm7
  350. mulpd %xmm2, %xmm4
  351. mulpd %xmm2, %xmm6
  352. mulpd %xmm3, %xmm5
  353. mulpd %xmm3, %xmm7
  354. addsubpd %xmm5, %xmm4
  355. addsubpd %xmm7, %xmm6
/* Accumulate into the previous C contents (GEMM with beta != 0 only).
   NOTE(review): `#if!` with no space is legal cpp but unusual spacing. */
  356. #if! defined(TRMMKERNEL) && !defined(BETAZERO)
  357. addpd %xmm0, %xmm4
  358. addpd %xmm1, %xmm6
  359. #endif
/* Store the 1x2 block: one complex per column. */
  360. movsd %xmm4, 0 * SIZE(C1)
  361. movhpd %xmm4, 1 * SIZE(C1)
  362. movsd %xmm6, 0 * SIZE(C1, LDC)
  363. movhpd %xmm6, 1 * SIZE(C1, LDC)
/* TRMM bookkeeping: skip the unprocessed tail of the panels and bump
   KK for the next row when on the LEFT side. */
  364. #if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
  365. (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
  366. movl K, %eax
  367. subl KKK, %eax
  368. leal (,%eax, SIZE), %eax
  369. leal (AA, %eax, 2), AA
  370. leal (BB, %eax, 4), BB
  371. #endif
  372. #if defined(TRMMKERNEL) && defined(LEFT)
  373. addl $1, KK
  374. #endif
/* Next row of C in this column pair. */
  375. addl $2 * SIZE, C1 # coffset += 4
  376. decl %ebx # i --
  377. jg .L10
  378. #if defined(TRMMKERNEL) && !defined(LEFT)
  379. addl $2, KK
  380. #endif
/* Finished both columns: B advances past the consumed panel,
   C advances two columns (2*LDC bytes), and the J loop repeats. */
  381. movl BB, B
  382. leal (, LDC, 2), %eax
  383. addl %eax, C # c += ldc
  384. decl J # j --
  385. jg .L01
  386. ALIGN_4
/* .L20: handle the final single column when N is odd (1x1 blocking).
   Mirrors the .L01/.L10 structure with B advancing 2*SIZE per K step. */
  387. .L20:
  388. movl N, %eax
  389. testl $1, %eax
  390. jle .L999
  391. #if defined(TRMMKERNEL) && defined(LEFT)
  392. movl OFFSET, %eax
  393. movl %eax, KK
  394. #endif
  395. movl C, C1 # coffset = c
  396. movl A, AA # aoffset = a
  397. movl M, %ebx
  398. ALIGN_4
/* .L21: per-row body for the single remaining column. */
  399. .L21:
/* TRMM: skip KK processed elements of both panels (1 complex each). */
  400. #if !defined(TRMMKERNEL) || \
  401. (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
  402. (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
  403. movl B, BB
  404. #else
  405. movl B, BB
  406. movl KK, %eax
  407. leal (, %eax, SIZE), %eax
  408. leal (AA, %eax, 2), AA
  409. leal (BB, %eax, 2), BB
  410. #endif
/* Preload first operands, zero accumulators (xmm4/xmm5 and xmm6/xmm7
   alternate per unrolled step and are merged at .L28), warm the C line. */
  411. movaps -16 * SIZE(AA), %xmm0
  412. pxor %xmm2, %xmm2
  413. movaps -16 * SIZE(BB), %xmm1
  414. pxor %xmm3, %xmm3
  415. pxor %xmm4, %xmm4
  416. prefetcht0 1 * SIZE(C1)
  417. pxor %xmm5, %xmm5
  418. pxor %xmm6, %xmm6
  419. pxor %xmm7, %xmm7
/* Trip count, as in the 1x2 path. NOTE(review): both TRMM branches add
   $1 here (block is 1x1), so the #ifdef LEFT split is redundant but
   kept in the same shape as the 1x2 path. */
  420. #ifndef TRMMKERNEL
  421. movl K, %eax
  422. #elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
  423. movl K, %eax
  424. subl KK, %eax
  425. movl %eax, KKK
  426. #else
  427. movl KK, %eax
  428. #ifdef LEFT
  429. addl $1, %eax
  430. #else
  431. addl $1, %eax
  432. #endif
  433. movl %eax, KKK
  434. #endif
  435. sarl $3, %eax
  436. je .L25
  437. ALIGN_4
/* .L22: K loop unrolled 8x for the single-column case. Each step forms
   the straight product (A*B) and the half-swapped product (A*swap(B),
   via pshufd $0x4e) and accumulates them in alternating register pairs
   (xmm4/xmm5, then xmm6/xmm7) to relax dependency chains. */
  438. .L22:
  439. PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
  440. pshufd $0x4e, %xmm1, %xmm2
  441. mulpd %xmm0, %xmm1
  442. mulpd %xmm0, %xmm2
  443. movaps -14 * SIZE(AA), %xmm0
  444. ADD1 %xmm1, %xmm4
  445. movaps -14 * SIZE(BB), %xmm1
  446. ADD2 %xmm2, %xmm5
  447. pshufd $0x4e, %xmm1, %xmm2
  448. mulpd %xmm0, %xmm1
  449. mulpd %xmm0, %xmm2
  450. movaps -12 * SIZE(AA), %xmm0
  451. ADD1 %xmm1, %xmm6
  452. movaps -12 * SIZE(BB), %xmm1
  453. ADD2 %xmm2, %xmm7
  454. pshufd $0x4e, %xmm1, %xmm2
  455. mulpd %xmm0, %xmm1
  456. mulpd %xmm0, %xmm2
  457. movaps -10 * SIZE(AA), %xmm0
  458. ADD1 %xmm1, %xmm4
  459. movaps -10 * SIZE(BB), %xmm1
  460. ADD2 %xmm2, %xmm5
  461. pshufd $0x4e, %xmm1, %xmm2
  462. mulpd %xmm0, %xmm1
  463. mulpd %xmm0, %xmm2
  464. movaps -8 * SIZE(AA), %xmm0
  465. ADD1 %xmm1, %xmm6
  466. movaps -8 * SIZE(BB), %xmm1
  467. ADD2 %xmm2, %xmm7
/* Second prefetch covers the back half of the 8-step pass. */
  468. PREFETCH (PREFETCHSIZE + 8) * SIZE(AA)
  469. pshufd $0x4e, %xmm1, %xmm2
  470. mulpd %xmm0, %xmm1
  471. mulpd %xmm0, %xmm2
  472. movaps -6 * SIZE(AA), %xmm0
  473. ADD1 %xmm1, %xmm4
  474. movaps -6 * SIZE(BB), %xmm1
  475. ADD2 %xmm2, %xmm5
  476. pshufd $0x4e, %xmm1, %xmm2
  477. mulpd %xmm0, %xmm1
  478. mulpd %xmm0, %xmm2
  479. movaps -4 * SIZE(AA), %xmm0
  480. ADD1 %xmm1, %xmm6
  481. movaps -4 * SIZE(BB), %xmm1
  482. ADD2 %xmm2, %xmm7
  483. pshufd $0x4e, %xmm1, %xmm2
  484. mulpd %xmm0, %xmm1
  485. mulpd %xmm0, %xmm2
  486. movaps -2 * SIZE(AA), %xmm0
  487. ADD1 %xmm1, %xmm4
  488. movaps -2 * SIZE(BB), %xmm1
  489. ADD2 %xmm2, %xmm5
  490. pshufd $0x4e, %xmm1, %xmm2
  491. mulpd %xmm0, %xmm1
  492. mulpd %xmm0, %xmm2
  493. movaps 0 * SIZE(AA), %xmm0
  494. ADD1 %xmm1, %xmm6
  495. movaps 0 * SIZE(BB), %xmm1
  496. ADD2 %xmm2, %xmm7
/* Advance both panels 8 complex elements (subl of negative = addl). */
  497. subl $-16 * SIZE, AA
  498. subl $-16 * SIZE, BB
  499. subl $1, %eax
  500. jne .L22
  501. ALIGN_4
/* .L25: dispatch the K % 8 leftover iterations to .L26. */
  502. .L25:
  503. #ifndef TRMMKERNEL
  504. movl K, %eax
  505. #else
  506. movl KKK, %eax
  507. #endif
  508. andl $7, %eax
  509. BRANCH
  510. je .L28
  511. ALIGN_4
/* .L26: one K step per pass for the remainder (accumulates only into
   xmm4/xmm5; the alternating xmm6/xmm7 pair is folded in at .L28). */
  512. .L26:
  513. pshufd $0x4e, %xmm1, %xmm2
  514. mulpd %xmm0, %xmm1
  515. mulpd %xmm0, %xmm2
  516. movaps -14 * SIZE(AA), %xmm0
  517. ADD1 %xmm1, %xmm4
  518. movaps -14 * SIZE(BB), %xmm1
  519. ADD2 %xmm2, %xmm5
  520. addl $2 * SIZE, AA
  521. addl $2 * SIZE, BB
  522. decl %eax
  523. jg .L26
  524. ALIGN_4
/* .L28: merge the alternating accumulator pairs, apply the conjugation
   sign mask and alpha scaling, then store one complex to C.
   Same reduction scheme as .L18 but for a single column. */
  525. .L28:
  526. addpd %xmm6, %xmm4
/* All-ones -> sign-bit-only mask per 64-bit lane. */
  527. pcmpeqb %xmm0, %xmm0
  528. addpd %xmm7, %xmm5
  529. psllq $63, %xmm0
/* Broadcast alpha_re / alpha_im. */
  530. movddup ALPHA_R, %xmm2
  531. movddup ALPHA_I, %xmm3
/* Keep the sign bit in the imaginary ($0x40) or real ($0x04) lane only,
   then flip signs of the matching accumulator per conjugation family. */
  532. #if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
  533. defined(RR) || defined(RC) || defined(CR) || defined(CC)
  534. shufps $0x40, %xmm0, %xmm0
  535. pxor %xmm0, %xmm4
  536. #elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
  537. shufps $0x04, %xmm0, %xmm0
  538. pxor %xmm0, %xmm5
  539. #elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
  540. shufps $0x40, %xmm0, %xmm0
  541. pxor %xmm0, %xmm5
  542. #endif
/* GEMM: load existing C value for the accumulate; TRMM overwrites. */
  543. #ifndef TRMMKERNEL
  544. movsd 0 * SIZE(C1), %xmm0
  545. movhpd 1 * SIZE(C1), %xmm0
  546. #endif
/* Reduce to [re, im] and apply complex alpha via addsubpd. */
  547. haddpd %xmm5, %xmm4
  548. pshufd $0x4e, %xmm4, %xmm5
  549. mulpd %xmm2, %xmm4
  550. mulpd %xmm3, %xmm5
  551. addsubpd %xmm5, %xmm4
  552. #if! defined(TRMMKERNEL) && !defined(BETAZERO)
  553. addpd %xmm0, %xmm4
  554. #endif
  555. movsd %xmm4, 0 * SIZE(C1)
  556. movhpd %xmm4, 1 * SIZE(C1)
/* TRMM bookkeeping, as in the 1x2 path (B advances 2*SIZE per K here). */
  557. #if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
  558. (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
  559. movl K, %eax
  560. subl KKK, %eax
  561. leal (,%eax, SIZE), %eax
  562. leal (AA, %eax, 2), AA
  563. leal (BB, %eax, 2), BB
  564. #endif
  565. #if defined(TRMMKERNEL) && defined(LEFT)
  566. addl $1, KK
  567. #endif
/* Next row of C in the final column. */
  568. addl $2 * SIZE, C1
  569. decl %ebx # i --
  570. jg .L21
  571. ALIGN_4
/* .L999: restore callee-saved registers (reverse push order),
   release the scratch area, and return. */
  572. .L999:
  573. popl %ebx
  574. popl %esi
  575. popl %edi
  576. popl %ebp
  577. addl $ARGS, %esp
  578. ret
  579. EPILOGUE