
zgemm_kernel_2x1_sse2.S 19 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/* 1. Redistributions of source code must retain the above           */
/*    copyright notice, this list of conditions and the following    */
/*    disclaimer.                                                    */
/*                                                                   */
/* 2. Redistributions in binary form must reproduce the above        */
/*    copyright notice, this list of conditions and the following    */
/*    disclaimer in the documentation and/or other materials         */
/*    provided with the distribution.                                */
/*                                                                   */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT           */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,           */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF          */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT          */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,        */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES          */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE         */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR              */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF        */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT         */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT        */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE               */
/* POSSIBILITY OF SUCH DAMAGE.                                       */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
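
/*
 * ZGEMM kernel for 32-bit x86 using SSE2.  It computes two complex
 * (double precision) elements of C per column of B (2x1 register
 * blocking), accumulating alpha * A * B into C; the TRMMKERNEL paths
 * reuse the same code for the triangular-matrix (TRMM) case.
 */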
#define ASSEMBLER
#include "common.h"

#define PREFETCHSIZE (8 * 4)

#if !defined(HAVE_SSE2) || !defined(HAVE_MMX)
#error You have to check your configuration.
#endif

#define STACK 16
#define ARGS 0

#define STACK_M 4 + STACK + ARGS(%esi)
#define STACK_N 8 + STACK + ARGS(%esi)
#define STACK_K 12 + STACK + ARGS(%esi)
#define STACK_ALPHA_R 16 + STACK + ARGS(%esi)
#define STACK_ALPHA_I 24 + STACK + ARGS(%esi)
#define STACK_A 32 + STACK + ARGS(%esi)
#define STACK_B 36 + STACK + ARGS(%esi)
#define STACK_C 40 + STACK + ARGS(%esi)
#define STACK_LDC 44 + STACK + ARGS(%esi)
#define STACK_OFFT 48 + STACK + ARGS(%esi)

#define POSINV 0(%esp)
#define ALPHA_R 16(%esp)
#define ALPHA_I 32(%esp)
#define K 48(%esp)
#define N 52(%esp)
#define M 56(%esp)
#define A 60(%esp)
#define C 64(%esp)
#define J 68(%esp)
#define BX 72(%esp)
#define OLD_STACK 76(%esp)
#define OFFSET 80(%esp)
#define KK 84(%esp)
#define KKK 88(%esp)
#define BUFFER 128(%esp)

#define STACK_ALIGN 4096
#define STACK_OFFSET 1024

#define B %edi
#define LDC %ebp
#define AA %edx
#define BB %ecx
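
/*
 * KERNEL1..KERNEL8 are one 8-way unrolled step of the inner k loop for the
 * 2x1 block: xmm0/xmm1 hold packed A data, xmm2/xmm3 hold the duplicated B
 * values from BUFFER, and xmm4..xmm7 accumulate the partial products.  The
 * movq loads into %mm2 are never consumed inside the loop; they serve to
 * pull upcoming A cache lines in early (PREFETCHSIZE elements ahead).
 */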
#define KERNEL1(address) \
        movq (PREFETCHSIZE + 0) * SIZE + (address) * SIZE(AA), %mm2; \
        mulpd %xmm0, %xmm2; \
        mulpd 2 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm4; \
        movapd 0 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm5; \
        movapd 2 * SIZE + (address) * SIZE(AA), %xmm0; \
        mulpd %xmm0, %xmm2; \
        mulpd 2 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm6; \
        movapd 4 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm7; \
        movapd 4 * SIZE + (address) * SIZE(AA), %xmm0

#define KERNEL2(address) \
        mulpd %xmm0, %xmm2; \
        mulpd 6 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm4; \
        movapd 4 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm5; \
        movapd 6 * SIZE + (address) * SIZE(AA), %xmm0; \
        mulpd %xmm0, %xmm2; \
        mulpd 6 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm6; \
        movapd 16 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm7; \
        movapd 16 * SIZE + (address) * SIZE(AA), %xmm0

#define KERNEL3(address) \
        movq (PREFETCHSIZE + 8) * SIZE + (address) * SIZE(AA), %mm2; \
        mulpd %xmm1, %xmm3; \
        mulpd 10 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm4; \
        movapd 8 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm5; \
        movapd 10 * SIZE + (address) * SIZE(AA), %xmm1; \
        mulpd %xmm1, %xmm3; \
        mulpd 10 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm6; \
        movapd 12 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm7; \
        movapd 12 * SIZE + (address) * SIZE(AA), %xmm1

#define KERNEL4(address) \
        mulpd %xmm1, %xmm3; \
        mulpd 14 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm4; \
        movapd 12 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm5; \
        movapd 14 * SIZE + (address) * SIZE(AA), %xmm1; \
        mulpd %xmm1, %xmm3; \
        mulpd 14 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm6; \
        movapd 24 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm7; \
        movapd 24 * SIZE + (address) * SIZE(AA), %xmm1

#define KERNEL5(address) \
        movq (PREFETCHSIZE + 16) * SIZE + (address) * SIZE(AA), %mm2; \
        mulpd %xmm0, %xmm2; \
        mulpd 18 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm4; \
        movapd 16 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm5; \
        movapd 18 * SIZE + (address) * SIZE(AA), %xmm0; \
        mulpd %xmm0, %xmm2; \
        mulpd 18 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm6; \
        movapd 20 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm7; \
        movapd 20 * SIZE + (address) * SIZE(AA), %xmm0

#define KERNEL6(address) \
        mulpd %xmm0, %xmm2; \
        mulpd 22 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm4; \
        movapd 20 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm5; \
        movapd 22 * SIZE + (address) * SIZE(AA), %xmm0; \
        mulpd %xmm0, %xmm2; \
        mulpd 22 * SIZE + (address) * SIZE(BB), %xmm0; \
        addpd %xmm2, %xmm6; \
        movapd 32 * SIZE + (address) * SIZE(BB), %xmm2; \
        addpd %xmm0, %xmm7; \
        movapd 32 * SIZE + (address) * SIZE(AA), %xmm0

#define KERNEL7(address) \
        movq (PREFETCHSIZE + 24) * SIZE + (address) * SIZE(AA), %mm2; \
        mulpd %xmm1, %xmm3; \
        mulpd 26 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm4; \
        movapd 24 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm5; \
        movapd 26 * SIZE + (address) * SIZE(AA), %xmm1; \
        mulpd %xmm1, %xmm3; \
        mulpd 26 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm6; \
        movapd 28 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm7; \
        movapd 28 * SIZE + (address) * SIZE(AA), %xmm1

#define KERNEL8(address) \
        mulpd %xmm1, %xmm3; \
        mulpd 30 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm4; \
        movapd 28 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm5; \
        movapd 30 * SIZE + (address) * SIZE(AA), %xmm1; \
        mulpd %xmm1, %xmm3; \
        mulpd 30 * SIZE + (address) * SIZE(BB), %xmm1; \
        addpd %xmm3, %xmm6; \
        movapd 40 * SIZE + (address) * SIZE(BB), %xmm3; \
        addpd %xmm1, %xmm7; \
        movapd 40 * SIZE + (address) * SIZE(AA), %xmm1
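
/*
 * Prologue: save the callee-saved registers, switch to an aligned local
 * stack area (BUFFER lives there), copy the arguments into that area, and
 * build the 0x8000000000000000 sign mask that is stored to POSINV and used
 * to negate one of the two stored copies of the imaginary part of alpha.
 */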
        PROLOGUE

        pushl %ebp
        pushl %edi
        pushl %esi
        pushl %ebx

        PROFCODE

        EMMS

        movl %esp, %esi # save old stack
        subl $128 + LOCAL_BUFFER_SIZE + STACK_OFFSET, %esp
        andl $-STACK_ALIGN, %esp # align stack
        addl $STACK_OFFSET, %esp

        STACK_TOUCHING

        movd STACK_M, %mm0
        movl STACK_N, %eax
        movd STACK_K, %mm1
        movd STACK_A, %mm2
        movl STACK_B, B
        movd STACK_C, %mm3
        movl STACK_LDC, LDC
#ifdef TRMMKERNEL
        movd STACK_OFFT, %mm4
#endif

        movsd STACK_ALPHA_R, %xmm0
        movsd STACK_ALPHA_I, %xmm1

        pxor %xmm7, %xmm7
        cmpeqpd %xmm7, %xmm7
        psllq $63, %xmm7 # Generate mask
        pxor %xmm2, %xmm2

        movsd %xmm0, 0 + ALPHA_R
        movsd %xmm0, 8 + ALPHA_R

        movsd %xmm1, 8 + ALPHA_I
        xorpd %xmm7, %xmm1
        movsd %xmm1, 0 + ALPHA_I

#if defined(NN) || defined(NT) || defined(NR) || defined(NC) || \
    defined(TN) || defined(TT) || defined(TR) || defined(TC)
        movsd %xmm7, 0 + POSINV
        movsd %xmm2, 8 + POSINV
#else
        movsd %xmm2, 0 + POSINV
        movsd %xmm7, 8 + POSINV
#endif

        movd %mm1, K
        movl %eax, N
        movd %mm0, M
        movd %mm2, A
        movd %mm3, C
        movl %esi, OLD_STACK

#ifdef TRMMKERNEL
        movd %mm4, OFFSET
        movd %mm4, KK
#ifndef LEFT
        negl KK
#endif
#endif

        sall $ZBASE_SHIFT, LDC

        movl %eax, J # j = n
        testl %eax, %eax
        jle .L999
        ALIGN_2
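
/*
 * .L01: loop over the columns of B/C (j = n .. 1).  .L02/.L04 expand the
 * current column of B into BUFFER: each real and imaginary part is
 * duplicated into both lanes, and POSINV flips the sign of one lane of
 * either the imaginary copy (NN/NT/.../TC) or the real copy (the R/C
 * variants), which implements the conjugation cases.
 */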
.L01:
#if defined(TRMMKERNEL) && defined(LEFT)
        movl OFFSET, %eax
        movl %eax, KK
#endif

        leal BUFFER, BB
        movapd POSINV, %xmm7

        movl K, %eax
        sarl $2, %eax
        jle .L03
        ALIGN_2

.L02:
        movsd 0 * SIZE(B), %xmm0
        movsd 1 * SIZE(B), %xmm1
        movsd 2 * SIZE(B), %xmm2
        movsd 3 * SIZE(B), %xmm3

        unpcklpd %xmm0, %xmm0
        unpcklpd %xmm1, %xmm1
        unpcklpd %xmm2, %xmm2
        unpcklpd %xmm3, %xmm3

#if defined(NN) || defined(NT) || defined(NR) || defined(NC) || \
    defined(TN) || defined(TT) || defined(TR) || defined(TC)
        xorpd %xmm7, %xmm1
        xorpd %xmm7, %xmm3
#else
        xorpd %xmm7, %xmm0
        xorpd %xmm7, %xmm2
#endif

        movapd %xmm0, 0 * SIZE(BB)
        movapd %xmm1, 2 * SIZE(BB)
        movapd %xmm2, 4 * SIZE(BB)
        movapd %xmm3, 6 * SIZE(BB)

        movsd 4 * SIZE(B), %xmm0
        movsd 5 * SIZE(B), %xmm1
        movsd 6 * SIZE(B), %xmm2
        movsd 7 * SIZE(B), %xmm3

        unpcklpd %xmm0, %xmm0
        unpcklpd %xmm1, %xmm1
        unpcklpd %xmm2, %xmm2
        unpcklpd %xmm3, %xmm3

#if defined(NN) || defined(NT) || defined(NR) || defined(NC) || \
    defined(TN) || defined(TT) || defined(TR) || defined(TC)
        xorpd %xmm7, %xmm1
        xorpd %xmm7, %xmm3
#else
        xorpd %xmm7, %xmm0
        xorpd %xmm7, %xmm2
#endif

        movapd %xmm0, 8 * SIZE(BB)
        movapd %xmm1, 10 * SIZE(BB)
        movapd %xmm2, 12 * SIZE(BB)
        movapd %xmm3, 14 * SIZE(BB)

        prefetcht0 104 * SIZE(B)

        addl $ 8 * SIZE, B
        addl $16 * SIZE, BB
        decl %eax
        jne .L02
        ALIGN_2

.L03:
        movl K, %eax
        andl $3, %eax
        BRANCH
        jle .L05
        ALIGN_2

.L04:
        movsd 0 * SIZE(B), %xmm0
        movsd 1 * SIZE(B), %xmm1

        unpcklpd %xmm0, %xmm0
        unpcklpd %xmm1, %xmm1

#if defined(NN) || defined(NT) || defined(NR) || defined(NC) || \
    defined(TN) || defined(TT) || defined(TR) || defined(TC)
        xorpd %xmm7, %xmm1
#else
        xorpd %xmm7, %xmm0
#endif

        movapd %xmm0, 0 * SIZE(BB)
        movapd %xmm1, 2 * SIZE(BB)

        addl $ 2 * SIZE, B
        addl $ 4 * SIZE, BB
        decl %eax
        jne .L04
        ALIGN_4

.L05:
        movl B, BX

        movl C, %esi # coffset = c
        movl A, AA # aoffset = a

        movl M, %ebx
        sarl $1, %ebx # i = (m >> 1)
        jle .L50
        ALIGN_4
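
/*
 * .L10: loop over pairs of rows of C (i = m >> 1).  The .L1X block runs the
 * unrolled KERNEL1..KERNEL8 sequence (8 k iterations per pass, up to 8
 * passes before looping back), .L13 handles the remaining k iterations, and
 * .L14 combines the accumulators, multiplies by alpha and stores/adds the
 * two complex results into C.
 */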
.L10:
        movl BX, %eax
        prefetcht2 0 * SIZE(%eax)
        subl $-8 * SIZE, BX

#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

        leal BUFFER, BB
        movapd 0 * SIZE + BUFFER, %xmm2
        pxor %xmm4, %xmm4
        movapd 0 * SIZE(AA), %xmm0
        pxor %xmm5, %xmm5
        movapd 8 * SIZE + BUFFER, %xmm3
        pxor %xmm6, %xmm6
        movapd 8 * SIZE(AA), %xmm1
        pxor %xmm7, %xmm7
#else
        leal BUFFER, BB
        movl KK, %eax
        leal (, %eax, SIZE), %eax
        leal (AA, %eax, 4), AA
        leal (BB, %eax, 4), BB /* because it's doubled */

        movapd 0 * SIZE(BB), %xmm2
        pxor %xmm4, %xmm4
        movapd 0 * SIZE(AA), %xmm0
        pxor %xmm5, %xmm5
        movapd 8 * SIZE(BB), %xmm3
        pxor %xmm6, %xmm6
        movapd 8 * SIZE(AA), %xmm1
        pxor %xmm7, %xmm7
#endif

        prefetchnta 3 * SIZE(%esi)

#ifndef TRMMKERNEL
        movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl K, %eax
        subl KK, %eax
        movl %eax, KKK
#else
        movl KK, %eax
#ifdef LEFT
        addl $2, %eax
#else
        addl $1, %eax
#endif
        movl %eax, KKK
#endif

        andl $-8, %eax
        NOBRANCH
        je .L12
        sall $3, %eax

.L1X:
        KERNEL1(32 * 0)
        KERNEL2(32 * 0)
        KERNEL3(32 * 0)
        KERNEL4(32 * 0)
        KERNEL5(32 * 0)
        KERNEL6(32 * 0)
        KERNEL7(32 * 0)
        KERNEL8(32 * 0)
        cmpl $64 * 1, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 1)
        KERNEL2(32 * 1)
        KERNEL3(32 * 1)
        KERNEL4(32 * 1)
        KERNEL5(32 * 1)
        KERNEL6(32 * 1)
        KERNEL7(32 * 1)
        KERNEL8(32 * 1)
        cmpl $64 * 2, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 2)
        KERNEL2(32 * 2)
        KERNEL3(32 * 2)
        KERNEL4(32 * 2)
        KERNEL5(32 * 2)
        KERNEL6(32 * 2)
        KERNEL7(32 * 2)
        KERNEL8(32 * 2)
        cmpl $64 * 3, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 3)
        KERNEL2(32 * 3)
        KERNEL3(32 * 3)
        KERNEL4(32 * 3)
        KERNEL5(32 * 3)
        KERNEL6(32 * 3)
        KERNEL7(32 * 3)
        KERNEL8(32 * 3)
        cmpl $64 * 4, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 4)
        KERNEL2(32 * 4)
        KERNEL3(32 * 4)
        KERNEL4(32 * 4)
        KERNEL5(32 * 4)
        KERNEL6(32 * 4)
        KERNEL7(32 * 4)
        KERNEL8(32 * 4)
        cmpl $64 * 5, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 5)
        KERNEL2(32 * 5)
        KERNEL3(32 * 5)
        KERNEL4(32 * 5)
        KERNEL5(32 * 5)
        KERNEL6(32 * 5)
        KERNEL7(32 * 5)
        KERNEL8(32 * 5)
        cmpl $64 * 6, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 6)
        KERNEL2(32 * 6)
        KERNEL3(32 * 6)
        KERNEL4(32 * 6)
        KERNEL5(32 * 6)
        KERNEL6(32 * 6)
        KERNEL7(32 * 6)
        KERNEL8(32 * 6)
        cmpl $64 * 7, %eax
        NOBRANCH
        jle .L11
        KERNEL1(32 * 7)
        KERNEL2(32 * 7)
        KERNEL3(32 * 7)
        KERNEL4(32 * 7)
        KERNEL5(32 * 7)
        KERNEL6(32 * 7)
        KERNEL7(32 * 7)
        KERNEL8(32 * 7)

        addl $64 * 4 * SIZE, AA
        addl $64 * 4 * SIZE, BB
        subl $64 * 8, %eax
        BRANCH
        jg .L1X

.L11:
        leal (BB, %eax, 4), BB
        leal (AA, %eax, 4), AA

.L12:
#ifndef TRMMKERNEL
        movl K, %eax
#else
        movl KKK, %eax
#endif
        andl $7, %eax # if (k & 7)
        BRANCH
        je .L14

.L13:
        movapd 2 * SIZE(BB), %xmm1
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm4
        movapd 0 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm1
        movapd 2 * SIZE(AA), %xmm0
        addpd %xmm1, %xmm5
        movapd 2 * SIZE(BB), %xmm1
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm6
        movapd 4 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm1
        movapd 4 * SIZE(AA), %xmm0
        addpd %xmm1, %xmm7

        addl $4 * SIZE, AA # aoffset += 8
        addl $4 * SIZE, BB # boffset1 += 8

        subl $1, %eax
        jg .L13

.L14:
        movapd ALPHA_R, %xmm2
        movapd ALPHA_I, %xmm3

        SHUFPD_1 %xmm5, %xmm5
        SHUFPD_1 %xmm7, %xmm7

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
        subpd %xmm5, %xmm4
        subpd %xmm7, %xmm6
#else
        addpd %xmm5, %xmm4
        addpd %xmm7, %xmm6
#endif

        movapd %xmm4, %xmm5
        movapd %xmm6, %xmm7

        SHUFPD_1 %xmm4, %xmm4
        SHUFPD_1 %xmm6, %xmm6

        mulpd %xmm2, %xmm5
        mulpd %xmm3, %xmm4
        mulpd %xmm2, %xmm7
        mulpd %xmm3, %xmm6

        addpd %xmm5, %xmm4
        addpd %xmm7, %xmm6

#ifndef TRMMKERNEL
        movsd 0 * SIZE(%esi), %xmm0
        movhpd 1 * SIZE(%esi), %xmm0
        movsd 2 * SIZE(%esi), %xmm1
        movhpd 3 * SIZE(%esi), %xmm1

        addpd %xmm0, %xmm4
        addpd %xmm1, %xmm6
#endif

        movsd %xmm4, 0 * SIZE(%esi)
        movhpd %xmm4, 1 * SIZE(%esi)
        movsd %xmm6, 2 * SIZE(%esi)
        movhpd %xmm6, 3 * SIZE(%esi)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl K, %eax
        subl KKK, %eax
        leal (,%eax, SIZE), %eax
        leal (AA, %eax, 4), AA
        leal (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl $2, KK
#endif

        addl $4 * SIZE, %esi # coffset += 4
        decl %ebx # i --
        jg .L10
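
/*
 * .L50: remainder path for odd M, computing the single remaining complex
 * element of the current column of C with one accumulator pair.
 */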
.L50:
        movl M, %ebx
        testl $1, %ebx
        je .L99

#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

        leal BUFFER, %ecx
        movapd 0 * SIZE + BUFFER, %xmm1
        pxor %xmm4, %xmm4
        movapd 0 * SIZE(AA), %xmm0
        pxor %xmm5, %xmm5
        movapd 8 * SIZE + BUFFER, %xmm2
        pxor %xmm6, %xmm6
        pxor %xmm7, %xmm7
#else
        leal BUFFER, BB
        movl KK, %eax
        leal (, %eax, SIZE), %eax
        leal (AA, %eax, 2), AA
        leal (BB, %eax, 4), BB /* because it's doubled */

        movapd 0 * SIZE(BB), %xmm1
        pxor %xmm4, %xmm4
        movapd 0 * SIZE(AA), %xmm0
        pxor %xmm5, %xmm5
        movapd 8 * SIZE(BB), %xmm2
        pxor %xmm6, %xmm6
        pxor %xmm7, %xmm7
#endif

#ifndef TRMMKERNEL
        movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl K, %eax
        subl KK, %eax
        movl %eax, KKK
#else
        movl KK, %eax
        addl $1, %eax
        movl %eax, KKK
#endif
        sarl $2, %eax # l = (k >> 2)
        jle .L52

.L51:
        mulpd %xmm0, %xmm1
        movapd 2 * SIZE(BB), %xmm3
        addpd %xmm1, %xmm4
        movapd 16 * SIZE(BB), %xmm1
        mulpd %xmm0, %xmm3
        movapd 2 * SIZE(AA), %xmm0
        addpd %xmm3, %xmm5
        movapd 4 * SIZE(BB), %xmm3
        mulpd %xmm0, %xmm3
        mulpd 6 * SIZE(BB), %xmm0
        addpd %xmm3, %xmm4
        addpd %xmm0, %xmm5
        movapd 4 * SIZE(AA), %xmm0
        mulpd %xmm0, %xmm2
        mulpd 10 * SIZE(BB), %xmm0
        addpd %xmm2, %xmm4
        addpd %xmm0, %xmm5
        movapd 6 * SIZE(AA), %xmm0
        movapd 12 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm4
        movapd 24 * SIZE(BB), %xmm2
        mulpd 14 * SIZE(BB), %xmm0
        addpd %xmm0, %xmm5
        movapd 8 * SIZE(AA), %xmm0

        addl $ 8 * SIZE, AA # aoffset += 2
        addl $16 * SIZE, BB # boffset1 += 4
        decl %eax # l--
        jg .L51
        ALIGN_2

.L52:
#ifndef TRMMKERNEL
        movl K, %eax
#else
        movl KKK, %eax
#endif
        andl $3, %eax # l = (k & 3)
        jle .L54
        ALIGN_2

.L53:
        movapd 0 * SIZE(BB), %xmm1
        mulpd %xmm0, %xmm1
        addpd %xmm1, %xmm4
        movapd 2 * SIZE(BB), %xmm1
        mulpd %xmm0, %xmm1
        addpd %xmm1, %xmm5
        movapd 2 * SIZE(AA), %xmm0

        addl $2 * SIZE, AA # aoffset += 2
        addl $4 * SIZE, BB # boffset1 += 4
        decl %eax # l--
        jg .L53

.L54:
        movapd ALPHA_R, %xmm2
        movapd ALPHA_I, %xmm3

        SHUFPD_1 %xmm5, %xmm5

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
        subpd %xmm5, %xmm4
#else
        addpd %xmm5, %xmm4
#endif

        movapd %xmm4, %xmm5

        SHUFPD_1 %xmm4, %xmm4

        mulpd %xmm2, %xmm5
        mulpd %xmm3, %xmm4

        addpd %xmm5, %xmm4

#ifndef TRMMKERNEL
        SHUFPD_2 %xmm4, %xmm4

        movsd 0 * SIZE(%esi), %xmm0
        movhpd 1 * SIZE(%esi), %xmm0

        addpd %xmm0, %xmm4
#endif

        movsd %xmm4, 0 * SIZE(%esi)
        movhpd %xmm4, 1 * SIZE(%esi)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl K, %eax
        subl KKK, %eax
        leal (,%eax, SIZE), %eax
        leal (AA, %eax, 2), AA
        leal (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl $1, KK
#endif
        ALIGN_2
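
/*
 * .L99: advance C to the next column and continue the j loop; .L999
 * restores the caller's stack pointer and the saved registers and returns.
 */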
.L99:
#if defined(TRMMKERNEL) && !defined(LEFT)
        addl $1, KK
#endif

        addl LDC, C # c += ldc
        decl J # j --
        jg .L01

.L999:
        movl OLD_STACK, %esp

        EMMS

        popl %ebx
        popl %esi
        popl %edi
        popl %ebp
        ret
        ALIGN_2

        EPILOGUE