
zgemm_kernel_1x2_sse3.S 19 kB
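ZGEMM micro-kernel for 32-bit x86 using SSE3 (movddup/addsubpd), computing a 1x2 complex micro-tile of C per iteration; with TRMMKERNEL defined the same source builds the ZTRMM kernel.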

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin. */
/* All rights reserved. */
/* */
/* Redistribution and use in source and binary forms, with or */
/* without modification, are permitted provided that the following */
/* conditions are met: */
/* */
/* 1. Redistributions of source code must retain the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer. */
/* */
/* 2. Redistributions in binary form must reproduce the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer in the documentation and/or other materials */
/* provided with the distribution. */
/* */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
/* POSSIBILITY OF SUCH DAMAGE. */
/* */
/* The views and conclusions contained in the software and */
/* documentation are those of the authors and should not be */
/* interpreted as representing official policies, either expressed */
/* or implied, of The University of Texas at Austin. */
/*********************************************************************/

#define ASSEMBLER
#include "common.h"
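/* Argument layout: the incoming stack arguments are addressed relative
   to %esp as it stands after the prologue below, i.e. 4 bytes of return
   address + STACK (four saved registers) + the ARGS scratch area.
   J, BX, KK and KKK are locals kept in that scratch area. */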
#define STACK	16
#define ARGS	16

#define M	 4 + STACK + ARGS(%esp)
#define N	 8 + STACK + ARGS(%esp)
#define K	12 + STACK + ARGS(%esp)
#define ALPHA_R	16 + STACK + ARGS(%esp)
#define ALPHA_I	24 + STACK + ARGS(%esp)
#define A	32 + STACK + ARGS(%esp)
#define ARG_B	36 + STACK + ARGS(%esp)
#define C	40 + STACK + ARGS(%esp)
#define ARG_LDC	44 + STACK + ARGS(%esp)
#define OFFSET	48 + STACK + ARGS(%esp)

#define J	 0 + STACK(%esp)
#define BX	 4 + STACK(%esp)
#define KK	 8 + STACK(%esp)
#define KKK	12 + STACK(%esp)

#ifdef PENTIUM4
#define PREFETCH_R   (8 * 4)
#define PREFETCH     prefetcht1
#define PREFETCHSIZE 84
#endif

#ifdef PENTIUMM
#define PREFETCH_R   (8 * 4)
#define PREFETCH     prefetcht1
#define PREFETCHSIZE 84
#endif

#define AA  %edx
#define BB  %ecx
#define LDC %ebp
#define B   %edi
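/* ADDSUB encodes the conjugation convention of the GEMM variant: for
   the variants listed below the swapped (imaginary) partial products
   are accumulated with addpd; for the remaining RR/RC/CR/CC variants
   they are subtracted. */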
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(NR) || defined(NC) || defined(TR) || defined(TC) || \
    defined(RN) || defined(RT) || defined(CN) || defined(CT)
#define ADDSUB	addpd
#else
#define ADDSUB	subpd
#endif
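/* KERNEL1..KERNEL8 form one 8-way unrolled pass over k.  Each macro
   multiplies one complex element of A (in %xmm0 or %xmm1) by one
   complex element from each of the two packed B columns, accumulates
   into %xmm4..%xmm7, and preloads the next operands.  The constant
   `address` offsets AA/BB so that unrolled blocks can be chained
   without updating the pointers. */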
#define KERNEL1(address) \
        mulpd %xmm0, %xmm2; \
        PREFETCH (PREFETCHSIZE + 0) * SIZE + (address) * 1 * SIZE(AA); \
        addpd %xmm2, %xmm4; \
        movddup 1 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm0, %xmm2; \
        ADDSUB %xmm2, %xmm5; \
        movddup 2 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm0, %xmm2; \
        addpd %xmm2, %xmm6; \
        movddup 3 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm0, %xmm2; \
        movapd 2 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        ADDSUB %xmm2, %xmm7; \
        movddup 4 * SIZE + (address) * 2 * SIZE(BB), %xmm2

#define KERNEL2(address) \
        mulpd %xmm0, %xmm2; \
        addpd %xmm2, %xmm4; \
        movddup 5 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm0, %xmm2; \
        ADDSUB %xmm2, %xmm5; \
        movddup 6 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm0, %xmm2; \
        addpd %xmm2, %xmm6; \
        movddup 7 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm0, %xmm2; \
        movapd 4 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        ADDSUB %xmm2, %xmm7; \
        movddup 16 * SIZE + (address) * 2 * SIZE(BB), %xmm2

#define KERNEL3(address) \
        mulpd %xmm0, %xmm3; \
        addpd %xmm3, %xmm4; \
        movddup 9 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm0, %xmm3; \
        ADDSUB %xmm3, %xmm5; \
        movddup 10 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm0, %xmm3; \
        addpd %xmm3, %xmm6; \
        movddup 11 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm0, %xmm3; \
        movapd 6 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        ADDSUB %xmm3, %xmm7; \
        movddup 12 * SIZE + (address) * 2 * SIZE(BB), %xmm3

#define KERNEL4(address) \
        mulpd %xmm0, %xmm3; \
        addpd %xmm3, %xmm4; \
        movddup 13 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm0, %xmm3; \
        ADDSUB %xmm3, %xmm5; \
        movddup 14 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm0, %xmm3; \
        addpd %xmm3, %xmm6; \
        movddup 15 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm0, %xmm3; \
        movapd 16 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        ADDSUB %xmm3, %xmm7; \
        movddup 24 * SIZE + (address) * 2 * SIZE(BB), %xmm3

#define KERNEL5(address) \
        mulpd %xmm1, %xmm2; \
        addpd %xmm2, %xmm4; \
        movddup 17 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm2; \
        ADDSUB %xmm2, %xmm5; \
        movddup 18 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm2; \
        addpd %xmm2, %xmm6; \
        movddup 19 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm2; \
        movapd 10 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        ADDSUB %xmm2, %xmm7; \
        movddup 20 * SIZE + (address) * 2 * SIZE(BB), %xmm2

#define KERNEL6(address) \
        mulpd %xmm1, %xmm2; \
        addpd %xmm2, %xmm4; \
        movddup 21 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm2; \
        ADDSUB %xmm2, %xmm5; \
        movddup 22 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm2; \
        addpd %xmm2, %xmm6; \
        movddup 23 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm2; \
        movapd 12 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        ADDSUB %xmm2, %xmm7

#define KERNEL7(address) \
        movddup 32 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd %xmm1, %xmm3; \
        addpd %xmm3, %xmm4; \
        movddup 25 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm1, %xmm3; \
        ADDSUB %xmm3, %xmm5; \
        movddup 26 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm1, %xmm3; \
        addpd %xmm3, %xmm6; \
        movddup 27 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm1, %xmm3; \
        movapd 14 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        ADDSUB %xmm3, %xmm7; \
        movddup 28 * SIZE + (address) * 2 * SIZE(BB), %xmm3

#define KERNEL8(address) \
        mulpd %xmm1, %xmm3; \
        addpd %xmm3, %xmm4; \
        movddup 29 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm1, %xmm3; \
        ADDSUB %xmm3, %xmm5; \
        movddup 30 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm1, %xmm3; \
        addpd %xmm3, %xmm6; \
        movddup 31 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd %xmm1, %xmm3; \
        movapd 24 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        ADDSUB %xmm3, %xmm7; \
        movddup 40 * SIZE + (address) * 2 * SIZE(BB), %xmm3
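/* Entry: reserve the scratch area, save the callee-saved registers,
   load the stack arguments, and scale LDC from complex elements to
   bytes (ZBASE_SHIFT). */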
        PROLOGUE

        subl $ARGS, %esp

        pushl %ebp
        pushl %edi
        pushl %esi
        pushl %ebx

        PROFCODE

        movl ARG_B, B
        movl ARG_LDC, LDC

#ifdef TRMMKERNEL
        movl OFFSET, %eax
#ifndef LEFT
        negl %eax
#endif
        movl %eax, KK
#endif

        sall $ZBASE_SHIFT, LDC

        movl N, %eax
        sarl $1, %eax
        movl %eax, J		# j = n / 2
        jle .L100
        ALIGN_4
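/* Outer loop over pairs of C columns (j = n / 2); a leftover single
   column for odd n is handled starting at .L100. */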
.L01:
#if defined(TRMMKERNEL) && defined(LEFT)
        movl OFFSET, %eax
        movl %eax, KK
#endif

        movl B, BX

        movl C, %esi		# coffset = c
        movl A, AA		# aoffset = a

        movl M, %ebx
        testl %ebx, %ebx
        jle .L100
        ALIGN_4

.L10:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl B, BB
#else
        movl KK, %eax
        leal (, %eax, SIZE), %eax
        leal (AA, %eax, 2), AA
        leal (B, %eax, 4), BB
#endif
        movl BX, %eax
        prefetcht2 (PREFETCH_R + 0) * SIZE(%eax)
        prefetcht2 (PREFETCH_R + 16) * SIZE(%eax)
        subl $-8 * SIZE, BX

        movapd 0 * SIZE(AA), %xmm0
        pxor %xmm4, %xmm4
        movapd 8 * SIZE(AA), %xmm1
        pxor %xmm5, %xmm5
        movddup 0 * SIZE(BB), %xmm2
        pxor %xmm6, %xmm6
        movddup 8 * SIZE(BB), %xmm3
        pxor %xmm7, %xmm7

#ifdef PENTIUM4
        prefetchnta 3 * SIZE(%esi)
        prefetchnta 3 * SIZE(%esi, LDC)
#endif

#ifndef TRMMKERNEL
        movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl K, %eax
        subl KK, %eax
        movl %eax, KKK
#else
        movl KK, %eax
#ifdef LEFT
        addl $1, %eax
#else
        addl $2, %eax
#endif
        movl %eax, KKK
#endif
#ifdef CORE_PRESCOTT
        andl $-8, %eax
        sall $4, %eax
        je .L12

.L1X:
        KERNEL1(16 * 0)
        KERNEL2(16 * 0)
        KERNEL3(16 * 0)
        KERNEL4(16 * 0)
        KERNEL5(16 * 0)
        KERNEL6(16 * 0)
        KERNEL7(16 * 0)
        KERNEL8(16 * 0)
        cmpl $128 * 1, %eax
        jle .L11
        KERNEL1(16 * 1)
        KERNEL2(16 * 1)
        KERNEL3(16 * 1)
        KERNEL4(16 * 1)
        KERNEL5(16 * 1)
        KERNEL6(16 * 1)
        KERNEL7(16 * 1)
        KERNEL8(16 * 1)
        cmpl $128 * 2, %eax
        jle .L11
        KERNEL1(16 * 2)
        KERNEL2(16 * 2)
        KERNEL3(16 * 2)
        KERNEL4(16 * 2)
        KERNEL5(16 * 2)
        KERNEL6(16 * 2)
        KERNEL7(16 * 2)
        KERNEL8(16 * 2)
        cmpl $128 * 3, %eax
        jle .L11
        KERNEL1(16 * 3)
        KERNEL2(16 * 3)
        KERNEL3(16 * 3)
        KERNEL4(16 * 3)
        KERNEL5(16 * 3)
        KERNEL6(16 * 3)
        KERNEL7(16 * 3)
        KERNEL8(16 * 3)
        cmpl $128 * 4, %eax
        jle .L11
        KERNEL1(16 * 4)
        KERNEL2(16 * 4)
        KERNEL3(16 * 4)
        KERNEL4(16 * 4)
        KERNEL5(16 * 4)
        KERNEL6(16 * 4)
        KERNEL7(16 * 4)
        KERNEL8(16 * 4)
        cmpl $128 * 5, %eax
        jle .L11
        KERNEL1(16 * 5)
        KERNEL2(16 * 5)
        KERNEL3(16 * 5)
        KERNEL4(16 * 5)
        KERNEL5(16 * 5)
        KERNEL6(16 * 5)
        KERNEL7(16 * 5)
        KERNEL8(16 * 5)
        cmpl $128 * 6, %eax
        jle .L11
        KERNEL1(16 * 6)
        KERNEL2(16 * 6)
        KERNEL3(16 * 6)
        KERNEL4(16 * 6)
        KERNEL5(16 * 6)
        KERNEL6(16 * 6)
        KERNEL7(16 * 6)
        KERNEL8(16 * 6)
        cmpl $128 * 7, %eax
        jle .L11
        KERNEL1(16 * 7)
        KERNEL2(16 * 7)
        KERNEL3(16 * 7)
        KERNEL4(16 * 7)
        KERNEL5(16 * 7)
        KERNEL6(16 * 7)
        KERNEL7(16 * 7)
        KERNEL8(16 * 7)
#if 1
        cmpl $128 * 8, %eax
        jle .L11
        KERNEL1(16 * 8)
        KERNEL2(16 * 8)
        KERNEL3(16 * 8)
        KERNEL4(16 * 8)
        KERNEL5(16 * 8)
        KERNEL6(16 * 8)
        KERNEL7(16 * 8)
        KERNEL8(16 * 8)
        cmpl $128 * 9, %eax
        jle .L11
        KERNEL1(16 * 9)
        KERNEL2(16 * 9)
        KERNEL3(16 * 9)
        KERNEL4(16 * 9)
        KERNEL5(16 * 9)
        KERNEL6(16 * 9)
        KERNEL7(16 * 9)
        KERNEL8(16 * 9)
        cmpl $128 * 10, %eax
        jle .L11
        KERNEL1(16 * 10)
        KERNEL2(16 * 10)
        KERNEL3(16 * 10)
        KERNEL4(16 * 10)
        KERNEL5(16 * 10)
        KERNEL6(16 * 10)
        KERNEL7(16 * 10)
        KERNEL8(16 * 10)
        cmpl $128 * 11, %eax
        jle .L11
        KERNEL1(16 * 11)
        KERNEL2(16 * 11)
        KERNEL3(16 * 11)
        KERNEL4(16 * 11)
        KERNEL5(16 * 11)
        KERNEL6(16 * 11)
        KERNEL7(16 * 11)
        KERNEL8(16 * 11)
        cmpl $128 * 12, %eax
        jle .L11
        KERNEL1(16 * 12)
        KERNEL2(16 * 12)
        KERNEL3(16 * 12)
        KERNEL4(16 * 12)
        KERNEL5(16 * 12)
        KERNEL6(16 * 12)
        KERNEL7(16 * 12)
        KERNEL8(16 * 12)
        cmpl $128 * 13, %eax
        jle .L11
        KERNEL1(16 * 13)
        KERNEL2(16 * 13)
        KERNEL3(16 * 13)
        KERNEL4(16 * 13)
        KERNEL5(16 * 13)
        KERNEL6(16 * 13)
        KERNEL7(16 * 13)
        KERNEL8(16 * 13)
        cmpl $128 * 14, %eax
        jle .L11
        KERNEL1(16 * 14)
        KERNEL2(16 * 14)
        KERNEL3(16 * 14)
        KERNEL4(16 * 14)
        KERNEL5(16 * 14)
        KERNEL6(16 * 14)
        KERNEL7(16 * 14)
        KERNEL8(16 * 14)
        cmpl $128 * 15, %eax
        jle .L11
        KERNEL1(16 * 15)
        KERNEL2(16 * 15)
        KERNEL3(16 * 15)
        KERNEL4(16 * 15)
        KERNEL5(16 * 15)
        KERNEL6(16 * 15)
        KERNEL7(16 * 15)
        KERNEL8(16 * 15)
#else
        addl $32 * 4 * SIZE, AA
        addl $32 * 8 * SIZE, BB
        subl $128 * 8, %eax
        jg .L1X
#endif

.L11:
        leal (AA, %eax, 1), AA	# * 16
        leal (BB, %eax, 2), BB	# * 64
#else
        sarl $3, %eax
        je .L12
        ALIGN_4

.L11:
        KERNEL1(16 * 0)
        KERNEL2(16 * 0)
        KERNEL3(16 * 0)
        KERNEL4(16 * 0)
        KERNEL5(16 * 0)
        KERNEL6(16 * 0)
        KERNEL7(16 * 0)
        KERNEL8(16 * 0)

        addl $32 * SIZE, BB
        addl $16 * SIZE, AA
        decl %eax
        jne .L11
        ALIGN_4
#endif
.L12:
#ifndef TRMMKERNEL
        movl K, %eax
#else
        movl KKK, %eax
#endif
        movddup ALPHA_R, %xmm1
        movddup ALPHA_I, %xmm3
        andl $7, %eax		# if (k & 7)
        BRANCH
        je .L14
        ALIGN_4

.L13:
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm4
        movddup 1 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        ADDSUB %xmm2, %xmm5
        movddup 2 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm6
        movddup 3 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        movapd 2 * SIZE(AA), %xmm0
        ADDSUB %xmm2, %xmm7
        movddup 4 * SIZE(BB), %xmm2

        addl $2 * SIZE, AA
        addl $4 * SIZE, BB
        decl %eax
        jg .L13
        ALIGN_4

.L14:
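/* Build a qword sign mask: pcmpeqb/psllq leaves 0x8000000000000000 in
   both halves of %xmm0, and the shufps below keeps it only in the low
   ($0x04) or high ($0x40) qword.  The pxor then negates one component
   of the accumulators, applying the variant's conjugation before the
   swapped halves are recombined. */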
        pcmpeqb %xmm0, %xmm0
        SHUFPD_1 %xmm5, %xmm5
        psllq $63, %xmm0
        SHUFPD_1 %xmm7, %xmm7

#if defined(NN) || defined(NT) || defined(TN) || defined(TT)
        shufps $0x04, %xmm0, %xmm0
        pxor %xmm0, %xmm5
        pxor %xmm0, %xmm7
#elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
        shufps $0x40, %xmm0, %xmm0
        pxor %xmm0, %xmm5
        pxor %xmm0, %xmm7
#elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
        shufps $0x40, %xmm0, %xmm0
        pxor %xmm0, %xmm4
        pxor %xmm0, %xmm6
#else
        shufps $0x40, %xmm0, %xmm0
        pxor %xmm0, %xmm4
        pxor %xmm0, %xmm6
#endif

        addpd %xmm5, %xmm4
        addpd %xmm7, %xmm6
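/* Scale by the complex alpha: %xmm4/%xmm6 hold (re, im) and their
   SHUFPD_1 copies hold (im, re); multiplying by the splatted ALPHA_R
   and ALPHA_I and combining with addsubpd yields
   (re*ar - im*ai, im*ar + re*ai). */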
        movaps %xmm4, %xmm5
        movaps %xmm6, %xmm7
        SHUFPD_1 %xmm5, %xmm5
        SHUFPD_1 %xmm7, %xmm7

        mulpd %xmm1, %xmm4
        mulpd %xmm1, %xmm6
        mulpd %xmm3, %xmm5
        mulpd %xmm3, %xmm7

        addsubpd %xmm5, %xmm4
        addsubpd %xmm7, %xmm6

#ifndef TRMMKERNEL
        movsd 0 * SIZE(%esi), %xmm0
        movhpd 1 * SIZE(%esi), %xmm0
        movsd 0 * SIZE(%esi, LDC), %xmm2
        movhpd 1 * SIZE(%esi, LDC), %xmm2

        addpd %xmm0, %xmm4
        addpd %xmm2, %xmm6
#endif

        movsd %xmm4, 0 * SIZE(%esi)
        movhpd %xmm4, 1 * SIZE(%esi)
        movsd %xmm6, 0 * SIZE(%esi, LDC)
        movhpd %xmm6, 1 * SIZE(%esi, LDC)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl K, %eax
        subl KKK, %eax
        leal (, %eax, SIZE), %eax
        leal (AA, %eax, 2), AA
        leal (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl $1, KK
#endif

        addl $2 * SIZE, %esi	# coffset += 2 * SIZE
        decl %ebx		# i --
        jg .L10
        ALIGN_4

.L99:
#if defined(TRMMKERNEL) && !defined(LEFT)
        addl $2, KK
#endif

        leal (, LDC, 2), %eax
        movl BB, B
        addl %eax, C		# c += 2 * ldc
        decl J			# j --
        jg .L01
        ALIGN_4
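/* Tail for odd n: the same kernel restricted to a single column of C;
   the two accumulator pairs are folded together at L114. */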
.L100:
        movl N, %eax
        testl $1, %eax
        jle .L500

        movl C, %esi		# coffset = c
        movl A, AA		# aoffset = a

#if defined(TRMMKERNEL) && defined(LEFT)
        movl OFFSET, %eax
        movl %eax, KK
#endif

        movl M, %ebx
        testl %ebx, %ebx
        jle .L500
        ALIGN_4

L110:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl B, BB
#else
        movl KK, %eax
        leal (, %eax, SIZE), %eax
        leal (AA, %eax, 2), AA
        leal (B, %eax, 2), BB
#endif

        movapd 0 * SIZE(AA), %xmm0
        pxor %xmm4, %xmm4
        movapd 8 * SIZE(AA), %xmm1
        pxor %xmm5, %xmm5
        movddup 0 * SIZE(BB), %xmm2
        pxor %xmm6, %xmm6
        movddup 8 * SIZE(BB), %xmm3
        pxor %xmm7, %xmm7

#ifdef PENTIUM4
        prefetchnta 4 * SIZE(%esi)
#endif

#ifndef TRMMKERNEL
        movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl K, %eax
        subl KK, %eax
        movl %eax, KKK
#else
        movl KK, %eax
#ifdef LEFT
        addl $1, %eax
#else
        addl $1, %eax
#endif
        movl %eax, KKK
#endif
        sarl $3, %eax
        je L112
        ALIGN_4

L111:
        PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm4
        movddup 1 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        movapd 2 * SIZE(AA), %xmm0
        ADDSUB %xmm2, %xmm5
        movddup 2 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm6
        movddup 3 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        movapd 4 * SIZE(AA), %xmm0
        ADDSUB %xmm2, %xmm7
        movddup 4 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm4
        movddup 5 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        movapd 6 * SIZE(AA), %xmm0
        ADDSUB %xmm2, %xmm5
        movddup 6 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm6
        movddup 7 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        movapd 16 * SIZE(AA), %xmm0
        ADDSUB %xmm2, %xmm7
        movddup 16 * SIZE(BB), %xmm2
        mulpd %xmm1, %xmm3
        addpd %xmm3, %xmm4
        movddup 9 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        movapd 10 * SIZE(AA), %xmm1
        ADDSUB %xmm3, %xmm5
        movddup 10 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        addpd %xmm3, %xmm6
        movddup 11 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        movapd 12 * SIZE(AA), %xmm1
        ADDSUB %xmm3, %xmm7
        movddup 12 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        addpd %xmm3, %xmm4
        movddup 13 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        movapd 14 * SIZE(AA), %xmm1
        ADDSUB %xmm3, %xmm5
        movddup 14 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        addpd %xmm3, %xmm6
        movddup 15 * SIZE(BB), %xmm3
        mulpd %xmm1, %xmm3
        movapd 24 * SIZE(AA), %xmm1
        ADDSUB %xmm3, %xmm7
        movddup 24 * SIZE(BB), %xmm3

        addl $16 * SIZE, AA
        addl $16 * SIZE, BB
        decl %eax
        jne L111
        ALIGN_4
L112:
#ifndef TRMMKERNEL
        movl K, %eax
#else
        movl KKK, %eax
#endif
        movddup ALPHA_R, %xmm1
        movddup ALPHA_I, %xmm3
        andl $7, %eax		# if (k & 7)
        BRANCH
        je L114
        ALIGN_4

L113:
        mulpd %xmm0, %xmm2
        addpd %xmm2, %xmm4
        movddup 1 * SIZE(BB), %xmm2
        mulpd %xmm0, %xmm2
        movapd 2 * SIZE(AA), %xmm0
        ADDSUB %xmm2, %xmm5
        movddup 2 * SIZE(BB), %xmm2

        addl $2 * SIZE, AA
        addl $2 * SIZE, BB
        decl %eax
        jg L113
        ALIGN_4

L114:
        addpd %xmm6, %xmm4
        addpd %xmm7, %xmm5

        pcmpeqb %xmm0, %xmm0
        SHUFPD_1 %xmm5, %xmm5
        psllq $63, %xmm0

#if defined(NN) || defined(NT) || defined(TN) || defined(TT)
        shufps $0x04, %xmm0, %xmm0
        pxor %xmm0, %xmm5
#elif defined(NR) || defined(NC) || defined(TR) || defined(TC)
        shufps $0x40, %xmm0, %xmm0
        pxor %xmm0, %xmm5
#elif defined(RN) || defined(RT) || defined(CN) || defined(CT)
        shufps $0x40, %xmm0, %xmm0
        pxor %xmm0, %xmm4
#else
        shufps $0x40, %xmm0, %xmm0
        pxor %xmm0, %xmm4
#endif

        addpd %xmm5, %xmm4

        movaps %xmm4, %xmm5
        SHUFPD_1 %xmm5, %xmm5

        mulpd %xmm1, %xmm4
        mulpd %xmm3, %xmm5
        addsubpd %xmm5, %xmm4

#ifndef TRMMKERNEL
        movsd 0 * SIZE(%esi), %xmm0
        movhpd 1 * SIZE(%esi), %xmm0
        addpd %xmm0, %xmm4
#endif

        movsd %xmm4, 0 * SIZE(%esi)
        movhpd %xmm4, 1 * SIZE(%esi)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl K, %eax
        subl KKK, %eax
        leal (, %eax, SIZE), %eax
        leal (AA, %eax, 2), AA
        leal (BB, %eax, 2), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl $1, KK
#endif

        addl $2 * SIZE, %esi	# coffset += 2 * SIZE
        decl %ebx		# i --
        jg L110
        ALIGN_4

.L500:
        popl %ebx
        popl %esi
        popl %edi
        popl %ebp
        addl $ARGS, %esp
        ret
        EPILOGUE