
zgemm_kernel_1x2.S

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT       */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
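
/* ZGEMM/ZTRMM kernel for 32-bit x86 using the x87 FPU.  Each pass of the
   inner loops accumulates a 1x2 block of C (one row of A against two
   columns of B); building with TRMMKERNEL produces the TRMM variant. */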
#define ASSEMBLER
#include "common.h"

#define STACK	16
#define ARGS	16

/* Local scratch slots in the reserved stack area */
#define J	 0 + STACK(%esp)
#define I	 4 + STACK(%esp)
#define KK	 8 + STACK(%esp)
#define KKK	12 + STACK(%esp)

/* Incoming arguments; ALPHA is 8 bytes per component when DOUBLE
   is defined, 4 bytes otherwise */
#define M	 4 + STACK + ARGS(%esp)
#define N	 8 + STACK + ARGS(%esp)
#define K	12 + STACK + ARGS(%esp)
#ifdef DOUBLE
#define ALPHA_R	  16 + STACK + ARGS(%esp)
#define ALPHA_I	  24 + STACK + ARGS(%esp)
#define STACK_A	  32 + STACK + ARGS(%esp)
#define STACK_B	  36 + STACK + ARGS(%esp)
#define C	  40 + STACK + ARGS(%esp)
#define STACK_LDC 44 + STACK + ARGS(%esp)
#define OFFSET	  48 + STACK + ARGS(%esp)
#else
#define ALPHA_R	  16 + STACK + ARGS(%esp)
#define ALPHA_I	  20 + STACK + ARGS(%esp)
#define STACK_A	  24 + STACK + ARGS(%esp)
#define STACK_B	  28 + STACK + ARGS(%esp)
#define C	  32 + STACK + ARGS(%esp)
#define STACK_LDC 36 + STACK + ARGS(%esp)
#define OFFSET	  40 + STACK + ARGS(%esp)
#endif

/* Register assignments */
#define A	%edx
#define B	%ecx
#define BB	%ebx
#define LDC	%ebp
#define BX	%esi

/* ADD1..ADD4 pick faddp/fsubrp so that each of the four partial products
   of the complex multiply gets the correct sign for the conjugation
   variant being built (NN, CN, NC, CC). */
#define ADD1	faddp
#if defined(NN) || defined(CN)
#define ADD2	faddp
#else
#define ADD2	fsubrp
#endif
#if defined(NN) || defined(CC)
#define ADD3	fsubrp
#else
#define ADD3	faddp
#endif
#if defined(NN) || defined(NC)
#define ADD4	faddp
#else
#define ADD4	fsubrp
#endif

/* Prefetch distance (in elements) and constant byte biases that are
   folded into the A/B pointers (compensated in the prologue). */
#define PREFETCHSIZE (8 * 5 + 4)
#define AOFFSET 1
#define BOFFSET 1

/* Use the 3DNow! prefetch instruction when available, otherwise SSE prefetcht0 */
#ifdef HAVE_3DNOW
#define PREFETCH prefetch
#else
#define PREFETCH prefetcht0
#endif
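
/* KERNEL: fully unrolled body covering eight k iterations for the 1x2
   case.  Per k it consumes one complex element of A and two of B (one
   from each B column), accumulating into the four x87 registers that
   were initialized with fldz.  %eax is the negated, scaled loop counter
   and is advanced by 8 * SIZE at the end. */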
#define KERNEL \
	PREFETCH PREFETCHSIZE * SIZE + AOFFSET(A, %eax, 2);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD -15 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD -14 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL -13 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -15 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -15 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD -16 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD -13 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL -14 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD -14 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -12 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD -11 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD -10 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL -9 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -13 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -11 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD -12 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD -9 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL -10 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD -12 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -8 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD -7 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD -6 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL -5 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -11 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -7 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD -8 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD -5 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL -6 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD -10 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -4 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD -3 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD -2 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL -1 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -9 * SIZE + AOFFSET(A, %eax, 2);\
	FLD -3 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD -4 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD -1 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL -2 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD 8 * SIZE + AOFFSET(A, %eax, 2);\
	fxch %st(1);\
	FLD 0 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD 1 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	PREFETCH (PREFETCHSIZE + 8) * SIZE + AOFFSET(A, %eax, 2);\
	ADD2 %st, %st(5);\
	FLD 2 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL 3 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -7 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 1 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD 0 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD 3 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL 2 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD -6 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 4 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD 5 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD 6 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL 7 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -5 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 5 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD 4 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD 7 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL 6 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD -4 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 8 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD 9 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD 10 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL 11 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -3 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 9 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD 8 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD 11 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL 10 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD -2 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 12 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(4);\
	FLD 13 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD2 %st, %st(5);\
	FLD 14 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD1 %st, %st(6);\
	FMUL 15 * SIZE + BOFFSET(B, %eax, 4);\
	ADD2 %st, %st(6);\
	FLD -1 * SIZE + AOFFSET(A, %eax, 2);\
	FLD 13 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(4);\
	FLD 12 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD4 %st, %st(5);\
	FLD 15 * SIZE + BOFFSET(B, %eax, 4);\
	fmul %st(1), %st;\
	ADD3 %st, %st(6);\
	FMUL 14 * SIZE + BOFFSET(B, %eax, 4);\
	ADD4 %st, %st(6);\
	FLD 16 * SIZE + AOFFSET(A, %eax, 2);\
	fxch %st(2);\
	FLD 0 * SIZE + BOFFSET(BB, %eax, 4);\
	subl $-8 * SIZE, %eax
/*
   A scheduling hint was taken from the following URL:
   http://www.netlib.org/atlas/atlas-comm/msg00260.html
*/
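
/* Prologue: reserve the scratch area, save callee-saved registers,
   scale LDC to bytes, pre-bias the A/B pointers, and exit early if
   M, N, or K is zero.  J = N / 2 counts the column pairs. */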
	PROLOGUE

	subl $ARGS, %esp	# Generate Stack Frame
	pushl %ebp
	pushl %edi
	pushl %esi
	pushl %ebx

	PROFCODE

#if defined(TRMMKERNEL) && !defined(LEFT)
	movl OFFSET, %eax
	negl %eax
	movl %eax, KK
#endif

	movl STACK_LDC, LDC
	sall $ZBASE_SHIFT, LDC

	subl $(AOFFSET - 16 * SIZE), STACK_A
	subl $(BOFFSET - 16 * SIZE), STACK_B

	movl M, %eax
	testl %eax, %eax
	jle .L999

	movl N, %eax
	testl %eax, %eax
	jle .L999

	movl K, %eax
	testl %eax, %eax
	jle .L999

	movl N, %eax
	sarl $1, %eax
	movl %eax, J
	je .L20
	ALIGN_3
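
/* .L11: outer loop over pairs of columns of C.  Each pass walks the M
   rows and computes a 1x2 block per row with the unrolled KERNEL. */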
.L11:
#if defined(TRMMKERNEL) && defined(LEFT)
	movl OFFSET, %eax
	movl %eax, KK
#endif

	movl STACK_A, A
	movl STACK_B, B
	movl C, %edi

	movl K, BX
	sall $ZBASE_SHIFT + 1, BX
	addl B, BX

	movl M, %eax
	movl %eax, I
	ALIGN_3
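
/* .L14: loop over the rows of C.  Prefetch ahead in B through BX, zero
   the four accumulators, preload the first A/B elements, and split K
   into the unrolled part (multiples of 8) and the tail. */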
.L14:
	prefetchnta -16 * SIZE + BOFFSET(BX)
	prefetchnta  -8 * SIZE + BOFFSET(BX)
	subl $-16 * SIZE, BX

	movl STACK_B, B

#if !defined(TRMMKERNEL) || \
	(defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
	(defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
#else
	movl KK, %eax
	leal (, %eax, SIZE), %eax
	leal (A, %eax, 2), A
	leal (B, %eax, 4), B
#endif

	fldz
	fldz
	fldz
	fldz

	FLD   0 * SIZE + AOFFSET(A)
	FLD  -8 * SIZE + AOFFSET(A)
	FLD -16 * SIZE + AOFFSET(A)
	FLD -16 * SIZE + BOFFSET(B)

#ifdef HAVE_3DNOW
	prefetchw 1 * SIZE(%edi)
	prefetchw 2 * SIZE(%edi, LDC)
#elif defined(HAVE_SSE)
	prefetcht0 1 * SIZE(%edi)
	prefetcht0 2 * SIZE(%edi, LDC)
#endif

#ifndef TRMMKERNEL
	movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl K, %eax
	subl KK, %eax
	movl %eax, KKK
#else
	movl KK, %eax
#ifdef LEFT
	addl $1, %eax
#else
	addl $2, %eax
#endif
	movl %eax, KKK
#endif

	andl $-8, %eax
	leal (, %eax, SIZE), %eax
	leal (A, %eax, 2), A
	leal 16 * SIZE(B, %eax, 4), BB
	leal (B, %eax, 4), B
	negl %eax
	NOBRANCH
	je .L16
	ALIGN_4
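
/* .L15: main loop; four KERNEL expansions (32 k iterations) per pass,
   with a conditional exit after each one. */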
.L15:
	KERNEL
	jge .L16
	KERNEL
	jge .L16
	KERNEL
	jge .L16
	KERNEL
	jl .L15
	ALIGN_4
.L16:
#ifndef TRMMKERNEL
	movl K, %eax
#else
	movl KKK, %eax
#endif
	and $7, %eax
	je .L19
	ALIGN_4
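
/* .L17: tail loop for the remaining K % 8 iterations, one k per pass. */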
.L17:
	fmul %st(1), %st
	ADD1 %st, %st(4)
	FLD -15 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	ADD2 %st, %st(5)
	FLD -14 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	ADD1 %st, %st(6)
	FMUL -13 * SIZE + BOFFSET(B)
	ADD2 %st, %st(6)
	FLD -15 * SIZE + AOFFSET(A)
	FLD -15 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	ADD3 %st, %st(4)
	FLD -16 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	ADD4 %st, %st(5)
	FLD -13 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	ADD3 %st, %st(6)
	FMUL -14 * SIZE + BOFFSET(B)
	ADD4 %st, %st(6)
	FLD -14 * SIZE + AOFFSET(A)
	FLD -12 * SIZE + BOFFSET(B)

	addl $2 * SIZE, A
	addl $4 * SIZE, B
	decl %eax
	jne .L17
	ALIGN_4
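
/* .L19: drop the leftover preloads, multiply each accumulated complex
   result by alpha, then add into (or, for TRMM, store to) the two C
   columns at (%edi) and (%edi, LDC).  Advance the pointers and counters
   for the next row / column pair. */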
.L19:
	ffreep %st(0)
	ffreep %st(0)
	ffreep %st(0)
	ffreep %st(0)

	FLD ALPHA_R
	fmul %st(1), %st
	FLD ALPHA_I
	fmul %st(3), %st
	fsubrp %st, %st(1)
	fxch %st(2)
	FMUL ALPHA_R
	fxch %st(1)
	FMUL ALPHA_I
	faddp %st, %st(1)

#ifndef TRMMKERNEL
	FADD 1 * SIZE(%edi)
	FST  1 * SIZE(%edi)
	FADD 0 * SIZE(%edi)
	FST  0 * SIZE(%edi)
#else
	FST  1 * SIZE(%edi)
	FST  0 * SIZE(%edi)
#endif

	FLD ALPHA_R
	fmul %st(1), %st
	FLD ALPHA_I
	fmul %st(3), %st
	fsubrp %st, %st(1)
	fxch %st(2)
	FMUL ALPHA_R
	fxch %st(1)
	FMUL ALPHA_I
	faddp %st, %st(1)

#ifndef TRMMKERNEL
	FADD 1 * SIZE(%edi, LDC)
	FST  1 * SIZE(%edi, LDC)
	FADD 0 * SIZE(%edi, LDC)
	FST  0 * SIZE(%edi, LDC)
#else
	FST  1 * SIZE(%edi, LDC)
	FST  0 * SIZE(%edi, LDC)
#endif

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
	(defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
	movl K, %eax
	subl KKK, %eax
	leal (, %eax, SIZE), %eax
	leal (A, %eax, 2), A
	leal (B, %eax, 4), B
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
	addl $1, KK
#endif

	addl $2 * SIZE, %edi
	decl I
	jne .L14

#if defined(TRMMKERNEL) && !defined(LEFT)
	addl $2, KK
#endif

	leal (, LDC, 2), %eax
	addl %eax, C
	movl B, STACK_B
	decl J
	jne .L11
	ALIGN_4
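
/* .L20: handle the last column of C when N is odd (1x1 blocks; B
   advances one complex element per k). */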
.L20:
	movl N, %eax
	andl $1, %eax
	je .L999
	ALIGN_3

#if defined(TRMMKERNEL) && defined(LEFT)
	movl OFFSET, %eax
	movl %eax, KK
#endif

	movl STACK_A, A
	movl STACK_B, B
	movl C, %edi

	movl M, %eax
	movl %eax, I
	ALIGN_3
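
/* .L24: loop over the rows of C for the single remaining column. */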
.L24:
	movl STACK_B, B

#if !defined(TRMMKERNEL) || \
	(defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
	(defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
#else
	movl KK, %eax
	leal (, %eax, SIZE), %eax
	leal (A, %eax, 2), A
	leal (B, %eax, 2), B
#endif

	fldz
	fldz
	fldz
	fldz

	FLD -16 * SIZE + AOFFSET(A)
	FLD -16 * SIZE + BOFFSET(B)

	prefetchw 1 * SIZE(%edi)

#ifndef TRMMKERNEL
	movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl K, %eax
	subl KK, %eax
	movl %eax, KKK
#else
	movl KK, %eax
#ifdef LEFT
	addl $1, %eax
#else
	addl $1, %eax
#endif
	movl %eax, KKK
#endif
	sarl $2, %eax
	je .L26
	ALIGN_3
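
/* .L25: inner loop for the single-column case, unrolled four times. */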
.L25:
	fmul %st(1), %st
	PADDING
	ADD1 %st, %st(2)
	FMUL -15 * SIZE + BOFFSET(B)
	ADD2 %st, %st(2)
	FLD -15 * SIZE + AOFFSET(A)
	FLD -16 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD4 %st, %st(4)
	FMUL -15 * SIZE + BOFFSET(B)
	ADD3 %st, %st(4)
	FLD -14 * SIZE + AOFFSET(A)
	FLD -14 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD1 %st, %st(2)
	FMUL -13 * SIZE + BOFFSET(B)
	ADD2 %st, %st(2)
	FLD -13 * SIZE + AOFFSET(A)
	FLD -14 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD4 %st, %st(4)
	FMUL -13 * SIZE + BOFFSET(B)
	ADD3 %st, %st(4)
	FLD -12 * SIZE + AOFFSET(A)
	FLD -12 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD1 %st, %st(2)
	FMUL -11 * SIZE + BOFFSET(B)
	ADD2 %st, %st(2)
	FLD -11 * SIZE + AOFFSET(A)
	FLD -12 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD4 %st, %st(4)
	FMUL -11 * SIZE + BOFFSET(B)
	ADD3 %st, %st(4)
	FLD -10 * SIZE + AOFFSET(A)
	FLD -10 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD1 %st, %st(2)
	FMUL -9 * SIZE + BOFFSET(B)
	ADD2 %st, %st(2)
	FLD -9 * SIZE + AOFFSET(A)
	FLD -10 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD4 %st, %st(4)
	FMUL -9 * SIZE + BOFFSET(B)
	ADD3 %st, %st(4)
	FLD -8 * SIZE + AOFFSET(A)
	FLD -8 * SIZE + BOFFSET(B)

	addl $8 * SIZE, A
	addl $8 * SIZE, B
	decl %eax
	jne .L25
	ALIGN_4

.L26:
#ifndef TRMMKERNEL
	movl K, %eax
#else
	movl KKK, %eax
#endif
	and $3, %eax
	je .L29
	ALIGN_4
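
/* .L27: tail loop for the remaining K % 4 iterations. */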
.L27:
	fmul %st(1), %st
	PADDING
	ADD1 %st, %st(2)
	FMUL -15 * SIZE + BOFFSET(B)
	ADD2 %st, %st(2)
	FLD -15 * SIZE + AOFFSET(A)
	FLD -16 * SIZE + BOFFSET(B)
	fmul %st(1), %st
	PADDING
	ADD4 %st, %st(4)
	FMUL -15 * SIZE + BOFFSET(B)
	ADD3 %st, %st(4)
	FLD -14 * SIZE + AOFFSET(A)
	FLD -14 * SIZE + BOFFSET(B)

	addl $2 * SIZE, A
	addl $2 * SIZE, B
	decl %eax
	jne .L27
	ALIGN_4
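
/* .L29: combine the partial sums, scale the complex result by alpha,
   and add into (or, for TRMM, store to) C at (%edi). */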
.L29:
	ffreep %st(0)
	ffreep %st(0)

	faddp %st, %st(3)
	faddp %st, %st(1)
	fxch %st(1)

	FLD ALPHA_R
	fmul %st(1), %st
	FLD ALPHA_I
	fmul %st(3), %st
	fsubrp %st, %st(1)
	fxch %st(2)
	FMUL ALPHA_R
	fxch %st(1)
	FMUL ALPHA_I
	faddp %st, %st(1)

#ifndef TRMMKERNEL
	FADD 1 * SIZE(%edi)
	FST  1 * SIZE(%edi)
	FADD 0 * SIZE(%edi)
	FST  0 * SIZE(%edi)
#else
	FST  1 * SIZE(%edi)
	FST  0 * SIZE(%edi)
#endif

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
	(defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
	movl K, %eax
	subl KKK, %eax
	leal (, %eax, SIZE), %eax
	leal (A, %eax, 2), A
	leal (B, %eax, 2), B
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
	addl $1, KK
#endif

	addl $2 * SIZE, %edi
	decl I
	jne .L24

#if defined(TRMMKERNEL) && !defined(LEFT)
	addl $1, KK
#endif

	addl LDC, C
	movl B, STACK_B
	ALIGN_4
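
/* .L999: restore callee-saved registers, release the scratch area, return. */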
.L999:
	popl %ebx
	popl %esi
	popl %edi
	popl %ebp
	addl $ARGS, %esp
	ret

	EPILOGUE