
zgemm_kernel_2x2_sse3.S

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT           */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,           */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF          */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT          */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,        */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES          */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE         */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR              */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF        */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT         */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT        */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE               */
/* POSSIBILITY OF SUCH DAMAGE.                                       */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#define STACK 16
#define ARGS 0

#define STACK_M 4 + STACK + ARGS(%esi)
#define STACK_N 8 + STACK + ARGS(%esi)
#define STACK_K 12 + STACK + ARGS(%esi)
#define STACK_ALPHA_R 16 + STACK + ARGS(%esi)
#define STACK_ALPHA_I 20 + STACK + ARGS(%esi)
#define STACK_A 24 + STACK + ARGS(%esi)
#define STACK_B 28 + STACK + ARGS(%esi)
#define STACK_C 32 + STACK + ARGS(%esi)
#define STACK_LDC 36 + STACK + ARGS(%esi)
#define STACK_OFFT 40 + STACK + ARGS(%esi)

#define POSINV 0(%esp)
#define ALPHA_R 16(%esp)
#define ALPHA_I 32(%esp)
#define K 48(%esp)
#define N 52(%esp)
#define M 56(%esp)
#define A 60(%esp)
#define C 64(%esp)
#define J 68(%esp)
#define OLD_STACK 72(%esp)
#define OFFSET 76(%esp)
#define KK 80(%esp)
#define KKK 84(%esp)
#define BUFFER 128(%esp)

#define B %edi
#define LDC %ebp
#define AA %edx
#define BB %ecx

#ifdef PENTIUM4
#define PREFETCH prefetcht0
#define PREFETCHSIZE 168
#endif

#ifdef PENTIUMM
#define PREFETCH prefetcht0
#define PREFETCHSIZE 168
#endif

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(RN) || defined(RT) || defined(CN) || defined(CT)
#define ADDSUB addps
#else
#define ADDSUB subps
#endif
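
/* ADDSUB folds the conjugation handling into the inner loop: the variants
   selected above accumulate the imaginary-part products with addps, while
   the remaining (presumably conjugated) variants subtract them with subps. */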
#define KERNEL1(address) \
	mulps %xmm0, %xmm2; \
	PREFETCH (PREFETCHSIZE + 0) * SIZE + 1 * (address) * SIZE(AA); \
	addps %xmm2, %xmm4; \
	movshdup 0 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm0, %xmm2; \
	ADDSUB %xmm2, %xmm5; \
	movsldup 4 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm0, %xmm2; \
	addps %xmm2, %xmm6; \
	movshdup 4 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm0, %xmm2; \
	movaps 4 * SIZE + 1 * (address) * SIZE(AA), %xmm0; \
	ADDSUB %xmm2, %xmm7; \
	movsldup 8 * SIZE + 2 * (address) * SIZE(BB), %xmm2

#define KERNEL2(address) \
	mulps %xmm0, %xmm2; \
	addps %xmm2, %xmm4; \
	movshdup 8 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm0, %xmm2; \
	ADDSUB %xmm2, %xmm5; \
	movsldup 12 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm0, %xmm2; \
	addps %xmm2, %xmm6; \
	movshdup 12 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm0, %xmm2; \
	movaps 8 * SIZE + 1 * (address) * SIZE(AA), %xmm0; \
	ADDSUB %xmm2, %xmm7; \
	movsldup 32 * SIZE + 2 * (address) * SIZE(BB), %xmm2

#define KERNEL3(address) \
	mulps %xmm0, %xmm3; \
	addps %xmm3, %xmm4; \
	movshdup 16 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm0, %xmm3; \
	ADDSUB %xmm3, %xmm5; \
	movsldup 20 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm0, %xmm3; \
	addps %xmm3, %xmm6; \
	movshdup 20 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm0, %xmm3; \
	movaps 12 * SIZE + 1 * (address) * SIZE(AA), %xmm0; \
	ADDSUB %xmm3, %xmm7; \
	movsldup 24 * SIZE + 2 * (address) * SIZE(BB), %xmm3

#define KERNEL4(address) \
	mulps %xmm0, %xmm3; \
	addps %xmm3, %xmm4; \
	movshdup 24 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm0, %xmm3; \
	ADDSUB %xmm3, %xmm5; \
	movsldup 28 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm0, %xmm3; \
	addps %xmm3, %xmm6; \
	movshdup 28 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm0, %xmm3; \
	movaps 32 * SIZE + 1 * (address) * SIZE(AA), %xmm0; \
	ADDSUB %xmm3, %xmm7; \
	movsldup 48 * SIZE + 2 * (address) * SIZE(BB), %xmm3

#define KERNEL5(address) \
	mulps %xmm1, %xmm2; \
	addps %xmm2, %xmm4; \
	movshdup 32 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm1, %xmm2; \
	ADDSUB %xmm2, %xmm5; \
	movsldup 36 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm1, %xmm2; \
	addps %xmm2, %xmm6; \
	movshdup 36 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm1, %xmm2; \
	movaps 20 * SIZE + 1 * (address) * SIZE(AA), %xmm1; \
	ADDSUB %xmm2, %xmm7; \
	movsldup 40 * SIZE + 2 * (address) * SIZE(BB), %xmm2

#define KERNEL6(address) \
	mulps %xmm1, %xmm2; \
	addps %xmm2, %xmm4; \
	movshdup 40 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm1, %xmm2; \
	ADDSUB %xmm2, %xmm5; \
	movsldup 44 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm1, %xmm2; \
	addps %xmm2, %xmm6; \
	movshdup 44 * SIZE + 2 * (address) * SIZE(BB), %xmm2; \
	mulps %xmm1, %xmm2; \
	movaps 24 * SIZE + 1 * (address) * SIZE(AA), %xmm1; \
	ADDSUB %xmm2, %xmm7; \
	movsldup 64 * SIZE + 2 * (address) * SIZE(BB), %xmm2

#define KERNEL7(address) \
	mulps %xmm1, %xmm3; \
	addps %xmm3, %xmm4; \
	movshdup 48 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm1, %xmm3; \
	ADDSUB %xmm3, %xmm5; \
	movsldup 52 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm1, %xmm3; \
	addps %xmm3, %xmm6; \
	movshdup 52 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm1, %xmm3; \
	movaps 28 * SIZE + 1 * (address) * SIZE(AA), %xmm1; \
	ADDSUB %xmm3, %xmm7; \
	movsldup 56 * SIZE + 2 * (address) * SIZE(BB), %xmm3

#define KERNEL8(address) \
	mulps %xmm1, %xmm3; \
	addps %xmm3, %xmm4; \
	movshdup 56 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm1, %xmm3; \
	ADDSUB %xmm3, %xmm5; \
	movsldup 60 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm1, %xmm3; \
	addps %xmm3, %xmm6; \
	movshdup 60 * SIZE + 2 * (address) * SIZE(BB), %xmm3; \
	mulps %xmm1, %xmm3; \
	movaps 48 * SIZE + 1 * (address) * SIZE(AA), %xmm1; \
	ADDSUB %xmm3, %xmm7; \
	movsldup 80 * SIZE + 2 * (address) * SIZE(BB), %xmm3
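
/* KERNEL1..KERNEL8 are eight software-unrolled steps of the 2x2 inner loop:
   each step multiplies two packed single-precision complex values of A
   (xmm0/xmm1) by the duplicated real parts (movsldup) and imaginary parts
   (movshdup) of the packed B panel, accumulating into xmm4..xmm7. */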
	PROLOGUE

	pushl %ebp
	pushl %edi
	pushl %esi
	pushl %ebx

	PROFCODE

	movl %esp, %esi # save old stack
	subl $128 + LOCAL_BUFFER_SIZE, %esp
	andl $-1024, %esp # align stack

	STACK_TOUCHING

	movl STACK_M, %ebx
	movl STACK_N, %eax
	movl STACK_K, %ecx
	movl STACK_A, %edx

	movl %ebx, M
	movl %eax, N
	movl %ecx, K
	movl %edx, A
	movl %esi, OLD_STACK

	movl STACK_B, %edi
	movl STACK_C, %ebx

#ifdef TRMMKERNEL
	movss STACK_OFFT, %xmm4
#endif

	movss STACK_ALPHA_R, %xmm0
	movss STACK_ALPHA_I, %xmm1

	pxor %xmm7, %xmm7
	cmpeqps %xmm7, %xmm7
	pslld $31, %xmm7 # Generate mask

	shufps $0, %xmm0, %xmm0
	movaps %xmm0, 0 + ALPHA_R

	movss %xmm1, 4 + ALPHA_I
	movss %xmm1, 12 + ALPHA_I
	xorps %xmm7, %xmm1
	movss %xmm1, 0 + ALPHA_I
	movss %xmm1, 8 + ALPHA_I

	movl %ebx, C
	movl STACK_LDC, LDC

#ifdef TRMMKERNEL
	movss %xmm4, OFFSET
	movss %xmm4, KK
#ifndef LEFT
	negl KK
#endif
#endif

	sall $ZBASE_SHIFT, LDC

	movl %eax, J # j = n
	sarl $1, J
	jle .L100
	ALIGN_4
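
/* Outer loop over pairs of columns of C (J = N / 2); each iteration first
   expands the current B panel into BUFFER. */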
.L01:
#if defined(TRMMKERNEL) && defined(LEFT)
	movl OFFSET, %eax
	movl %eax, KK
#endif

/* Copying to Sub Buffer */
	leal BUFFER, %ecx

	movl K, %eax
	sarl $2, %eax
	jle .L03
	ALIGN_4

.L02:
	movddup 0 * SIZE(B), %xmm0
	movddup 2 * SIZE(B), %xmm1
	movddup 4 * SIZE(B), %xmm2
	movddup 6 * SIZE(B), %xmm3
	movddup 8 * SIZE(B), %xmm4
	movddup 10 * SIZE(B), %xmm5
	movddup 12 * SIZE(B), %xmm6
	movddup 14 * SIZE(B), %xmm7

	movaps %xmm0, 0 * SIZE(BB)
	movaps %xmm1, 4 * SIZE(BB)
	movaps %xmm2, 8 * SIZE(BB)
	movaps %xmm3, 12 * SIZE(BB)
	movaps %xmm4, 16 * SIZE(BB)
	movaps %xmm5, 20 * SIZE(BB)
	movaps %xmm6, 24 * SIZE(BB)
	movaps %xmm7, 28 * SIZE(BB)

	# prefetcht1 128 * SIZE(%ecx)
	prefetcht0 112 * SIZE(%edi)

	addl $16 * SIZE, B
	addl $32 * SIZE, BB
	decl %eax
	jne .L02
	ALIGN_4

.L03:
	movl K, %eax
	andl $3, %eax
	BRANCH
	jle .L05
	ALIGN_4

.L04:
	movddup 0 * SIZE(B), %xmm0
	movddup 2 * SIZE(B), %xmm1

	movaps %xmm0, 0 * SIZE(BB)
	movaps %xmm1, 4 * SIZE(BB)

	addl $4 * SIZE, B
	addl $8 * SIZE, BB
	decl %eax
	jne .L04
	ALIGN_4

.L05:
	movl C, %esi
	movl A, %edx
	movl M, %ebx
	sarl $1, %ebx
	jle .L30
	ALIGN_4
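
/* Main 2x2 tile loop. The movddup stores above duplicate every complex
   element of B, so the kernel can read real/imaginary broadcasts with
   movsldup/movshdup straight from BUFFER. */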
.L10:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

	leal BUFFER, BB # boffset1 = boffset
#else
	leal BUFFER, BB # boffset1 = boffset
	movl KK, %eax
	leal (, %eax, 8), %eax
	leal (AA, %eax, 2), AA
	leal (BB, %eax, 4), BB
#endif

	movaps 0 * SIZE(AA), %xmm0
	pxor %xmm4, %xmm4
	movaps 16 * SIZE(AA), %xmm1
	pxor %xmm5, %xmm5
	movsldup 0 * SIZE(BB), %xmm2
	pxor %xmm6, %xmm6
	movsldup 16 * SIZE(BB), %xmm3
	pxor %xmm7, %xmm7

	prefetchnta 4 * SIZE(%esi)
	prefetchnta 4 * SIZE(%esi, LDC)

#ifndef TRMMKERNEL
	movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl K, %eax
	subl KK, %eax
	movl %eax, KKK
#else
	movl KK, %eax
#ifdef LEFT
	addl $2, %eax
#else
	addl $2, %eax
#endif
	movl %eax, KKK
#endif

#if 1
	andl $-8, %eax
	sall $4, %eax
	je .L15
.L1X:
	KERNEL1(32 * 0)
	KERNEL2(32 * 0)
	KERNEL3(32 * 0)
	KERNEL4(32 * 0)
	KERNEL5(32 * 0)
	KERNEL6(32 * 0)
	KERNEL7(32 * 0)
	KERNEL8(32 * 0)
	cmpl $128 * 1, %eax
	jle .L12
	KERNEL1(32 * 1)
	KERNEL2(32 * 1)
	KERNEL3(32 * 1)
	KERNEL4(32 * 1)
	KERNEL5(32 * 1)
	KERNEL6(32 * 1)
	KERNEL7(32 * 1)
	KERNEL8(32 * 1)
	cmpl $128 * 2, %eax
	jle .L12
	KERNEL1(32 * 2)
	KERNEL2(32 * 2)
	KERNEL3(32 * 2)
	KERNEL4(32 * 2)
	KERNEL5(32 * 2)
	KERNEL6(32 * 2)
	KERNEL7(32 * 2)
	KERNEL8(32 * 2)
	cmpl $128 * 3, %eax
	jle .L12
	KERNEL1(32 * 3)
	KERNEL2(32 * 3)
	KERNEL3(32 * 3)
	KERNEL4(32 * 3)
	KERNEL5(32 * 3)
	KERNEL6(32 * 3)
	KERNEL7(32 * 3)
	KERNEL8(32 * 3)
	cmpl $128 * 4, %eax
	jle .L12
	KERNEL1(32 * 4)
	KERNEL2(32 * 4)
	KERNEL3(32 * 4)
	KERNEL4(32 * 4)
	KERNEL5(32 * 4)
	KERNEL6(32 * 4)
	KERNEL7(32 * 4)
	KERNEL8(32 * 4)
	cmpl $128 * 5, %eax
	jle .L12
	KERNEL1(32 * 5)
	KERNEL2(32 * 5)
	KERNEL3(32 * 5)
	KERNEL4(32 * 5)
	KERNEL5(32 * 5)
	KERNEL6(32 * 5)
	KERNEL7(32 * 5)
	KERNEL8(32 * 5)
	cmpl $128 * 6, %eax
	jle .L12
	KERNEL1(32 * 6)
	KERNEL2(32 * 6)
	KERNEL3(32 * 6)
	KERNEL4(32 * 6)
	KERNEL5(32 * 6)
	KERNEL6(32 * 6)
	KERNEL7(32 * 6)
	KERNEL8(32 * 6)
	cmpl $128 * 7, %eax
	jle .L12
	KERNEL1(32 * 7)
	KERNEL2(32 * 7)
	KERNEL3(32 * 7)
	KERNEL4(32 * 7)
	KERNEL5(32 * 7)
	KERNEL6(32 * 7)
	KERNEL7(32 * 7)
	KERNEL8(32 * 7)
#if 1
	cmpl $128 * 8, %eax
	jle .L12
	KERNEL1(32 * 8)
	KERNEL2(32 * 8)
	KERNEL3(32 * 8)
	KERNEL4(32 * 8)
	KERNEL5(32 * 8)
	KERNEL6(32 * 8)
	KERNEL7(32 * 8)
	KERNEL8(32 * 8)
	cmpl $128 * 9, %eax
	jle .L12
	KERNEL1(32 * 9)
	KERNEL2(32 * 9)
	KERNEL3(32 * 9)
	KERNEL4(32 * 9)
	KERNEL5(32 * 9)
	KERNEL6(32 * 9)
	KERNEL7(32 * 9)
	KERNEL8(32 * 9)
	cmpl $128 * 10, %eax
	jle .L12
	KERNEL1(32 * 10)
	KERNEL2(32 * 10)
	KERNEL3(32 * 10)
	KERNEL4(32 * 10)
	KERNEL5(32 * 10)
	KERNEL6(32 * 10)
	KERNEL7(32 * 10)
	KERNEL8(32 * 10)
	cmpl $128 * 11, %eax
	jle .L12
	KERNEL1(32 * 11)
	KERNEL2(32 * 11)
	KERNEL3(32 * 11)
	KERNEL4(32 * 11)
	KERNEL5(32 * 11)
	KERNEL6(32 * 11)
	KERNEL7(32 * 11)
	KERNEL8(32 * 11)
	cmpl $128 * 12, %eax
	jle .L12
	KERNEL1(32 * 12)
	KERNEL2(32 * 12)
	KERNEL3(32 * 12)
	KERNEL4(32 * 12)
	KERNEL5(32 * 12)
	KERNEL6(32 * 12)
	KERNEL7(32 * 12)
	KERNEL8(32 * 12)
	cmpl $128 * 13, %eax
	jle .L12
	KERNEL1(32 * 13)
	KERNEL2(32 * 13)
	KERNEL3(32 * 13)
	KERNEL4(32 * 13)
	KERNEL5(32 * 13)
	KERNEL6(32 * 13)
	KERNEL7(32 * 13)
	KERNEL8(32 * 13)
	cmpl $128 * 14, %eax
	jle .L12
	KERNEL1(32 * 14)
	KERNEL2(32 * 14)
	KERNEL3(32 * 14)
	KERNEL4(32 * 14)
	KERNEL5(32 * 14)
	KERNEL6(32 * 14)
	KERNEL7(32 * 14)
	KERNEL8(32 * 14)
	cmpl $128 * 15, %eax
	jle .L12
	KERNEL1(32 * 15)
	KERNEL2(32 * 15)
	KERNEL3(32 * 15)
	KERNEL4(32 * 15)
	KERNEL5(32 * 15)
	KERNEL6(32 * 15)
	KERNEL7(32 * 15)
	KERNEL8(32 * 15)
#else
	addl $128 * 4 * SIZE, BB
	addl $128 * 2 * SIZE, AA
	subl $128 * 8, %eax
	jg .L1X
	jmp .L15
#endif

.L12:
	leal (AA, %eax, 1), AA
	leal (BB, %eax, 2), BB
	ALIGN_4
#else
	sarl $3, %eax
	je .L15
	ALIGN_4

.L11:
	KERNEL1(32 * 7)
	KERNEL2(32 * 7)
	KERNEL3(32 * 7)
	KERNEL4(32 * 7)
	KERNEL5(32 * 7)
	KERNEL6(32 * 7)
	KERNEL7(32 * 7)
	KERNEL8(32 * 7)

	addl $32 * SIZE, AA
	addl $64 * SIZE, BB
	decl %eax
	jne .L11
	ALIGN_4
#endif
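
/* .L15/.L13 process the remaining K mod 8 iterations one step at a time;
   .L14 then recombines the partial sums and applies alpha before the
   results are stored to C. */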
.L15:
#ifndef TRMMKERNEL
	movl K, %eax
#else
	movl KKK, %eax
#endif
	movaps ALPHA_R, %xmm1
	movaps ALPHA_I, %xmm3
	andl $7, %eax # if (k & 1)
	BRANCH
	je .L14
	ALIGN_4

.L13:
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movshdup 0 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	ADDSUB %xmm2, %xmm5
	movsldup 4 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm6
	movshdup 4 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	movaps 4 * SIZE(AA), %xmm0
	ADDSUB %xmm2, %xmm7
	movsldup 8 * SIZE(BB), %xmm2

	addl $4 * SIZE, AA
	addl $8 * SIZE, BB
	decl %eax
	jg .L13
	ALIGN_4

.L14:
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
	shufps $0xb1, %xmm5, %xmm5
	shufps $0xb1, %xmm7, %xmm7
	addsubps %xmm5, %xmm4
	addsubps %xmm7, %xmm6

	movaps %xmm4, %xmm5
	movaps %xmm6, %xmm7
	shufps $0xb1, %xmm4, %xmm4
	shufps $0xb1, %xmm6, %xmm6
#else
	shufps $0xb1, %xmm4, %xmm4
	shufps $0xb1, %xmm6, %xmm6
	addsubps %xmm4, %xmm5
	addsubps %xmm6, %xmm7

	movaps %xmm5, %xmm4
	movaps %xmm7, %xmm6
	shufps $0xb1, %xmm5, %xmm5
	shufps $0xb1, %xmm7, %xmm7
#endif

	mulps %xmm1, %xmm5
	mulps %xmm3, %xmm4
	mulps %xmm1, %xmm7
	mulps %xmm3, %xmm6

	addps %xmm5, %xmm4
	addps %xmm7, %xmm6

#ifndef TRMMKERNEL
	shufps $0xe4, %xmm0, %xmm0
	movsd 0 * SIZE(%esi), %xmm0
	movhps 2 * SIZE(%esi), %xmm0
	shufps $0xe4, %xmm2, %xmm2
	movsd 0 * SIZE(%esi, LDC), %xmm2
	movhps 2 * SIZE(%esi, LDC), %xmm2

	addps %xmm0, %xmm4
	addps %xmm2, %xmm6
#endif

	movsd %xmm4, 0 * SIZE(%esi)
	movhps %xmm4, 2 * SIZE(%esi)
	movsd %xmm6, 0 * SIZE(%esi, LDC)
	movhps %xmm6, 2 * SIZE(%esi, LDC)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
	movl K, %eax
	subl KKK, %eax
	leal (,%eax, 8), %eax
	leal (AA, %eax, 2), AA
	leal (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
	addl $2, KK
#endif

	addl $4 * SIZE, %esi # coffset += 4
	decl %ebx # i --
	jg .L10
	ALIGN_4
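
/* .L30: leftover row when M is odd (1x2 tile), same structure as above. */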
.L30:
	movl M, %ebx
	andl $1, %ebx
	jle .L99
	ALIGN_4

.L40:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

	leal BUFFER, BB # boffset1 = boffset
#else
	leal BUFFER, BB # boffset1 = boffset
	movl KK, %eax
	leal (, %eax, 8), %eax
	leal (AA, %eax, 1), AA
	leal (BB, %eax, 4), BB
#endif

	movddup 0 * SIZE(AA), %xmm0
	pxor %xmm4, %xmm4
	movddup 8 * SIZE(AA), %xmm1
	pxor %xmm5, %xmm5
	movsd 0 * SIZE(BB), %xmm2
	movsd 16 * SIZE(BB), %xmm3

#ifndef TRMMKERNEL
	movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl K, %eax
	subl KK, %eax
	movl %eax, KKK
#else
	movl KK, %eax
#ifdef LEFT
	addl $1, %eax
#else
	addl $2, %eax
#endif
	movl %eax, KKK
#endif
	sarl $3, %eax
	je .L42
	ALIGN_4

.L41:
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
	addps %xmm2, %xmm4
	movsd 4 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 2 * SIZE(AA), %xmm0
	addps %xmm2, %xmm5
	movsd 8 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movsd 12 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 4 * SIZE(AA), %xmm0
	addps %xmm2, %xmm5
	movsd 32 * SIZE(BB), %xmm2
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm0, %xmm3
	addps %xmm3, %xmm4
	movsd 20 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm0, %xmm3
	movddup 6 * SIZE(AA), %xmm0
	addps %xmm3, %xmm5
	movsd 24 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm0, %xmm3
	addps %xmm3, %xmm4
	movsd 28 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm0, %xmm3
	movddup 16 * SIZE(AA), %xmm0
	addps %xmm3, %xmm5
	movsd 48 * SIZE(BB), %xmm3
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm1, %xmm2
	addps %xmm2, %xmm4
	movsd 36 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm1, %xmm2
	movddup 10 * SIZE(AA), %xmm1
	addps %xmm2, %xmm5
	movsd 40 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm1, %xmm2
	addps %xmm2, %xmm4
	movsd 44 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm1, %xmm2
	movddup 12 * SIZE(AA), %xmm1
	addps %xmm2, %xmm5
	movsd 64 * SIZE(BB), %xmm2
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	addps %xmm3, %xmm4
	movsd 52 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	movddup 14 * SIZE(AA), %xmm1
	addps %xmm3, %xmm5
	movsd 56 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	addps %xmm3, %xmm4
	movsd 60 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	movddup 24 * SIZE(AA), %xmm1
	addps %xmm3, %xmm5
	movsd 80 * SIZE(BB), %xmm3

	addl $16 * SIZE, AA
	addl $64 * SIZE, BB
	decl %eax
	jne .L41
	ALIGN_4

.L42:
#ifndef TRMMKERNEL
	movl K, %eax
#else
	movl KKK, %eax
#endif
	movaps ALPHA_R, %xmm1
	movaps ALPHA_I, %xmm3
	andl $7, %eax # if (k & 1)
	BRANCH
	je .L44
	ALIGN_4

.L43:
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movsd 4 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 2 * SIZE(AA), %xmm0
	addps %xmm2, %xmm5
	movsd 8 * SIZE(BB), %xmm2

	addl $2 * SIZE, AA
	addl $8 * SIZE, BB
	decl %eax
	jg .L43
	ALIGN_4

.L44:
	movaps %xmm4, %xmm6
	movlhps %xmm5, %xmm4
	movhlps %xmm6, %xmm5

#if defined(NR) || defined(NC) || defined(TR) || defined(TC) || \
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
	cmpeqps %xmm7, %xmm7
	pslld $31, %xmm7
	xorps %xmm7, %xmm5
#endif

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
	shufps $0xb1, %xmm5, %xmm5
	addsubps %xmm5, %xmm4
	movaps %xmm4, %xmm5
	shufps $0xb1, %xmm4, %xmm4
#else
	shufps $0xb1, %xmm4, %xmm4
	addsubps %xmm4, %xmm5
	movaps %xmm5, %xmm4
	shufps $0xb1, %xmm5, %xmm5
#endif

	mulps %xmm1, %xmm5
	mulps %xmm3, %xmm4

	addps %xmm5, %xmm4

#ifndef TRMMKERNEL
	movsd 0 * SIZE(%esi), %xmm0
	movhps 0 * SIZE(%esi, LDC), %xmm0
	addps %xmm0, %xmm4
#endif

	movsd %xmm4, 0 * SIZE(%esi)
	movhps %xmm4, 0 * SIZE(%esi, LDC)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
	movl K, %eax
	subl KKK, %eax
	leal (,%eax, 8), %eax
	leal (AA, %eax, 1), AA
	leal (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
	addl $1, KK
#endif
	ALIGN_4

.L99:
#if defined(TRMMKERNEL) && !defined(LEFT)
	addl $2, KK
#endif

	leal (LDC, LDC), %eax
	addl %eax, C # c += 2 * ldc
	decl J # j --
	jg .L01
	ALIGN_4
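
/* .L100: leftover column when N is odd; the B panel is repacked and the
   2x1 and 1x1 tiles are handled below. */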
.L100:
	movl N, %eax
	andl $1, %eax
	jle .L999
	ALIGN_4

.L101:
#if defined(TRMMKERNEL) && defined(LEFT)
	movl OFFSET, %eax
	movl %eax, KK
#endif

/* Copying to Sub Buffer */
	leal BUFFER, %ecx

	movl K, %eax
	sarl $3, %eax
	jle .L103
	ALIGN_4

.L102:
	movddup 0 * SIZE(B), %xmm0
	movddup 2 * SIZE(B), %xmm1
	movddup 4 * SIZE(B), %xmm2
	movddup 6 * SIZE(B), %xmm3
	movddup 8 * SIZE(B), %xmm4
	movddup 10 * SIZE(B), %xmm5
	movddup 12 * SIZE(B), %xmm6
	movddup 14 * SIZE(B), %xmm7

	movaps %xmm0, 0 * SIZE(BB)
	movaps %xmm1, 4 * SIZE(BB)
	movaps %xmm2, 8 * SIZE(BB)
	movaps %xmm3, 12 * SIZE(BB)
	movaps %xmm4, 16 * SIZE(BB)
	movaps %xmm5, 20 * SIZE(BB)
	movaps %xmm6, 24 * SIZE(BB)
	movaps %xmm7, 28 * SIZE(BB)

	prefetcht0 104 * SIZE(B)

	addl $16 * SIZE, B
	addl $32 * SIZE, BB
	decl %eax
	jne .L102
	ALIGN_4

.L103:
	movl K, %eax
	andl $7, %eax
	BRANCH
	jle .L105
	ALIGN_4

.L104:
	movddup 0 * SIZE(B), %xmm0
	movaps %xmm0, 0 * SIZE(BB)

	addl $ 2 * SIZE, %edi
	addl $ 4 * SIZE, %ecx
	decl %eax
	jne .L104
	ALIGN_4

.L105:
	movl C, %esi
	movl A, AA
	movl M, %ebx
	sarl $1, %ebx
	jle .L130
	ALIGN_4

.L110:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

	leal BUFFER, BB # boffset1 = boffset
#else
	leal BUFFER, BB # boffset1 = boffset
	movl KK, %eax
	leal (, %eax, 8), %eax
	leal (AA, %eax, 2), AA
	leal (BB, %eax, 2), BB
#endif

	movaps 0 * SIZE(AA), %xmm0
	pxor %xmm4, %xmm4
	movaps 16 * SIZE(AA), %xmm1
	pxor %xmm5, %xmm5
	movsldup 0 * SIZE(BB), %xmm2
	pxor %xmm6, %xmm6
	movsldup 16 * SIZE(BB), %xmm3
	pxor %xmm7, %xmm7

#ifdef PENTIUM4
	prefetchnta 4 * SIZE(%esi)
#endif

#ifndef TRMMKERNEL
	movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl K, %eax
	subl KK, %eax
	movl %eax, KKK
#else
	movl KK, %eax
#ifdef LEFT
	addl $2, %eax
#else
	addl $1, %eax
#endif
	movl %eax, KKK
#endif
	sarl $3, %eax
	je .L112
	ALIGN_4

.L111:
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
	movshdup 0 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	movaps 4 * SIZE(AA), %xmm0
	ADDSUB %xmm2, %xmm5
	movsldup 4 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movshdup 4 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	movaps 8 * SIZE(AA), %xmm0
	ADDSUB %xmm2, %xmm5
	movsldup 8 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movshdup 8 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	movaps 12 * SIZE(AA), %xmm0
	ADDSUB %xmm2, %xmm5
	movsldup 12 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movshdup 12 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	movaps 32 * SIZE(AA), %xmm0
	ADDSUB %xmm2, %xmm5
	movsldup 32 * SIZE(BB), %xmm2
	mulps %xmm1, %xmm3
	addps %xmm3, %xmm4
	movshdup 16 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	movaps 20 * SIZE(AA), %xmm1
	ADDSUB %xmm3, %xmm5
	movsldup 20 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	addps %xmm3, %xmm4
	movshdup 20 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	movaps 24 * SIZE(AA), %xmm1
	ADDSUB %xmm3, %xmm5
	movsldup 24 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	addps %xmm3, %xmm4
	movshdup 24 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	movaps 28 * SIZE(AA), %xmm1
	ADDSUB %xmm3, %xmm5
	movsldup 28 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	addps %xmm3, %xmm4
	movshdup 28 * SIZE(BB), %xmm3
	mulps %xmm1, %xmm3
	movaps 48 * SIZE(AA), %xmm1
	ADDSUB %xmm3, %xmm5
	movsldup 48 * SIZE(BB), %xmm3

	addl $32 * SIZE, AA
	addl $32 * SIZE, BB
	decl %eax
	jne .L111
	ALIGN_4

.L112:
#ifndef TRMMKERNEL
	movl K, %eax
#else
	movl KKK, %eax
#endif
	movaps ALPHA_R, %xmm1
	movaps ALPHA_I, %xmm3
	andl $7, %eax # if (k & 1)
	BRANCH
	je .L114
	ALIGN_4

.L113:
	mulps %xmm0, %xmm2
	addps %xmm2, %xmm4
	movshdup 0 * SIZE(BB), %xmm2
	mulps %xmm0, %xmm2
	movaps 4 * SIZE(AA), %xmm0
	ADDSUB %xmm2, %xmm5
	movsldup 4 * SIZE(BB), %xmm2

	addl $ 4 * SIZE, AA
	addl $ 4 * SIZE, BB
	decl %eax
	jg .L113
	ALIGN_4

.L114:
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
	shufps $0xb1, %xmm5, %xmm5
	addsubps %xmm5, %xmm4
	movaps %xmm4, %xmm5
	shufps $0xb1, %xmm4, %xmm4
#else
	shufps $0xb1, %xmm4, %xmm4
	addsubps %xmm4, %xmm5
	movaps %xmm5, %xmm4
	shufps $0xb1, %xmm5, %xmm5
#endif

	mulps %xmm1, %xmm5
	mulps %xmm3, %xmm4

	addps %xmm5, %xmm4

#ifndef TRMMKERNEL
	movsd 0 * SIZE(%esi), %xmm0
	movhps 2 * SIZE(%esi), %xmm0
	addps %xmm0, %xmm4
#endif

	movsd %xmm4, 0 * SIZE(%esi)
	movhps %xmm4, 2 * SIZE(%esi)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
	movl K, %eax
	subl KKK, %eax
	leal (,%eax, 8), %eax
	leal (AA, %eax, 2), AA
	leal (BB, %eax, 2), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
	addl $2, KK
#endif

	addl $4 * SIZE, %esi # coffset += 4
	decl %ebx # i --
	jg .L110
	ALIGN_4
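
/* .L130/.L140: final 1x1 tile when both M and N are odd. */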
.L130:
	movl M, %ebx
	andl $1, %ebx
	jle .L999
	ALIGN_4

.L140:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

	leal BUFFER, BB # boffset1 = boffset
#else
	leal BUFFER, BB # boffset1 = boffset
	movl KK, %eax
	leal (, %eax, 8), %eax
	leal (AA, %eax, 1), AA
	leal (BB, %eax, 2), BB
#endif

	movddup 0 * SIZE(AA), %xmm0
	pxor %xmm4, %xmm4
	movddup 8 * SIZE(AA), %xmm1
	pxor %xmm5, %xmm5
	movsd 0 * SIZE(BB), %xmm2
	movsd 16 * SIZE(BB), %xmm3

#ifndef TRMMKERNEL
	movl K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl K, %eax
	subl KK, %eax
	movl %eax, KKK
#else
	movl KK, %eax
#ifdef LEFT
	addl $1, %eax
#else
	addl $1, %eax
#endif
	movl %eax, KKK
#endif
	sarl $3, %eax
	je .L142
	ALIGN_4

.L141:
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
	movddup 2 * SIZE(AA), %xmm0
	addps %xmm2, %xmm4
	movsd 4 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 4 * SIZE(AA), %xmm0
	addps %xmm2, %xmm5
	movsd 8 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 6 * SIZE(AA), %xmm0
	addps %xmm2, %xmm4
	movsd 12 * SIZE(BB), %xmm2
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 16 * SIZE(AA), %xmm0
	addps %xmm2, %xmm5
	movsd 32 * SIZE(BB), %xmm2
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	movddup 10 * SIZE(AA), %xmm1
	addps %xmm3, %xmm4
	movsd 20 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	movddup 12 * SIZE(AA), %xmm1
	addps %xmm3, %xmm5
	movsd 24 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	movddup 14 * SIZE(AA), %xmm1
	addps %xmm3, %xmm4
	movsd 28 * SIZE(BB), %xmm3
	shufps $0x50, %xmm3, %xmm3
	mulps %xmm1, %xmm3
	movddup 24 * SIZE(AA), %xmm1
	addps %xmm3, %xmm5
	movsd 48 * SIZE(BB), %xmm3

	addl $ 16 * SIZE, AA
	addl $ 32 * SIZE, BB
	decl %eax
	jne .L141
	ALIGN_4

.L142:
#ifndef TRMMKERNEL
	movl K, %eax
#else
	movl KKK, %eax
#endif
	movaps ALPHA_R, %xmm1
	movaps ALPHA_I, %xmm3
	andl $7, %eax # if (k & 1)
	BRANCH
	je .L144
	ALIGN_4

.L143:
	shufps $0x50, %xmm2, %xmm2
	mulps %xmm0, %xmm2
	movddup 2 * SIZE(AA), %xmm0
	addps %xmm2, %xmm4
	movsd 4 * SIZE(BB), %xmm2

	addl $2 * SIZE, AA
	addl $4 * SIZE, BB
	decl %eax
	jg .L143
	ALIGN_4

.L144:
	addps %xmm5, %xmm4
	movhlps %xmm4, %xmm5

#if defined(NR) || defined(NC) || defined(TR) || defined(TC) || \
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
	cmpeqps %xmm7, %xmm7
	pslld $31, %xmm7
	xorps %xmm7, %xmm5
#endif

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
	shufps $0xb1, %xmm5, %xmm5
	addsubps %xmm5, %xmm4
	movaps %xmm4, %xmm5
	shufps $0xb1, %xmm4, %xmm4
#else
	shufps $0xb1, %xmm4, %xmm4
	addsubps %xmm4, %xmm5
	movaps %xmm5, %xmm4
	shufps $0xb1, %xmm5, %xmm5
#endif

	mulps %xmm1, %xmm5
	mulps %xmm3, %xmm4

	addps %xmm5, %xmm4

#ifndef TRMMKERNEL
	movsd 0 * SIZE(%esi), %xmm0
	addps %xmm0, %xmm4
#endif

	movsd %xmm4, 0 * SIZE(%esi)
	ALIGN_4

.L999:
	movl OLD_STACK, %esp

	popl %ebx
	popl %esi
	popl %edi
	popl %ebp
	ret

	EPILOGUE