
zgemv_t_sse.S 11 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED.  IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT      */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#ifdef movsd
#undef movsd
#endif
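
/* Per-CPU tuning: the blocks below choose the prefetch instruction and
   look-ahead distance for each supported microarchitecture.  On PENTIUM3
   and OPTERON the movsd load is also remapped to movlps; unlike movsd,
   movlps leaves the upper half of the destination register untouched,
   which is why the single-element tail case further down clears %xmm4
   first when that remap is active. */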
#ifdef PENTIUM3
#ifdef HAVE_SSE
#define PREFETCH	prefetcht0
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(16 * 2)
#endif
#define movsd		movlps
#endif

#ifdef PENTIUM4
#define PREFETCH	prefetcht0
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(16 * 2)
#endif

#if defined(CORE2) || defined(PENRYN) || defined(DUNNINGTON) || defined(NEHALEM) || defined(SANDYBRIDGE)
#define PREFETCH	prefetcht0
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(16 * 7)
#endif

#ifdef OPTERON
#define PREFETCH	prefetchnta
#define PREFETCHW	prefetchw
#define PREFETCHSIZE	(16 * 3)
#define movsd		movlps
#endif

#if defined(BARCELONA) || defined(BULLDOZER)
#define PREFETCH	prefetchnta
#define PREFETCHW	prefetchw
#define PREFETCHSIZE	(16 * 5)
#endif

#ifdef ATOM
#define PREFETCH	prefetchnta
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(16 * 6)
#endif

#ifdef NANO
#define PREFETCH	prefetcht0
#define PREFETCHSIZE	(16 * 4)
#endif
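
/* Stack layout (32-bit cdecl): the incoming arguments are addressed
   relative to %esp after the prologue has reserved ARGS bytes of scratch
   and pushed four callee-saved registers (STACKSIZE = 16).  MMM, XX and
   AA are scratch slots inside the ARGS area that carry the state of the
   outer blocking loop over M. */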
#define STACKSIZE	16
#define ARGS		20

#define M		 4 + STACKSIZE+ARGS(%esp)
#define N		 8 + STACKSIZE+ARGS(%esp)
#define ALPHA_R		16 + STACKSIZE+ARGS(%esp)
#define ALPHA_I		20 + STACKSIZE+ARGS(%esp)
#define A		24 + STACKSIZE+ARGS(%esp)
#define STACK_LDA	28 + STACKSIZE+ARGS(%esp)
#define STACK_X		32 + STACKSIZE+ARGS(%esp)
#define STACK_INCX	36 + STACKSIZE+ARGS(%esp)
#define Y		40 + STACKSIZE+ARGS(%esp)
#define STACK_INCY	44 + STACKSIZE+ARGS(%esp)
#define BUFFER		48 + STACKSIZE+ARGS(%esp)

#define MMM		 0 + ARGS(%esp)
#define XX		 4 + ARGS(%esp)
#define AA		 8 + ARGS(%esp)
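
/* Register assignments.  INCX shares %ebx with J: the x stride is only
   needed while x is copied into BUFFER, after which J is reused as the
   column counter (and INCX is reloaded from the stack when needed). */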
#define I	%eax
#define J	%ebx
#define INCX	J
#define INCY	%ecx
#define A1	%esi
#define X	%edx
#define Y1	%edi
#define LDA	%ebp
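
/* SUBPS folds the conjugation into the accumulation of the cross terms:
   without CONJ they are added, with CONJ they are subtracted. */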
#undef SUBPS

#ifndef CONJ
#define SUBPS	addps
#else
#define SUBPS	subps
#endif
	PROLOGUE

	subl	$ARGS, %esp

	pushl	%ebp
	pushl	%edi
	pushl	%esi
	pushl	%ebx

	PROFCODE

	movl	STACK_X, X
	movl	X, XX
	movl	A, J
	movl	J, AA		# backup A
	movl	M, J
	movl	J, MMM
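
/* Outer blocking over M: the vector length is processed in chunks of at
   most (1 << 20) - 8 complex elements (J), presumably so the copy of x
   into BUFFER never outgrows the preallocated buffer.  MMM tracks how
   many elements remain; the final, shorter block falls through below. */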
.L0t:
	xorl	J, J
	addl	$1, J
	sall	$20, J
	subl	$8, J
	subl	J, MMM		# MMM -= J
	movl	J, M
	jge	.L00t
	ALIGN_4

	movl	MMM, %eax
	addl	J, %eax
	jle	.L999x
	movl	%eax, M

.L00t:
	movl	AA, %eax
	movl	%eax, A
	movl	XX, %eax
	movl	%eax, X

	movl	STACK_LDA,  LDA
	movl	STACK_INCX, INCX
	movl	STACK_INCY, INCY

	sall	$ZBASE_SHIFT, INCX
	sall	$ZBASE_SHIFT, LDA
	sall	$ZBASE_SHIFT, INCY

	subl	$-32 * SIZE, A

	cmpl	$0, N
	jle	.L999
	cmpl	$0, M
	jle	.L999
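
/* Copy x (stride INCX) into BUFFER as a dense, 16-byte-aligned array so
   the inner loops can use aligned movaps loads; four complex elements
   are gathered per iteration of .L02. */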
	movl	BUFFER, Y1

	movl	M, I
	sarl	$2, I
	jle	.L05
	ALIGN_4

.L02:
	movsd	(X), %xmm0
	addl	INCX, X
	movhps	(X), %xmm0
	addl	INCX, X
	movsd	(X), %xmm1
	addl	INCX, X
	movhps	(X), %xmm1
	addl	INCX, X
	movsd	(X), %xmm2
	addl	INCX, X
	movhps	(X), %xmm2
	addl	INCX, X
	movsd	(X), %xmm3
	addl	INCX, X
	movhps	(X), %xmm3
	addl	INCX, X

	movaps	%xmm0,  0 * SIZE(Y1)
	movaps	%xmm1,  4 * SIZE(Y1)
	movaps	%xmm2,  8 * SIZE(Y1)
	movaps	%xmm3, 12 * SIZE(Y1)

	addl	$16 * SIZE, Y1
	decl	I
	jg	.L02
	ALIGN_4
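
/* Remainder of the copy: the last (M mod 4) complex elements, one at a time. */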
.L05:
	movl	M, I
	andl	$3, I
	jle	.L10
	ALIGN_2

.L06:
	movsd	(X), %xmm0
	addl	INCX, X
	movlps	%xmm0, (Y1)
	addl	$2 * SIZE, Y1
	decl	I
	jg	.L06
	ALIGN_4
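
/* Main computation: loop over the N columns of A.  Each pass through .L11
   accumulates the dot product of one column with the buffered x in
   %xmm0/%xmm1 and then updates one element of y. */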
.L10:
	movl	Y, Y1
	movl	N, J
	ALIGN_3

.L11:
	movl	BUFFER, X
	addl	$32 * SIZE, X

	movl	A, A1
	addl	LDA, A

	xorps	%xmm0, %xmm0
	xorps	%xmm1, %xmm1

	movaps	-32 * SIZE(X), %xmm2
	movaps	-28 * SIZE(X), %xmm3

	movl	M, I
	sarl	$3, I
	jle	.L15

	movsd	-32 * SIZE(A1), %xmm4
	movhps	-30 * SIZE(A1), %xmm4
	movsd	-28 * SIZE(A1), %xmm6
	movhps	-26 * SIZE(A1), %xmm6

	decl	I
	jle	.L13
	ALIGN_4
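
/* Inner loop, unrolled to 8 complex elements per iteration.  For each pair
   of A values (%xmm4/%xmm6) a copy with real and imaginary parts swapped
   is made (%xmm5/%xmm7, shuffle 0xb1); the straight products accumulate in
   %xmm0 and the swapped (cross) products in %xmm1, SUBPS selecting the
   conjugated or unconjugated sign. */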
.L12:
#ifdef PREFETCH
	PREFETCH	PREFETCHSIZE * SIZE(A1)
#endif

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	movaps	-24 * SIZE(X), %xmm2
	SUBPS	%xmm5, %xmm1

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm6, %xmm7
#else
	movaps	%xmm6, %xmm7
	shufps	$0xb1, %xmm7, %xmm7
#endif
	mulps	%xmm3, %xmm6
	addps	%xmm6, %xmm0
	mulps	%xmm3, %xmm7
	movaps	-20 * SIZE(X), %xmm3
	SUBPS	%xmm7, %xmm1

	movsd	-24 * SIZE(A1), %xmm4
	movhps	-22 * SIZE(A1), %xmm4
	movsd	-20 * SIZE(A1), %xmm6
	movhps	-18 * SIZE(A1), %xmm6

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	movaps	-16 * SIZE(X), %xmm2
	SUBPS	%xmm5, %xmm1

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm6, %xmm7
#else
	movaps	%xmm6, %xmm7
	shufps	$0xb1, %xmm7, %xmm7
#endif
	mulps	%xmm3, %xmm6
	addps	%xmm6, %xmm0
	mulps	%xmm3, %xmm7
	movaps	-12 * SIZE(X), %xmm3
	SUBPS	%xmm7, %xmm1

	movsd	-16 * SIZE(A1), %xmm4
	movhps	-14 * SIZE(A1), %xmm4
	movsd	-12 * SIZE(A1), %xmm6
	movhps	-10 * SIZE(A1), %xmm6

	addl	$16 * SIZE, A1
	addl	$16 * SIZE, X

	decl	I
	jg	.L12
	ALIGN_4
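
/* Last unrolled iteration, peeled off so it issues no prefetch and does
   not preload A values for an iteration that will not run. */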
.L13:
#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	movaps	-24 * SIZE(X), %xmm2
	SUBPS	%xmm5, %xmm1

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm6, %xmm7
#else
	movaps	%xmm6, %xmm7
	shufps	$0xb1, %xmm7, %xmm7
#endif
	mulps	%xmm3, %xmm6
	addps	%xmm6, %xmm0
	mulps	%xmm3, %xmm7
	movaps	-20 * SIZE(X), %xmm3
	SUBPS	%xmm7, %xmm1

	movsd	-24 * SIZE(A1), %xmm4
	movhps	-22 * SIZE(A1), %xmm4
	movsd	-20 * SIZE(A1), %xmm6
	movhps	-18 * SIZE(A1), %xmm6

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	movaps	-16 * SIZE(X), %xmm2
	SUBPS	%xmm5, %xmm1

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm6, %xmm7
#else
	movaps	%xmm6, %xmm7
	shufps	$0xb1, %xmm7, %xmm7
#endif
	mulps	%xmm3, %xmm6
	addps	%xmm6, %xmm0
	mulps	%xmm3, %xmm7
	movaps	-12 * SIZE(X), %xmm3
	SUBPS	%xmm7, %xmm1

	addl	$16 * SIZE, A1
	addl	$16 * SIZE, X
	ALIGN_4
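
/* Tail handling: 4 remaining complex elements if bit 2 of M is set. */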
.L15:
	testl	$4, M
	jle	.L17

	movsd	-32 * SIZE(A1), %xmm4
	movhps	-30 * SIZE(A1), %xmm4
	movsd	-28 * SIZE(A1), %xmm6
	movhps	-26 * SIZE(A1), %xmm6

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	movaps	-24 * SIZE(X), %xmm2
	SUBPS	%xmm5, %xmm1

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm6, %xmm7
#else
	movaps	%xmm6, %xmm7
	shufps	$0xb1, %xmm7, %xmm7
#endif
	mulps	%xmm3, %xmm6
	addps	%xmm6, %xmm0
	mulps	%xmm3, %xmm7
	movaps	-20 * SIZE(X), %xmm3
	SUBPS	%xmm7, %xmm1

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X
	ALIGN_4
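
/* Tail handling: 2 remaining complex elements if bit 1 of M is set. */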
.L17:
	testl	$2, M
	jle	.L18

	movsd	-32 * SIZE(A1), %xmm4
	movhps	-30 * SIZE(A1), %xmm4

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	SUBPS	%xmm5, %xmm1

	movaps	%xmm3, %xmm2
	addl	$4 * SIZE, A1
	ALIGN_4
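
/* Tail handling: the final complex element if M is odd.  When movsd has
   been remapped to movlps (PENTIUM3/OPTERON) the destination is cleared
   first, because movlps leaves the upper half of the register unchanged. */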
.L18:
	testl	$1, M
	jle	.L19

#ifdef movsd
	xorps	%xmm4, %xmm4
#endif
	movsd	-32 * SIZE(A1), %xmm4
	shufps	$0x44, %xmm2, %xmm2

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm4, %xmm5
#else
	movaps	%xmm4, %xmm5
	shufps	$0xb1, %xmm5, %xmm5
#endif
	mulps	%xmm2, %xmm4
	addps	%xmm4, %xmm0
	mulps	%xmm2, %xmm5
	SUBPS	%xmm5, %xmm1
	ALIGN_4
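
/* Reduction and update of y: build a sign mask with 0x80000000 in the high
   word of each 64-bit pair, negate the odd-indexed lanes of one accumulator
   (which one depends on CONJ/XCONJ), reduce %xmm0/%xmm1 horizontally to a
   single complex value, multiply it by alpha (ALPHA_R/ALPHA_I), and add the
   result to the current element of y. */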
.L19:
#ifdef HAVE_SSE2
	pcmpeqb	%xmm5, %xmm5
	psllq	$63, %xmm5
#else
	subl	$8, %esp
	movl	$0x00000000, 0(%esp)
	movl	$0x80000000, 4(%esp)
	movlps	(%esp), %xmm5
	addl	$8, %esp
	movlhps	%xmm5, %xmm5
#endif

#if (!defined(CONJ) && !defined(XCONJ)) || (defined(CONJ) && defined(XCONJ))
	xorps	%xmm5, %xmm0
#else
	xorps	%xmm5, %xmm1
#endif

#ifdef HAVE_SSE3
	haddps	%xmm1, %xmm0
	haddps	%xmm0, %xmm0
#else
	movaps	%xmm0, %xmm2
	unpcklps %xmm1, %xmm0
	unpckhps %xmm1, %xmm2
	addps	%xmm2, %xmm0
	movhlps	%xmm0, %xmm1
	addps	%xmm1, %xmm0
#endif

#ifdef HAVE_SSE2
	pshufd	$0xb1, %xmm0, %xmm1
#else
	movaps	%xmm0, %xmm1
	shufps	$0xb1, %xmm1, %xmm1
#endif

	movsd	ALPHA_R, %xmm7
	movlhps	%xmm7, %xmm7

	mulps	%xmm7, %xmm0
	mulps	%xmm7, %xmm1

	xorps	%xmm5, %xmm0

#ifdef HAVE_SSE3
	haddps	%xmm1, %xmm0
#else
	movaps	%xmm0, %xmm2
	shufps	$0x88, %xmm1, %xmm0
	shufps	$0xdd, %xmm1, %xmm2
	addps	%xmm2, %xmm0
#endif

	movsd	0 * SIZE(Y1), %xmm4
	shufps	$0xd8, %xmm0, %xmm0
	addps	%xmm4, %xmm0
	movlps	%xmm0, 0 * SIZE(Y1)
	addl	INCY, Y1

	decl	J
	jg	.L11
	ALIGN_4
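
/* Advance to the next M block: bump the saved A pointer by M complex
   elements and the saved x pointer by M * INCX complex elements, then
   restart the outer blocking loop. */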
.L999:
	movl	M, %eax
	sall	$ZBASE_SHIFT, %eax
	addl	%eax, AA
	movl	STACK_INCX, INCX
	imull	INCX, %eax
	addl	%eax, XX
	jmp	.L0t
	ALIGN_4
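
/* Final exit: restore the callee-saved registers, release the scratch
   area and return. */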
.L999x:
	popl	%ebx
	popl	%esi
	popl	%edi
	popl	%ebp

	addl	$ARGS, %esp
	ret

	EPILOGUE