
gemv_t_sse2.S

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT           */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,           */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF          */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT          */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,        */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES          */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE         */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR              */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF        */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT         */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT        */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE               */
/* POSSIBILITY OF SUCH DAMAGE.                                       */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

/* Per-microarchitecture prefetch instruction and prefetch distance. */
#ifdef PENTIUM4
#define PREFETCH	prefetcht0
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(8 * 2)
#endif

#if defined(CORE2) || defined(PENRYN) || defined(DUNNINGTON) || defined(NEHALEM) || defined(SANDYBRIDGE)
#define PREFETCH	prefetcht0
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(8 * 7)
#endif

#ifdef OPTERON
#define PREFETCH	prefetchnta
#define PREFETCHW	prefetchw
#define PREFETCHSIZE	(8 * 3)
#define movsd		movlps
#endif

#ifdef BARCELONA
#define PREFETCH	prefetchnta
#define PREFETCHW	prefetchw
#define PREFETCHSIZE	(8 * 5)
#endif

#ifdef ATOM
#define PREFETCH	prefetch
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(8 * 6)
#endif

#ifdef NANO
#define PREFETCH	prefetcht0
#define PREFETCHSIZE	(8 * 4)
#endif
#define STACKSIZE	16
#define ARGS		20

/* Caller's stack arguments (i386 calling convention; offsets are
   relative to %esp after the prologue below). */
#define M		 4 + STACKSIZE+ARGS(%esp)
#define N		 8 + STACKSIZE+ARGS(%esp)
#define ALPHA		16 + STACKSIZE+ARGS(%esp)
#define A		24 + STACKSIZE+ARGS(%esp)
#define STACK_LDA	28 + STACKSIZE+ARGS(%esp)
#define STACK_X		32 + STACKSIZE+ARGS(%esp)
#define STACK_INCX	36 + STACKSIZE+ARGS(%esp)
#define Y		40 + STACKSIZE+ARGS(%esp)
#define STACK_INCY	44 + STACKSIZE+ARGS(%esp)
#define BUFFER		48 + STACKSIZE+ARGS(%esp)

/* Locals in the ARGS scratch area, used by the M-blocking loop. */
#define MMM	0+ARGS(%esp)
#define AA	4+ARGS(%esp)
#define XX	8+ARGS(%esp)

/* Register roles. */
#define I	%eax
#define J	%ebx
#define INCX	J
#define INCY	%ecx
#define A1	%esi
#define X	%edx
#define Y1	%edi
#define LDA	%ebp
	PROLOGUE

	subl	$ARGS, %esp
	pushl	%ebp
	pushl	%edi
	pushl	%esi
	pushl	%ebx

	PROFCODE

	movl	STACK_X, X
	movl	X, XX
	movl	A, J
	movl	J, AA		# back up A
	movl	M, J
	movl	J, MMM		# copy M to MMM

/* Block the m dimension so the packed copy of x fits in the buffer:
   at most 2^21 - 4 doubles per pass. */
.L0t:
	xorl	J, J
	addl	$1, J
	sall	$21, J		# J = 2^21 doubles * sizeof(double) = 16 MB buffer
	subl	$4, J		# don't use the last 4 doubles of the buffer
	subl	J, MMM		# MMM = MMM - J
	movl	J, M
	jge	.L00t
	ALIGN_4

	movl	MMM, %eax
	addl	J, %eax		# rows remaining for the final pass
	jle	.L999x
	movl	%eax, M

.L00t:
	movl	XX, %eax
	movl	%eax, X
	movl	AA, %eax
	movl	%eax, A		# reload A for this pass (AA is advanced at .L999)

	movl	STACK_LDA, LDA
	movl	STACK_INCX, INCX
	movl	STACK_INCY, INCY

	leal	(,INCX, SIZE), INCX
	leal	(,INCY, SIZE), INCY
	leal	(,LDA, SIZE), LDA

	subl	$-16 * SIZE, A	# bias A so the loops can use offsets from -16 * SIZE

	cmpl	$0, N
	jle	.L999
	cmpl	$0, M
	jle	.L999

	movl	BUFFER, Y1

	movl	M, I
	sarl	$3, I
	jle	.L05
	ALIGN_4
/* Pack x (stride INCX) into the contiguous buffer, 8 doubles per
   iteration, so the main loops can use aligned unit-stride loads. */
.L02:
	movsd	(X), %xmm0
	addl	INCX, X
	movhpd	(X), %xmm0
	addl	INCX, X
	movsd	(X), %xmm1
	addl	INCX, X
	movhpd	(X), %xmm1
	addl	INCX, X
	movsd	(X), %xmm2
	addl	INCX, X
	movhpd	(X), %xmm2
	addl	INCX, X
	movsd	(X), %xmm3
	addl	INCX, X
	movhpd	(X), %xmm3
	addl	INCX, X

	movapd	%xmm0, 0 * SIZE(Y1)
	movapd	%xmm1, 2 * SIZE(Y1)
	movapd	%xmm2, 4 * SIZE(Y1)
	movapd	%xmm3, 6 * SIZE(Y1)

	addl	$8 * SIZE, Y1
	decl	I
	jg	.L02
	ALIGN_4

.L05:
	movl	M, I
	andl	$7, I
	jle	.L10
	ALIGN_2

/* Remaining 1..7 elements, one at a time. */
.L06:
	movsd	(X), %xmm0
	addl	INCX, X
	movsd	%xmm0, 0 * SIZE(Y1)
	addl	$SIZE, Y1
	decl	I
	jg	.L06
	ALIGN_4
/* Main loop: two columns of A per iteration (J = N / 2), accumulating
   two dot products against the packed x in %xmm0 and %xmm1. */
.L10:
	movl	Y, Y1

	movl	N, J
	sarl	$1, J
	jle	.L20
	ALIGN_3

.L11:
	movl	BUFFER, X
	addl	$16 * SIZE, X	# same -16 * SIZE bias as A

	movl	A, A1
	leal	(A1, LDA, 2), %eax
	movl	%eax, A

	xorps	%xmm0, %xmm0
	xorps	%xmm1, %xmm1

	movapd	-16 * SIZE(X), %xmm2
	movapd	-14 * SIZE(X), %xmm3

	movl	M, I
	sarl	$3, I
	jle	.L15

	movsd	-16 * SIZE(A1), %xmm4
	movhpd	-15 * SIZE(A1), %xmm4
	movsd	-16 * SIZE(A1, LDA), %xmm5
	movhpd	-15 * SIZE(A1, LDA), %xmm5
	movsd	-14 * SIZE(A1), %xmm6
	movhpd	-13 * SIZE(A1), %xmm6
	movsd	-14 * SIZE(A1, LDA), %xmm7
	movhpd	-13 * SIZE(A1, LDA), %xmm7

	decl	I
	jle	.L13
	ALIGN_4
/* Inner loop: 8 rows per iteration; loads for the next step are
   issued alongside the current multiplies (software pipelining). */
.L12:
#ifdef PREFETCH
	PREFETCH PREFETCHSIZE * SIZE(A1)
#endif

	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0
	movsd	-12 * SIZE(A1), %xmm4
	movhpd	-11 * SIZE(A1), %xmm4

	mulpd	%xmm2, %xmm5
	movapd	-12 * SIZE(X), %xmm2
	addpd	%xmm5, %xmm1
	movsd	-12 * SIZE(A1, LDA), %xmm5
	movhpd	-11 * SIZE(A1, LDA), %xmm5

	mulpd	%xmm3, %xmm6
	addpd	%xmm6, %xmm0
	movsd	-10 * SIZE(A1), %xmm6
	movhpd	-9 * SIZE(A1), %xmm6

	mulpd	%xmm3, %xmm7
	movapd	-10 * SIZE(X), %xmm3
	addpd	%xmm7, %xmm1
	movsd	-10 * SIZE(A1, LDA), %xmm7
	movhpd	-9 * SIZE(A1, LDA), %xmm7

#ifdef PREFETCH
	PREFETCH PREFETCHSIZE * SIZE(A1, LDA)
#endif

	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0
	movsd	-8 * SIZE(A1), %xmm4
	movhpd	-7 * SIZE(A1), %xmm4

	mulpd	%xmm2, %xmm5
	movapd	-8 * SIZE(X), %xmm2
	addpd	%xmm5, %xmm1
	movsd	-8 * SIZE(A1, LDA), %xmm5
	movhpd	-7 * SIZE(A1, LDA), %xmm5

	mulpd	%xmm3, %xmm6
	addpd	%xmm6, %xmm0
	movsd	-6 * SIZE(A1), %xmm6
	movhpd	-5 * SIZE(A1), %xmm6

	mulpd	%xmm3, %xmm7
	movapd	-6 * SIZE(X), %xmm3
	addpd	%xmm7, %xmm1
	movsd	-6 * SIZE(A1, LDA), %xmm7
	movhpd	-5 * SIZE(A1, LDA), %xmm7

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X

	decl	I
	jg	.L12
	ALIGN_4

/* Loop tail: drain the pipelined loads without fetching past the block. */
.L13:
	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0
	movsd	-12 * SIZE(A1), %xmm4
	movhpd	-11 * SIZE(A1), %xmm4

	mulpd	%xmm2, %xmm5
	movapd	-12 * SIZE(X), %xmm2
	addpd	%xmm5, %xmm1
	movsd	-12 * SIZE(A1, LDA), %xmm5
	movhpd	-11 * SIZE(A1, LDA), %xmm5

	mulpd	%xmm3, %xmm6
	addpd	%xmm6, %xmm0
	movsd	-10 * SIZE(A1), %xmm6
	movhpd	-9 * SIZE(A1), %xmm6

	mulpd	%xmm3, %xmm7
	movapd	-10 * SIZE(X), %xmm3
	addpd	%xmm7, %xmm1
	movsd	-10 * SIZE(A1, LDA), %xmm7
	movhpd	-9 * SIZE(A1, LDA), %xmm7

	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0
	mulpd	%xmm2, %xmm5
	movapd	-8 * SIZE(X), %xmm2
	addpd	%xmm5, %xmm1

	mulpd	%xmm3, %xmm6
	addpd	%xmm6, %xmm0
	mulpd	%xmm3, %xmm7
	movapd	-6 * SIZE(X), %xmm3
	addpd	%xmm7, %xmm1

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X
	ALIGN_4
/* Remaining rows: 4, then 2, then 1. */
.L15:
	testl	$4, M
	jle	.L16

	movsd	-16 * SIZE(A1), %xmm4
	movhpd	-15 * SIZE(A1), %xmm4
	movsd	-16 * SIZE(A1, LDA), %xmm5
	movhpd	-15 * SIZE(A1, LDA), %xmm5
	movsd	-14 * SIZE(A1), %xmm6
	movhpd	-13 * SIZE(A1), %xmm6
	movsd	-14 * SIZE(A1, LDA), %xmm7
	movhpd	-13 * SIZE(A1, LDA), %xmm7

	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0
	mulpd	%xmm2, %xmm5
	movapd	-12 * SIZE(X), %xmm2
	addpd	%xmm5, %xmm1

	mulpd	%xmm3, %xmm6
	addpd	%xmm6, %xmm0
	mulpd	%xmm3, %xmm7
	movapd	-10 * SIZE(X), %xmm3
	addpd	%xmm7, %xmm1

	addl	$4 * SIZE, A1
	addl	$4 * SIZE, X
	ALIGN_4

.L16:
	testl	$2, M
	jle	.L17

	movsd	-16 * SIZE(A1), %xmm4
	movhpd	-15 * SIZE(A1), %xmm4
	movsd	-16 * SIZE(A1, LDA), %xmm5
	movhpd	-15 * SIZE(A1, LDA), %xmm5

	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0
	mulpd	%xmm2, %xmm5
	addpd	%xmm5, %xmm1

	movapd	%xmm3, %xmm2
	addl	$2 * SIZE, A1
	ALIGN_4

.L17:
	testl	$1, M
	jle	.L18

	movsd	-16 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm4
	addsd	%xmm4, %xmm0

	movsd	-16 * SIZE(A1, LDA), %xmm5
	mulsd	%xmm2, %xmm5
	addsd	%xmm5, %xmm1
	ALIGN_4

/* Horizontally reduce the two accumulators to two dot products,
   scale by alpha, and update two elements of y. */
.L18:
#ifdef HAVE_SSE3
	haddpd	%xmm1, %xmm0
#else
	movapd	%xmm0, %xmm2
	unpcklpd %xmm1, %xmm0
	unpckhpd %xmm1, %xmm2
	addpd	%xmm2, %xmm0
#endif

#ifdef HAVE_SSE3
	movddup	ALPHA, %xmm7
#else
	movsd	ALPHA, %xmm7
	unpcklpd %xmm7, %xmm7
#endif

	mulpd	%xmm7, %xmm0

	movsd	(Y1), %xmm4
	movhpd	(Y1, INCY), %xmm4
	addpd	%xmm4, %xmm0

	movlpd	%xmm0, (Y1)
	movhpd	%xmm0, (Y1, INCY)
	leal	(Y1, INCY, 2), Y1

	decl	J
	jg	.L11
	ALIGN_4
/* Odd N: one remaining column. */
.L20:
	testl	$1, N
	jle	.L999

	movl	BUFFER, X
	addl	$16 * SIZE, X

	movl	A, A1
	leal	(A1, LDA, 2), %eax
	movl	%eax, A

	xorps	%xmm0, %xmm0
	xorps	%xmm1, %xmm1

	movapd	-16 * SIZE(X), %xmm2
	movapd	-14 * SIZE(X), %xmm3

	movl	M, I
	sarl	$3, I
	jle	.L25

	movsd	-16 * SIZE(A1), %xmm4
	movhpd	-15 * SIZE(A1), %xmm4
	movsd	-14 * SIZE(A1), %xmm6
	movhpd	-13 * SIZE(A1), %xmm6

	decl	I
	jle	.L23
	ALIGN_4

.L22:
#ifdef PREFETCH
	PREFETCH PREFETCHSIZE * SIZE(A1)
#endif

	mulpd	%xmm2, %xmm4
	movapd	-12 * SIZE(X), %xmm2
	addpd	%xmm4, %xmm0
	movsd	-12 * SIZE(A1), %xmm4
	movhpd	-11 * SIZE(A1), %xmm4

	mulpd	%xmm3, %xmm6
	movapd	-10 * SIZE(X), %xmm3
	addpd	%xmm6, %xmm0
	movsd	-10 * SIZE(A1), %xmm6
	movhpd	-9 * SIZE(A1), %xmm6

	mulpd	%xmm2, %xmm4
	movapd	-8 * SIZE(X), %xmm2
	addpd	%xmm4, %xmm0
	movsd	-8 * SIZE(A1), %xmm4
	movhpd	-7 * SIZE(A1), %xmm4

	mulpd	%xmm3, %xmm6
	movapd	-6 * SIZE(X), %xmm3
	addpd	%xmm6, %xmm0
	movsd	-6 * SIZE(A1), %xmm6
	movhpd	-5 * SIZE(A1), %xmm6

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X

	decl	I
	jg	.L22
	ALIGN_4

.L23:
	mulpd	%xmm2, %xmm4
	movapd	-12 * SIZE(X), %xmm2
	addpd	%xmm4, %xmm0
	movsd	-12 * SIZE(A1), %xmm4
	movhpd	-11 * SIZE(A1), %xmm4

	mulpd	%xmm3, %xmm6
	movapd	-10 * SIZE(X), %xmm3
	addpd	%xmm6, %xmm0
	movsd	-10 * SIZE(A1), %xmm6
	movhpd	-9 * SIZE(A1), %xmm6

	mulpd	%xmm2, %xmm4
	movapd	-8 * SIZE(X), %xmm2
	addpd	%xmm4, %xmm0

	mulpd	%xmm3, %xmm6
	movapd	-6 * SIZE(X), %xmm3
	addpd	%xmm6, %xmm0

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X
	ALIGN_4
.L25:
	testl	$4, M
	jle	.L26

	movsd	-16 * SIZE(A1), %xmm4
	movhpd	-15 * SIZE(A1), %xmm4
	movsd	-14 * SIZE(A1), %xmm6
	movhpd	-13 * SIZE(A1), %xmm6

	mulpd	%xmm2, %xmm4
	movapd	-12 * SIZE(X), %xmm2
	addpd	%xmm4, %xmm0

	mulpd	%xmm3, %xmm6
	movapd	-10 * SIZE(X), %xmm3
	addpd	%xmm6, %xmm0

	addl	$4 * SIZE, A1
	addl	$4 * SIZE, X
	ALIGN_4

.L26:
	testl	$2, M
	jle	.L27

	movsd	-16 * SIZE(A1), %xmm4
	movhpd	-15 * SIZE(A1), %xmm4

	mulpd	%xmm2, %xmm4
	addpd	%xmm4, %xmm0

	movapd	%xmm3, %xmm2
	addl	$2 * SIZE, A1
	ALIGN_4

.L27:
	testl	$1, M
	jle	.L28

	movsd	-16 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm4
	addsd	%xmm4, %xmm0
	ALIGN_4

/* Reduce, scale by alpha, and update one element of y. */
.L28:
#ifdef HAVE_SSE3
	haddpd	%xmm1, %xmm0
#else
	movapd	%xmm0, %xmm2
	unpcklpd %xmm1, %xmm0
	unpckhpd %xmm1, %xmm2
	addsd	%xmm2, %xmm0
#endif

	movsd	ALPHA, %xmm7
	mulpd	%xmm7, %xmm0

	addsd	(Y1), %xmm0
	movlpd	%xmm0, (Y1)
	ALIGN_4
/* Advance A and x past the rows handled in this pass, then start the
   next block of the m dimension. */
.L999:
	movl	M, J
	leal	(,J,SIZE), %eax
	addl	%eax, AA
	movl	STACK_INCX, INCX
	imull	INCX, %eax
	addl	%eax, XX
	jmp	.L0t
	ALIGN_4

.L999x:
	popl	%ebx
	popl	%esi
	popl	%edi
	popl	%ebp
	addl	$ARGS, %esp
	ret

	EPILOGUE
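
For reference, this kernel computes the transposed double-precision GEMV update y := alpha * A^T * x + y, one dot product of a column of A against x per element of y, with A stored column-major with leading dimension LDA (that is how the code indexes the two column pointers A1 and A1+LDA). The C below is a minimal sketch of that arithmetic only, not of the tuned structure: the function name dgemv_t_ref is hypothetical, and the parameter names simply mirror the stack-argument macros at the top of the file. The assembly additionally packs x into the contiguous BUFFER (the .L02/.L06 loops), processes two columns per pass of .L11, and blocks m at 2^21 - 4 doubles per pass (.L0t) so the packed x fits in the 16 MB buffer.

#include <stddef.h>

/* Reference semantics of gemv_t_sse2.S: y := alpha * A^T * x + y.
   A is m-by-n, column-major, leading dimension lda; x and y are
   strided vectors.  (Names and signature are illustrative.) */
void dgemv_t_ref(size_t m, size_t n, double alpha,
                 const double *a, size_t lda,
                 const double *x, ptrdiff_t incx,
                 double *y, ptrdiff_t incy)
{
    for (size_t j = 0; j < n; j++) {        /* one dot product per column */
        const double *col = a + j * lda;    /* column j of A */
        const double *xp  = x;
        double sum = 0.0;
        for (size_t i = 0; i < m; i++) {
            sum += col[i] * *xp;            /* A(i,j) * x(i) */
            xp  += incx;
        }
        *y += alpha * sum;                  /* y(j) += alpha * (A^T x)(j) */
        y  += incy;
    }
}

With incx = 1 this inner loop is already the unit-stride streaming pattern the kernel wants; for general incx the assembly first copies x into the contiguous buffer so every inner-loop load of x can be an aligned movapd, which is the point of the packing phase above.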