
zgemv_t_atom.S

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT       */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
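
/* Double-complex GEMV kernel, transposed case, for 32-bit x86; the   */
/* instruction schedule targets the Intel Atom (the ATOM block below  */
/* supplies its prefetch settings).  For each column j of A the code  */
/* forms the complex dot product sum_i A(i,j) * x(i), with CONJ/XCONJ */
/* selecting conjugation of either factor, and updates                */
/* y(j) += alpha * sum.  The vector x is first packed into BUFFER so  */
/* the inner loop reads it contiguously regardless of INCX.           */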

#define ASSEMBLER
#include "common.h"

#ifdef ATOM
#define PREFETCH	prefetchnta
#define PREFETCHW	prefetcht0
#define PREFETCHSIZE	(8 * 6)
#endif

#define STACKSIZE	16

#define M		 4 + STACKSIZE(%esp)
#define N		 8 + STACKSIZE(%esp)
#define ALPHA_R		16 + STACKSIZE(%esp)
#define ALPHA_I		24 + STACKSIZE(%esp)
#define A		32 + STACKSIZE(%esp)
#define STACK_LDA	36 + STACKSIZE(%esp)
#define STACK_X		40 + STACKSIZE(%esp)
#define STACK_INCX	44 + STACKSIZE(%esp)
#define Y		48 + STACKSIZE(%esp)
#define STACK_INCY	52 + STACKSIZE(%esp)
#define BUFFER		56 + STACKSIZE(%esp)

#define I	%eax
#define J	%ebx

#define INCX	J
#define INCY	%ecx

#define A1	%esi
#define X	%edx
#define Y1	%edi
#define LDA	%ebp

#if !defined(CONJ) && !defined(XCONJ)
#define ADD1	addsd
#define ADD2	addsd
#define ADD3	subsd
#define ADD4	addsd
#endif

#if defined(CONJ) && !defined(XCONJ)
#define ADD1	addsd
#define ADD2	addsd
#define ADD3	addsd
#define ADD4	subsd
#endif

#if !defined(CONJ) && defined(XCONJ)
#define ADD1	addsd
#define ADD2	subsd
#define ADD3	addsd
#define ADD4	addsd
#endif

#if defined(CONJ) && defined(XCONJ)
#define ADD1	addsd
#define ADD2	subsd
#define ADD3	subsd
#define ADD4	subsd
#endif
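
/* ADD1..ADD4 pick add/sub for the four partial products of each      */
/* complex multiply-accumulate, so one instruction stream covers the  */
/* plain, CONJ (conjugated A) and XCONJ (conjugated x) variants:      */
/*   acc_real ADD1= a_r * x_r;   acc_real ADD3= a_i * x_i;            */
/*   acc_imag ADD2= a_r * x_i;   acc_imag ADD4= a_i * x_r;            */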

	PROLOGUE

	pushl	%ebp
	pushl	%edi
	pushl	%esi
	pushl	%ebx

	PROFCODE

	movl	STACK_LDA,  LDA
	movl	STACK_X,    X
	movl	STACK_INCX, INCX
	movl	STACK_INCY, INCY

	sall	$ZBASE_SHIFT, INCX
	sall	$ZBASE_SHIFT, INCY
	sall	$ZBASE_SHIFT, LDA

	subl	$-16 * SIZE, A

	cmpl	$0, N
	jle	.L999
	cmpl	$0, M
	jle	.L999
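
/* Pack the strided input vector x (stride INCX) into the contiguous  */
/* BUFFER, four complex elements per iteration; the remainder of the  */
/* copy is handled at .L06.                                           */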

	movl	BUFFER, Y1

	movl	M, I
	sarl	$2, I
	jle	.L05
	ALIGN_4

.L02:
	movsd	0 * SIZE(X), %xmm0
	movhpd	1 * SIZE(X), %xmm0
	addl	INCX, X
	movsd	0 * SIZE(X), %xmm1
	movhpd	1 * SIZE(X), %xmm1
	addl	INCX, X
	movsd	0 * SIZE(X), %xmm2
	movhpd	1 * SIZE(X), %xmm2
	addl	INCX, X
	movsd	0 * SIZE(X), %xmm3
	movhpd	1 * SIZE(X), %xmm3
	addl	INCX, X

	movapd	%xmm0, 0 * SIZE(Y1)
	movapd	%xmm1, 2 * SIZE(Y1)
	movapd	%xmm2, 4 * SIZE(Y1)
	movapd	%xmm3, 6 * SIZE(Y1)

	addl	$8 * SIZE, Y1
	decl	I
	jg	.L02
	ALIGN_4

.L05:
	movl	M, I
	andl	$3, I
	jle	.L10
	ALIGN_2

.L06:
	movsd	0 * SIZE(X), %xmm0
	movhpd	1 * SIZE(X), %xmm0
	addl	INCX, X

	movapd	%xmm0, 0 * SIZE(Y1)

	addl	$2 * SIZE, Y1
	decl	I
	jg	.L06
	ALIGN_4
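
/* Column loop: one iteration per column of A.  Each pass accumulates */
/* the dot product of the column with the packed x in %xmm0 (real)    */
/* and %xmm1 (imaginary), then updates one element of y at .L18.      */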
.L10:
	movl	Y, Y1
	movl	N, J
	ALIGN_3

.L11:
	movl	BUFFER, X
	addl	$16 * SIZE, X

	movl	A, A1
	addl	LDA, A

	xorps	%xmm0, %xmm0
	xorps	%xmm1, %xmm1

	movsd	-16 * SIZE(X), %xmm2
	movsd	-15 * SIZE(X), %xmm3

	movl	M, I
	sarl	$2, I
	jle	.L15

	movsd	-16 * SIZE(A1), %xmm4
	movsd	-15 * SIZE(A1), %xmm5

	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	mulsd	%xmm3, %xmm6

	decl	I
	jle	.L13
	ALIGN_4
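
/* Main loop: four complex elements of A and x per iteration.  Loads  */
/* for the next element are interleaved with the multiplies and ADDn  */
/* of the current one to keep the in-order pipeline busy.             */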
.L12:
#ifdef PREFETCH
	PREFETCH	PREFETCHSIZE * SIZE(A1)
#endif

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-13 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-14 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-14 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-13 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-11 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-12 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-12 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-11 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-9 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-10 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-10 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-9 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-7 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-8 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-8 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-7 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X

	decl	I
	jg	.L12
	ALIGN_4
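
/* Final pass of the unrolled loop: the same arithmetic as .L12, but  */
/* without the loads that would preload the next block's A element.   */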
.L13:
	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-13 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-14 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-14 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-13 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-11 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-12 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-12 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-11 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-9 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-10 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-10 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-9 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-7 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	mulsd	%xmm2, %xmm7
	movsd	-8 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	ADD3	%xmm5, %xmm0
	ADD4	%xmm7, %xmm1

	addl	$8 * SIZE, A1
	addl	$8 * SIZE, X
	ALIGN_4
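
/* M is not a multiple of 4: handle two leftover elements here        */
/* (if M & 2) and a final single element at .L17 (if M & 1).          */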
.L15:
	testl	$2, M
	jle	.L17

	movsd	-16 * SIZE(A1), %xmm4
	movsd	-15 * SIZE(A1), %xmm5

	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	mulsd	%xmm3, %xmm6

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-13 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	movsd	-14 * SIZE(A1), %xmm4
	mulsd	%xmm2, %xmm7
	movsd	-14 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	ADD3	%xmm5, %xmm0
	movsd	-13 * SIZE(A1), %xmm5
	mulsd	%xmm3, %xmm6
	ADD4	%xmm7, %xmm1

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	movsd	-11 * SIZE(X), %xmm3
	ADD1	%xmm4, %xmm0
	mulsd	%xmm2, %xmm7
	movsd	-12 * SIZE(X), %xmm2
	ADD2	%xmm6, %xmm1
	ADD3	%xmm5, %xmm0
	ADD4	%xmm7, %xmm1

	addl	$4 * SIZE, A1
	ALIGN_4

.L17:
	testl	$1, M
	jle	.L18

	movsd	-16 * SIZE(A1), %xmm4
	movsd	-15 * SIZE(A1), %xmm5

	movapd	%xmm4, %xmm6
	mulsd	%xmm2, %xmm4
	mulsd	%xmm3, %xmm6

	movapd	%xmm5, %xmm7
	mulsd	%xmm3, %xmm5
	ADD1	%xmm4, %xmm0
	mulsd	%xmm2, %xmm7
	ADD2	%xmm6, %xmm1
	ADD3	%xmm5, %xmm0
	ADD4	%xmm7, %xmm1
	ALIGN_4
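
/* Scale the accumulated dot product by alpha (complex multiply with  */
/* ALPHA_R / ALPHA_I), add it to y(j), and advance Y1 by INCY.        */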
.L18:
	movsd	0 * SIZE(Y1), %xmm4
	movapd	%xmm0, %xmm2
	mulsd	ALPHA_R, %xmm0
	movsd	1 * SIZE(Y1), %xmm5
	movapd	%xmm1, %xmm3
	mulsd	ALPHA_R, %xmm1

	mulsd	ALPHA_I, %xmm2
	mulsd	ALPHA_I, %xmm3

	addsd	%xmm2, %xmm1
	subsd	%xmm3, %xmm0

	addsd	%xmm4, %xmm0
	addsd	%xmm5, %xmm1

	movlpd	%xmm0, 0 * SIZE(Y1)
	movlpd	%xmm1, 1 * SIZE(Y1)

	addl	INCY, Y1

	decl	J
	jg	.L11
	ALIGN_4

.L999:
	popl	%ebx
	popl	%esi
	popl	%edi
	popl	%ebp
	ret

	EPILOGUE