You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

zscal_atom.S 8.2 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED  BY THE  UNIVERSITY OF  TEXAS AT    */
/*    AUSTIN  ``AS IS''  AND ANY  EXPRESS OR  IMPLIED WARRANTIES,    */
/*    INCLUDING, BUT  NOT LIMITED  TO, THE IMPLIED  WARRANTIES OF    */
/*    MERCHANTABILITY  AND FITNESS FOR  A PARTICULAR  PURPOSE ARE    */
/*    DISCLAIMED.  IN  NO EVENT SHALL THE UNIVERSITY  OF TEXAS AT    */
/*    AUSTIN OR CONTRIBUTORS BE  LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL,  SPECIAL, EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES    */
/*    (INCLUDING, BUT  NOT LIMITED TO,  PROCUREMENT OF SUBSTITUTE    */
/*    GOODS  OR  SERVICES; LOSS  OF  USE,  DATA,  OR PROFITS;  OR    */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER  IN CONTRACT, STRICT  LIABILITY, OR TORT    */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE)  ARISING IN ANY WAY OUT    */
/*    OF  THE  USE OF  THIS  SOFTWARE,  EVEN  IF ADVISED  OF  THE    */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/*    The views and conclusions contained in the software and        */
/*    documentation are those of the authors and should not be       */
/*    interpreted as representing official policies, either          */
/*    expressed or implied, of The University of Texas at Austin.    */
/*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #ifndef WINDOWS_ABI
  41. #define M ARG1
  42. #define X ARG4
  43. #define INCX ARG5
  44. #else
  45. #define M ARG1
  46. #define X ARG2
  47. #define INCX ARG3
  48. #endif
  49. #define XX %r10
  50. #define I %rax
  51. #include "l1param.h"
  52. PROLOGUE
  53. PROFCODE
  54. #ifdef WINDOWS_ABI
  55. movaps %xmm3, %xmm0
  56. movsd 40(%rsp), %xmm1
  57. movq 48(%rsp), X
  58. movq 56(%rsp), INCX
  59. #endif
  60. SAVEREGISTERS
  61. salq $ZBASE_SHIFT, INCX
  62. testq M, M
  63. jle .L999
  64. pxor %xmm15, %xmm15
  65. comisd %xmm0, %xmm15
  66. jne .L30 # Alpha_r != ZERO
  67. jp .L30
  68. comisd %xmm1, %xmm15
  69. jne .L30 # Alpha_i != ZERO
  70. /* Alpha == ZERO */
  71. cmpq $2 * SIZE, INCX
  72. jne .L20
  73. movq M, I
  74. sarq $2, I
  75. jle .L12
  76. ALIGN_4
  77. .L11:
  78. movsd %xmm1, 0 * SIZE(X)
  79. movsd %xmm1, 1 * SIZE(X)
  80. movsd %xmm1, 2 * SIZE(X)
  81. movsd %xmm1, 3 * SIZE(X)
  82. movsd %xmm1, 4 * SIZE(X)
  83. movsd %xmm1, 5 * SIZE(X)
  84. movsd %xmm1, 6 * SIZE(X)
  85. movsd %xmm1, 7 * SIZE(X)
  86. addq $8 * SIZE, X
  87. decq I
  88. jg .L11
  89. ALIGN_4
  90. .L12:
  91. testq $2, M
  92. je .L14
  93. movsd %xmm1, 0 * SIZE(X)
  94. movsd %xmm1, 1 * SIZE(X)
  95. movsd %xmm1, 2 * SIZE(X)
  96. movsd %xmm1, 3 * SIZE(X)
  97. addq $4 * SIZE, X
  98. ALIGN_3
  99. .L14:
  100. testq $1, M
  101. je .L999
  102. movsd %xmm1, 0 * SIZE(X)
  103. movsd %xmm1, 1 * SIZE(X)
  104. addq $2 * SIZE, X
  105. jmp .L999
  106. ALIGN_4
  107. .L20:
  108. movq M, I # rcx = n
  109. sarq $2, I
  110. jle .L22
  111. ALIGN_4
  112. .L21:
  113. movsd %xmm1, 0 * SIZE(X)
  114. movsd %xmm1, 1 * SIZE(X)
  115. addq INCX, X
  116. movsd %xmm1, 0 * SIZE(X)
  117. movsd %xmm1, 1 * SIZE(X)
  118. addq INCX, X
  119. movsd %xmm1, 0 * SIZE(X)
  120. movsd %xmm1, 1 * SIZE(X)
  121. addq INCX, X
  122. movsd %xmm1, 0 * SIZE(X)
  123. movsd %xmm1, 1 * SIZE(X)
  124. addq INCX, X
  125. decq I
  126. jg .L21
  127. ALIGN_4
  128. .L22:
  129. testq $2, M
  130. je .L23
  131. movsd %xmm1, 0 * SIZE(X)
  132. movsd %xmm1, 1 * SIZE(X)
  133. addq INCX, X
  134. movsd %xmm1, 0 * SIZE(X)
  135. movsd %xmm1, 1 * SIZE(X)
  136. addq INCX, X
  137. ALIGN_3
  138. .L23:
  139. testq $1, M
  140. je .L999
  141. movsd %xmm1, 0 * SIZE(X)
  142. movsd %xmm1, 1 * SIZE(X)
  143. jmp .L999
  144. ALIGN_4
  145. /* Alpha != ZERO */
  146. .L30:
  147. movq X, XX
  148. movq M, I
  149. sarq $2, I
  150. jle .L35
  151. movsd 0 * SIZE(X), %xmm2
  152. movsd 1 * SIZE(X), %xmm3
  153. addq INCX, X
  154. movsd 0 * SIZE(X), %xmm6
  155. movsd 1 * SIZE(X), %xmm7
  156. addq INCX, X
  157. movaps %xmm2, %xmm4
  158. movsd 0 * SIZE(X), %xmm8
  159. mulsd %xmm0, %xmm2
  160. movaps %xmm3, %xmm5
  161. movsd 1 * SIZE(X), %xmm9
  162. mulsd %xmm1, %xmm5
  163. addq INCX, X
  164. mulsd %xmm0, %xmm3
  165. mulsd %xmm1, %xmm4
  166. subsd %xmm5, %xmm2
  167. movsd 0 * SIZE(X), %xmm10
  168. addsd %xmm4, %xmm3
  169. movsd 1 * SIZE(X), %xmm11
  170. movaps %xmm6, %xmm4
  171. mulsd %xmm0, %xmm6
  172. addq INCX, X
  173. movaps %xmm7, %xmm5
  174. mulsd %xmm1, %xmm5
  175. mulsd %xmm0, %xmm7
  176. mulsd %xmm1, %xmm4
  177. decq I
  178. jle .L32
  179. ALIGN_4
  180. .L31:
  181. #ifdef PREFETCHW
  182. PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
  183. #endif
  184. subsd %xmm5, %xmm6
  185. movsd %xmm2, 0 * SIZE(XX)
  186. addsd %xmm4, %xmm7
  187. movsd %xmm3, 1 * SIZE(XX)
  188. movaps %xmm8, %xmm4
  189. movsd 0 * SIZE(X), %xmm2
  190. mulsd %xmm0, %xmm8
  191. addq INCX, XX
  192. movaps %xmm9, %xmm5
  193. movsd 1 * SIZE(X), %xmm3
  194. mulsd %xmm1, %xmm5
  195. addq INCX, X
  196. mulsd %xmm0, %xmm9
  197. mulsd %xmm1, %xmm4
  198. subsd %xmm5, %xmm8
  199. movsd %xmm6, 0 * SIZE(XX)
  200. addsd %xmm4, %xmm9
  201. movsd %xmm7, 1 * SIZE(XX)
  202. movaps %xmm10, %xmm4
  203. movsd 0 * SIZE(X), %xmm6
  204. mulsd %xmm0, %xmm10
  205. addq INCX, XX
  206. movaps %xmm11, %xmm5
  207. movsd 1 * SIZE(X), %xmm7
  208. mulsd %xmm1, %xmm5
  209. addq INCX, X
  210. mulsd %xmm0, %xmm11
  211. mulsd %xmm1, %xmm4
  212. subsd %xmm5, %xmm10
  213. movsd %xmm8, 0 * SIZE(XX)
  214. addsd %xmm4, %xmm11
  215. movsd %xmm9, 1 * SIZE(XX)
  216. movaps %xmm2, %xmm4
  217. movsd 0 * SIZE(X), %xmm8
  218. mulsd %xmm0, %xmm2
  219. addq INCX, XX
  220. movaps %xmm3, %xmm5
  221. movsd 1 * SIZE(X), %xmm9
  222. mulsd %xmm1, %xmm5
  223. addq INCX, X
  224. mulsd %xmm0, %xmm3
  225. mulsd %xmm1, %xmm4
  226. subsd %xmm5, %xmm2
  227. movsd %xmm10, 0 * SIZE(XX)
  228. addsd %xmm4, %xmm3
  229. movsd %xmm11, 1 * SIZE(XX)
  230. movaps %xmm6, %xmm4
  231. movsd 0 * SIZE(X), %xmm10
  232. mulsd %xmm0, %xmm6
  233. addq INCX, XX
  234. movaps %xmm7, %xmm5
  235. movsd 1 * SIZE(X), %xmm11
  236. mulsd %xmm1, %xmm5
  237. addq INCX, X
  238. mulsd %xmm0, %xmm7
  239. mulsd %xmm1, %xmm4
  240. decq I
  241. jg .L31
  242. ALIGN_4
  243. .L32:
  244. subsd %xmm5, %xmm6
  245. movsd %xmm2, 0 * SIZE(XX)
  246. addsd %xmm4, %xmm7
  247. movsd %xmm3, 1 * SIZE(XX)
  248. movaps %xmm8, %xmm4
  249. mulsd %xmm0, %xmm8
  250. addq INCX, XX
  251. movaps %xmm9, %xmm5
  252. mulsd %xmm1, %xmm5
  253. mulsd %xmm0, %xmm9
  254. mulsd %xmm1, %xmm4
  255. subsd %xmm5, %xmm8
  256. movsd %xmm6, 0 * SIZE(XX)
  257. addsd %xmm4, %xmm9
  258. movsd %xmm7, 1 * SIZE(XX)
  259. movaps %xmm10, %xmm4
  260. mulsd %xmm0, %xmm10
  261. addq INCX, XX
  262. movaps %xmm11, %xmm5
  263. mulsd %xmm1, %xmm5
  264. mulsd %xmm0, %xmm11
  265. mulsd %xmm1, %xmm4
  266. subsd %xmm5, %xmm10
  267. movsd %xmm8, 0 * SIZE(XX)
  268. addsd %xmm4, %xmm11
  269. movsd %xmm9, 1 * SIZE(XX)
  270. addq INCX, XX
  271. movsd %xmm10, 0 * SIZE(XX)
  272. movsd %xmm11, 1 * SIZE(XX)
  273. addq INCX, XX
  274. ALIGN_3
  275. .L35:
  276. testq $2, M
  277. je .L37
  278. movsd 0 * SIZE(X), %xmm2
  279. movsd 1 * SIZE(X), %xmm3
  280. addq INCX, X
  281. movaps %xmm2, %xmm4
  282. movsd 0 * SIZE(X), %xmm6
  283. mulsd %xmm0, %xmm2
  284. movaps %xmm3, %xmm5
  285. movsd 1 * SIZE(X), %xmm7
  286. mulsd %xmm1, %xmm5
  287. addq INCX, X
  288. mulsd %xmm0, %xmm3
  289. mulsd %xmm1, %xmm4
  290. subsd %xmm5, %xmm2
  291. addsd %xmm4, %xmm3
  292. movaps %xmm6, %xmm4
  293. mulsd %xmm0, %xmm6
  294. movaps %xmm7, %xmm5
  295. mulsd %xmm1, %xmm5
  296. mulsd %xmm0, %xmm7
  297. mulsd %xmm1, %xmm4
  298. subsd %xmm5, %xmm6
  299. movsd %xmm2, 0 * SIZE(XX)
  300. addsd %xmm4, %xmm7
  301. movsd %xmm3, 1 * SIZE(XX)
  302. addq INCX, XX
  303. movsd %xmm6, 0 * SIZE(XX)
  304. movsd %xmm7, 1 * SIZE(XX)
  305. addq INCX, XX
  306. ALIGN_3
  307. .L37:
  308. testq $1, M
  309. je .L999
  310. movsd 0 * SIZE(X), %xmm2
  311. movsd 1 * SIZE(X), %xmm3
  312. movaps %xmm2, %xmm4
  313. mulsd %xmm0, %xmm2
  314. movaps %xmm3, %xmm5
  315. mulsd %xmm1, %xmm5
  316. mulsd %xmm0, %xmm3
  317. mulsd %xmm1, %xmm4
  318. subsd %xmm5, %xmm2
  319. addsd %xmm4, %xmm3
  320. movsd %xmm2, 0 * SIZE(XX)
  321. movsd %xmm3, 1 * SIZE(XX)
  322. ALIGN_3
  323. .L999:
  324. xorq %rax, %rax
  325. RESTOREREGISTERS
  326. ret
  327. EPILOGUE