You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

dgemv_t_atom.S 14 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686
  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #include "l2param.h"
/* Prefetch flavors for this (Atom-tuned) kernel: non-temporal reads for the
   streamed A columns; prefetcht0 stands in for a write prefetch since Atom
   has no prefetchw. */
  41. #define PREFETCH prefetchnta
  42. #define PREFETCHW prefetcht0
/* Prefetch distance in elements ahead of the current A1/A2 pointers. */
  43. #define PREFETCH_SIZE (8 * 6)
/* Register/stack-slot assignments per ABI.  Kernel signature (as used by the
   OpenBLAS level-2 drivers): (m, n, alpha, a, lda, x, incx, y, incy, buffer).
   The first few arguments arrive in registers; the rest are read from the
   caller's stack at STACKSIZE offsets (we subtract STACKSIZE on entry). */
  44. #ifndef WINDOWS_ABI
  45. #define STACKSIZE 64
  46. #define OLD_INCX 8 + STACKSIZE(%rsp)
  47. #define OLD_Y 16 + STACKSIZE(%rsp)
  48. #define OLD_INCY 24 + STACKSIZE(%rsp)
  49. #define OLD_BUFFER 32 + STACKSIZE(%rsp)
  50. #define M %rdi
  51. #define N %rsi
  52. #define A %rcx
  53. #define LDA %r8
  54. #define X %r9
  55. #define INCX %rdx
  56. #define Y %rbp
  57. #define INCY %r10
  58. #define BUFFER %rbx
  59. #else
  60. #define STACKSIZE 256
  61. #define OLD_A 40 + STACKSIZE(%rsp)
  62. #define OLD_LDA 48 + STACKSIZE(%rsp)
  63. #define OLD_X 56 + STACKSIZE(%rsp)
  64. #define OLD_INCX 64 + STACKSIZE(%rsp)
  65. #define OLD_Y 72 + STACKSIZE(%rsp)
  66. #define OLD_INCY 80 + STACKSIZE(%rsp)
  67. #define OLD_BUFFER 88 + STACKSIZE(%rsp)
  68. #define M %rcx
  69. #define N %rdx
  70. #define A %rdi
  71. #define LDA %r8
  72. #define X %r9
  73. #define INCX %rsi
  74. #define Y %rbp
  75. #define INCY %r10
  76. #define BUFFER %rbx
  77. #endif
/* Scratch / loop registers (callee-saved ones are preserved in the prologue). */
  78. #define I %rax
  79. #define J %r11
  80. #define A1 %r12
  81. #define A2 %r13
  82. #define X1 %r14
  83. #define Y1 %r15
/* alpha lives in xmm3 for the whole kernel.  On SysV it arrives in xmm0 and
   is copied below; on Windows it is assumed to already sit in xmm3 —
   NOTE(review): confirm against the Windows-ABI caller convention. */
  84. #define ALPHA %xmm3
/* y += alpha * A^T * x  (double precision, transposed GEMV, scalar SSE2).
   Entry: save callee-saved GPRs (and, on Windows, rdi/rsi/xmm6-15), fetch
   the stack-passed arguments, scale the strides by sizeof(double). */
  85. PROLOGUE
  86. PROFCODE
  87. subq $STACKSIZE, %rsp
  88. movq %rbx, 0(%rsp)
  89. movq %rbp, 8(%rsp)
  90. movq %r12, 16(%rsp)
  91. movq %r13, 24(%rsp)
  92. movq %r14, 32(%rsp)
  93. movq %r15, 40(%rsp)
  94. #ifdef WINDOWS_ABI
  95. movq %rdi, 48(%rsp)
  96. movq %rsi, 56(%rsp)
  97. movups %xmm6, 64(%rsp)
  98. movups %xmm7, 80(%rsp)
  99. movups %xmm8, 96(%rsp)
  100. movups %xmm9, 112(%rsp)
  101. movups %xmm10, 128(%rsp)
  102. movups %xmm11, 144(%rsp)
  103. movups %xmm12, 160(%rsp)
  104. movups %xmm13, 176(%rsp)
  105. movups %xmm14, 192(%rsp)
  106. movups %xmm15, 208(%rsp)
  107. movq OLD_A, A
  108. movq OLD_LDA, LDA
  109. movq OLD_X, X
  110. #endif
  111. movq OLD_INCX, INCX
  112. movq OLD_Y, Y
  113. movq OLD_INCY, INCY
  114. movq OLD_BUFFER, BUFFER
/* Convert element strides to byte strides (SIZE = sizeof(FLOAT)). */
  115. leaq (,INCX, SIZE), INCX
  116. leaq (,INCY, SIZE), INCY
  117. leaq (, LDA, SIZE), LDA
  118. #ifndef WINDOWS_ABI
  119. movapd %xmm0, ALPHA
  120. #endif
/* Y is read-advanced, Y1 is write-advanced; they start at the same address. */
  121. movq Y, Y1
/* Nothing to do for an empty matrix. */
  122. testq M, M
  123. jle .L999
  124. testq N, N
  125. jle .L999
/* If x is already contiguous (incx == 1 element) use it directly as the
   buffer and skip the copy-in loop. */
  126. cmpq $SIZE, INCX
  127. cmoveq X, BUFFER
  128. je .L10
/* Copy strided x into the contiguous BUFFER, 8 elements per iteration,
   then a 1-at-a-time tail for M mod 8. */
  129. movq BUFFER, X1
  130. movq M, I
  131. sarq $3, I
  132. jle .L05
  133. ALIGN_3
  134. .L02:
  135. movsd (X), %xmm0
  136. addq INCX, X
  137. movsd (X), %xmm1
  138. addq INCX, X
  139. movsd (X), %xmm2
  140. addq INCX, X
  141. movsd (X), %xmm8
  142. addq INCX, X
  143. movsd (X), %xmm4
  144. addq INCX, X
  145. movsd (X), %xmm5
  146. addq INCX, X
  147. movsd (X), %xmm6
  148. addq INCX, X
  149. movsd (X), %xmm7
  150. addq INCX, X
  151. movsd %xmm0, 0 * SIZE(X1)
  152. movsd %xmm1, 1 * SIZE(X1)
  153. movsd %xmm2, 2 * SIZE(X1)
  154. movsd %xmm8, 3 * SIZE(X1)
  155. movsd %xmm4, 4 * SIZE(X1)
  156. movsd %xmm5, 5 * SIZE(X1)
  157. movsd %xmm6, 6 * SIZE(X1)
  158. movsd %xmm7, 7 * SIZE(X1)
  159. addq $8 * SIZE, X1
  160. decq I
  161. jg .L02
  162. ALIGN_3
/* Tail: remaining M & 7 elements, one at a time. */
  163. .L05:
  164. movq M, I
  165. andq $7, I
  166. jle .L10
  167. ALIGN_3
  168. .L06:
  169. movsd (X), %xmm0
  170. addq INCX, X
  171. movsd %xmm0, (X1)
  172. addq $SIZE, X1
  173. decq I
  174. jg .L06
  175. ALIGN_3
/* Main loop: process two columns of A per outer iteration (J = N/2).
   xmm0/xmm1 accumulate the dot products x·A1 and x·A2; the inner .L12 loop
   is software-pipelined 8 elements deep, with loads for the next step
   interleaved between the multiply/accumulate of the current one.  The
   instruction order here IS the schedule (Atom is in-order) — do not
   rearrange. */
  176. .L10:
  177. movq N, J
  178. sarq $1, J
  179. jle .L20
  180. ALIGN_3
  181. .L11:
/* A1/A2 point at the two current columns; A advances by 2 columns. */
  182. movq A, A1
  183. leaq (A, LDA, 1), A2
  184. leaq (A, LDA, 2), A
  185. movq BUFFER, X1
  186. xorps %xmm0, %xmm0
  187. xorps %xmm1, %xmm1
  188. PREFETCHW 1 * SIZE(X1)
  189. movq M, I
  190. sarq $3, I
  191. jle .L14
/* Pipeline warm-up: preload the first 4 x values and A entries, start the
   first multiplies. */
  192. movsd 0 * SIZE(X1), %xmm4
  193. movsd 0 * SIZE(A1), %xmm8
  194. movsd 0 * SIZE(A2), %xmm12
  195. movsd 1 * SIZE(X1), %xmm5
  196. movsd 1 * SIZE(A1), %xmm9
  197. movsd 1 * SIZE(A2), %xmm13
  198. movsd 2 * SIZE(X1), %xmm6
  199. movsd 2 * SIZE(A1), %xmm10
  200. movsd 2 * SIZE(A2), %xmm14
  201. movsd 3 * SIZE(X1), %xmm7
  202. mulsd %xmm4, %xmm8
  203. movsd 3 * SIZE(A1), %xmm11
  204. mulsd %xmm4, %xmm12
  205. movsd 4 * SIZE(X1), %xmm4
  206. mulsd %xmm5, %xmm9
  207. movsd 3 * SIZE(A2), %xmm15
  208. mulsd %xmm5, %xmm13
  209. movsd 5 * SIZE(X1), %xmm5
  210. decq I
  211. jle .L13
  212. ALIGN_3
/* Steady state: 8 elements of both columns per trip. */
  213. .L12:
  214. PREFETCH PREFETCH_SIZE * SIZE(A1)
  215. addsd %xmm8, %xmm0
  216. PREFETCH PREFETCH_SIZE * SIZE(A2)
  217. mulsd %xmm6, %xmm10
  218. movsd 4 * SIZE(A1), %xmm8
  219. addsd %xmm12, %xmm1
  220. movsd 4 * SIZE(A2), %xmm12
  221. mulsd %xmm6, %xmm14
  222. movsd 6 * SIZE(X1), %xmm6
  223. addsd %xmm9, %xmm0
  224. movsd 5 * SIZE(A1), %xmm9
  225. mulsd %xmm7, %xmm11
  226. addsd %xmm13, %xmm1
  227. movsd 5 * SIZE(A2), %xmm13
  228. mulsd %xmm7, %xmm15
  229. movsd 7 * SIZE(X1), %xmm7
  230. addsd %xmm10, %xmm0
  231. movsd 6 * SIZE(A1), %xmm10
  232. mulsd %xmm4, %xmm8
  233. addsd %xmm14, %xmm1
  234. movsd 6 * SIZE(A2), %xmm14
  235. mulsd %xmm4, %xmm12
  236. movsd 8 * SIZE(X1), %xmm4
  237. addsd %xmm11, %xmm0
  238. movsd 7 * SIZE(A1), %xmm11
  239. mulsd %xmm5, %xmm9
  240. addsd %xmm15, %xmm1
  241. movsd 7 * SIZE(A2), %xmm15
  242. mulsd %xmm5, %xmm13
  243. movsd 9 * SIZE(X1), %xmm5
  244. addsd %xmm8, %xmm0
  245. movsd 8 * SIZE(A1), %xmm8
  246. mulsd %xmm6, %xmm10
/* Pointer bumps are spread through the body to hide their latency; the
   offsets before/after each bump account for it. */
  247. addq $8 * SIZE, X1
  248. addsd %xmm12, %xmm1
  249. movsd 8 * SIZE(A2), %xmm12
  250. mulsd %xmm6, %xmm14
  251. movsd 2 * SIZE(X1), %xmm6
  252. addsd %xmm9, %xmm0
  253. movsd 9 * SIZE(A1), %xmm9
  254. mulsd %xmm7, %xmm11
  255. addq $8 * SIZE, A2
  256. addsd %xmm13, %xmm1
  257. movsd 1 * SIZE(A2), %xmm13
  258. mulsd %xmm7, %xmm15
  259. movsd 3 * SIZE(X1), %xmm7
  260. addsd %xmm10, %xmm0
  261. movsd 10 * SIZE(A1), %xmm10
  262. mulsd %xmm4, %xmm8
  263. addq $8 * SIZE, A1
  264. addsd %xmm14, %xmm1
  265. movsd 2 * SIZE(A2), %xmm14
  266. mulsd %xmm4, %xmm12
  267. movsd 4 * SIZE(X1), %xmm4
  268. addsd %xmm11, %xmm0
  269. movsd 3 * SIZE(A1), %xmm11
  270. mulsd %xmm5, %xmm9
  271. decq I
  272. addsd %xmm15, %xmm1
  273. movsd 3 * SIZE(A2), %xmm15
  274. mulsd %xmm5, %xmm13
  275. movsd 5 * SIZE(X1), %xmm5
  276. jg .L12
  277. ALIGN_3
/* Pipeline drain: finish the last 8-element group without loading past it. */
  278. .L13:
  279. addsd %xmm8, %xmm0
  280. movsd 4 * SIZE(A1), %xmm8
  281. mulsd %xmm6, %xmm10
  282. addsd %xmm12, %xmm1
  283. movsd 4 * SIZE(A2), %xmm12
  284. mulsd %xmm6, %xmm14
  285. movsd 6 * SIZE(X1), %xmm6
  286. addsd %xmm9, %xmm0
  287. movsd 5 * SIZE(A1), %xmm9
  288. mulsd %xmm7, %xmm11
  289. addsd %xmm13, %xmm1
  290. movsd 5 * SIZE(A2), %xmm13
  291. mulsd %xmm7, %xmm15
  292. movsd 7 * SIZE(X1), %xmm7
  293. addsd %xmm10, %xmm0
  294. movsd 6 * SIZE(A1), %xmm10
  295. mulsd %xmm4, %xmm8
  296. addsd %xmm14, %xmm1
  297. movsd 6 * SIZE(A2), %xmm14
  298. mulsd %xmm4, %xmm12
  299. addsd %xmm11, %xmm0
  300. movsd 7 * SIZE(A1), %xmm11
  301. mulsd %xmm5, %xmm9
  302. addsd %xmm15, %xmm1
  303. movsd 7 * SIZE(A2), %xmm15
  304. mulsd %xmm5, %xmm13
  305. addsd %xmm8, %xmm0
  306. mulsd %xmm6, %xmm10
  307. addsd %xmm12, %xmm1
  308. mulsd %xmm6, %xmm14
  309. addsd %xmm9, %xmm0
  310. mulsd %xmm7, %xmm11
  311. addsd %xmm13, %xmm1
  312. mulsd %xmm7, %xmm15
  313. addsd %xmm10, %xmm0
  314. addq $8 * SIZE, A1
  315. addsd %xmm14, %xmm1
  316. addq $8 * SIZE, A2
  317. addsd %xmm11, %xmm0
  318. addq $8 * SIZE, X1
  319. addsd %xmm15, %xmm1
  320. ALIGN_4
/* Remainder: 4, then 2, then 1 element (M mod 8). */
  321. .L14:
  322. testq $4, M
  323. je .L16
  324. movsd 0 * SIZE(X1), %xmm4
  325. movsd 0 * SIZE(A1), %xmm8
  326. movsd 0 * SIZE(A2), %xmm12
  327. movsd 1 * SIZE(X1), %xmm5
  328. movsd 1 * SIZE(A1), %xmm9
  329. movsd 1 * SIZE(A2), %xmm13
  330. movsd 2 * SIZE(X1), %xmm6
  331. movsd 2 * SIZE(A1), %xmm10
  332. movsd 2 * SIZE(A2), %xmm14
  333. movsd 3 * SIZE(X1), %xmm7
  334. movsd 3 * SIZE(A1), %xmm11
  335. movsd 3 * SIZE(A2), %xmm15
  336. mulsd %xmm4, %xmm8
  337. mulsd %xmm4, %xmm12
  338. mulsd %xmm5, %xmm9
  339. mulsd %xmm5, %xmm13
  340. addsd %xmm8, %xmm0
  341. addsd %xmm12, %xmm1
  342. addsd %xmm9, %xmm0
  343. addsd %xmm13, %xmm1
  344. mulsd %xmm6, %xmm10
  345. mulsd %xmm6, %xmm14
  346. mulsd %xmm7, %xmm11
  347. mulsd %xmm7, %xmm15
  348. addsd %xmm10, %xmm0
  349. addsd %xmm14, %xmm1
  350. addsd %xmm11, %xmm0
  351. addsd %xmm15, %xmm1
  352. addq $4 * SIZE, A1
  353. addq $4 * SIZE, A2
  354. addq $4 * SIZE, X1
  355. ALIGN_4
  356. .L16:
  357. testq $2, M
  358. je .L17
  359. movsd 0 * SIZE(X1), %xmm4
  360. movsd 0 * SIZE(A1), %xmm8
  361. movsd 0 * SIZE(A2), %xmm12
  362. movsd 1 * SIZE(X1), %xmm5
  363. movsd 1 * SIZE(A1), %xmm9
  364. movsd 1 * SIZE(A2), %xmm13
  365. mulsd %xmm4, %xmm8
  366. mulsd %xmm4, %xmm12
  367. mulsd %xmm5, %xmm9
  368. mulsd %xmm5, %xmm13
  369. addsd %xmm8, %xmm0
  370. addsd %xmm12, %xmm1
  371. addsd %xmm9, %xmm0
  372. addsd %xmm13, %xmm1
  373. addq $2 * SIZE, A1
  374. addq $2 * SIZE, A2
  375. addq $2 * SIZE, X1
  376. ALIGN_4
  377. .L17:
  378. testq $1, M
  379. je .L19
  380. movsd 0 * SIZE(X1), %xmm4
  381. movsd 0 * SIZE(A1), %xmm8
  382. movsd 0 * SIZE(A2), %xmm12
  383. mulsd %xmm4, %xmm8
  384. mulsd %xmm4, %xmm12
  385. addsd %xmm8, %xmm0
  386. addsd %xmm12, %xmm1
  387. ALIGN_4
/* Store back: y[j] += alpha * dot, y[j+1] += alpha * dot2.
   Y (read pointer) and Y1 (write pointer) advance in lock-step. */
  388. .L19:
  389. mulsd ALPHA, %xmm0
  390. addsd (Y), %xmm0
  391. addq INCY, Y
  392. mulsd ALPHA, %xmm1
  393. addsd (Y), %xmm1
  394. addq INCY, Y
  395. movsd %xmm0, (Y1)
  396. addq INCY, Y1
  397. movsd %xmm1, (Y1)
  398. addq INCY, Y1
  399. decq J
  400. jg .L11
  401. ALIGN_3
/* Odd-N tail: the single remaining column, same pipelined pattern with
   partial sums split across xmm0/xmm1 (combined at .L29). */
  402. .L20:
  403. testq $1, N
  404. jle .L999
  405. movq A, A1
  406. movq BUFFER, X1
  407. xorps %xmm0, %xmm0
  408. xorps %xmm1, %xmm1
  409. movq M, I
  410. sarq $3, I
  411. jle .L24
/* Warm-up: preload first 4 x/A pairs and start the multiplies. */
  412. movsd 0 * SIZE(X1), %xmm4
  413. movsd 0 * SIZE(A1), %xmm8
  414. movsd 1 * SIZE(X1), %xmm5
  415. movsd 1 * SIZE(A1), %xmm9
  416. movsd 2 * SIZE(X1), %xmm6
  417. movsd 2 * SIZE(A1), %xmm10
  418. movsd 3 * SIZE(X1), %xmm7
  419. movsd 3 * SIZE(A1), %xmm11
  420. mulsd %xmm4, %xmm8
  421. movsd 4 * SIZE(X1), %xmm4
  422. mulsd %xmm5, %xmm9
  423. movsd 5 * SIZE(X1), %xmm5
  424. mulsd %xmm6, %xmm10
  425. movsd 6 * SIZE(X1), %xmm6
  426. mulsd %xmm7, %xmm11
  427. movsd 7 * SIZE(X1), %xmm7
  428. decq I
  429. jle .L23
  430. ALIGN_3
/* Steady state: 8 elements per trip. */
  431. .L22:
  432. PREFETCH PREFETCH_SIZE * SIZE(A1)
  433. addsd %xmm8, %xmm0
  434. movsd 4 * SIZE(A1), %xmm8
  435. addsd %xmm9, %xmm0
  436. movsd 5 * SIZE(A1), %xmm9
  437. addsd %xmm10, %xmm0
  438. movsd 6 * SIZE(A1), %xmm10
  439. addsd %xmm11, %xmm0
  440. movsd 7 * SIZE(A1), %xmm11
  441. mulsd %xmm4, %xmm8
  442. movsd 8 * SIZE(X1), %xmm4
  443. mulsd %xmm5, %xmm9
  444. movsd 9 * SIZE(X1), %xmm5
  445. mulsd %xmm6, %xmm10
  446. movsd 10 * SIZE(X1), %xmm6
  447. mulsd %xmm7, %xmm11
  448. movsd 11 * SIZE(X1), %xmm7
  449. addsd %xmm8, %xmm0
  450. movsd 8 * SIZE(A1), %xmm8
  451. addsd %xmm9, %xmm1
  452. movsd 9 * SIZE(A1), %xmm9
  453. addsd %xmm10, %xmm1
  454. movsd 10 * SIZE(A1), %xmm10
  455. addsd %xmm11, %xmm0
  456. movsd 11 * SIZE(A1), %xmm11
  457. mulsd %xmm4, %xmm8
  458. movsd 12 * SIZE(X1), %xmm4
  459. mulsd %xmm5, %xmm9
  460. movsd 13 * SIZE(X1), %xmm5
  461. mulsd %xmm6, %xmm10
  462. movsd 14 * SIZE(X1), %xmm6
  463. mulsd %xmm7, %xmm11
  464. movsd 15 * SIZE(X1), %xmm7
  465. addq $8 * SIZE, A1
  466. addq $8 * SIZE, X1
  467. decq I
  468. jg .L22
  469. ALIGN_3
/* Pipeline drain for the final 8-element group. */
  470. .L23:
  471. addsd %xmm8, %xmm0
  472. movsd 4 * SIZE(A1), %xmm8
  473. addsd %xmm9, %xmm1
  474. movsd 5 * SIZE(A1), %xmm9
  475. addsd %xmm10, %xmm0
  476. movsd 6 * SIZE(A1), %xmm10
  477. addsd %xmm11, %xmm1
  478. movsd 7 * SIZE(A1), %xmm11
  479. mulsd %xmm4, %xmm8
  480. mulsd %xmm5, %xmm9
  481. mulsd %xmm6, %xmm10
  482. mulsd %xmm7, %xmm11
  483. addsd %xmm8, %xmm0
  484. addsd %xmm9, %xmm1
  485. addsd %xmm10, %xmm0
  486. addq $8 * SIZE, A1
  487. addsd %xmm11, %xmm1
  488. addq $8 * SIZE, X1
  489. ALIGN_4
/* Remainder: 4, then 2, then 1 element (M mod 8). */
  490. .L24:
  491. testq $4, M
  492. je .L26
  493. movsd 0 * SIZE(X1), %xmm4
  494. movsd 0 * SIZE(A1), %xmm8
  495. movsd 1 * SIZE(X1), %xmm5
  496. movsd 1 * SIZE(A1), %xmm9
  497. movsd 2 * SIZE(X1), %xmm6
  498. movsd 2 * SIZE(A1), %xmm10
  499. movsd 3 * SIZE(X1), %xmm7
  500. movsd 3 * SIZE(A1), %xmm11
  501. mulsd %xmm4, %xmm8
  502. mulsd %xmm5, %xmm9
  503. mulsd %xmm6, %xmm10
  504. mulsd %xmm7, %xmm11
  505. addsd %xmm8, %xmm0
  506. addsd %xmm9, %xmm1
  507. addsd %xmm10, %xmm0
  508. addq $4 * SIZE, A1
  509. addsd %xmm11, %xmm1
  510. addq $4 * SIZE, X1
  511. ALIGN_4
  512. .L26:
  513. testq $2, M
  514. je .L27
  515. movsd 0 * SIZE(X1), %xmm4
  516. movsd 0 * SIZE(A1), %xmm8
  517. movsd 1 * SIZE(X1), %xmm5
  518. movsd 1 * SIZE(A1), %xmm9
  519. mulsd %xmm4, %xmm8
  520. mulsd %xmm5, %xmm9
  521. addsd %xmm8, %xmm0
  522. addq $2 * SIZE, A1
  523. addsd %xmm9, %xmm1
  524. addq $2 * SIZE, X1
  525. ALIGN_4
  526. .L27:
  527. testq $1, M
  528. je .L29
  529. movsd 0 * SIZE(X1), %xmm4
  530. movsd 0 * SIZE(A1), %xmm8
  531. mulsd %xmm4, %xmm8
  532. addsd %xmm8, %xmm0
  533. ALIGN_4
/* Combine the two partial sums, scale by alpha, accumulate into y. */
  534. .L29:
  535. addsd %xmm1, %xmm0
  536. mulsd ALPHA, %xmm0
  537. addsd (Y), %xmm0
  538. movsd %xmm0, (Y1)
  539. ALIGN_3
/* Exit: restore callee-saved registers (mirror of the prologue) and return. */
  540. .L999:
  541. movq 0(%rsp), %rbx
  542. movq 8(%rsp), %rbp
  543. movq 16(%rsp), %r12
  544. movq 24(%rsp), %r13
  545. movq 32(%rsp), %r14
  546. movq 40(%rsp), %r15
  547. #ifdef WINDOWS_ABI
  548. movq 48(%rsp), %rdi
  549. movq 56(%rsp), %rsi
  550. movups 64(%rsp), %xmm6
  551. movups 80(%rsp), %xmm7
  552. movups 96(%rsp), %xmm8
  553. movups 112(%rsp), %xmm9
  554. movups 128(%rsp), %xmm10
  555. movups 144(%rsp), %xmm11
  556. movups 160(%rsp), %xmm12
  557. movups 176(%rsp), %xmm13
  558. movups 192(%rsp), %xmm14
  559. movups 208(%rsp), %xmm15
  560. #endif
  561. addq $STACKSIZE, %rsp
  562. ret
  563. ALIGN_3
  564. EPILOGUE