You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number, can include dashes ('-'), and can be up to 35 characters long.

axpy_sse2.S 15 kB

/* (removed: line-number gutter accidentally captured from the web code viewer) */
/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED  BY THE  UNIVERSITY OF  TEXAS AT    */
/*    AUSTIN  ``AS IS''  AND ANY  EXPRESS OR  IMPLIED WARRANTIES,    */
/*    INCLUDING, BUT  NOT LIMITED  TO, THE IMPLIED  WARRANTIES OF    */
/*    MERCHANTABILITY  AND FITNESS FOR  A PARTICULAR  PURPOSE ARE    */
/*    DISCLAIMED.  IN  NO EVENT SHALL THE UNIVERSITY  OF TEXAS AT    */
/*    AUSTIN OR CONTRIBUTORS BE  LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL,  SPECIAL, EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES    */
/*    (INCLUDING, BUT  NOT LIMITED TO,  PROCUREMENT OF SUBSTITUTE    */
/*    GOODS  OR  SERVICES; LOSS  OF  USE,  DATA,  OR PROFITS;  OR    */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER  IN CONTRACT, STRICT  LIABILITY, OR TORT    */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE)  ARISING IN ANY WAY OUT    */
/*    OF  THE  USE OF  THIS  SOFTWARE,  EVEN  IF ADVISED  OF  THE    */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/*    The views and conclusions contained in the software and        */
/*    documentation are those of the authors and should not be       */
/*    interpreted as representing official policies, either          */
/*    expressed or implied, of The University of Texas at Austin.    */
/*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #define STACK 16
  41. #define ARGS 0
  42. #define STACK_M 4 + STACK + ARGS(%esp)
  43. #define STACK_ALPHA 16 + STACK + ARGS(%esp)
  44. #define STACK_X 24 + STACK + ARGS(%esp)
  45. #define STACK_INCX 28 + STACK + ARGS(%esp)
  46. #define STACK_Y 32 + STACK + ARGS(%esp)
  47. #define STACK_INCY 36 + STACK + ARGS(%esp)
  48. #define M %ebx
  49. #define X %esi
  50. #define Y %edi
  51. #define INCX %ecx
  52. #define INCY %edx
  53. #define YY %ebp
  54. #define ALPHA %xmm7
  55. #include "l1param.h"
  56. PROLOGUE
  57. PROFCODE
  58. pushl %edi
  59. pushl %esi
  60. pushl %ebx
  61. pushl %ebp
  62. movl STACK_M, M
  63. movsd STACK_ALPHA, ALPHA
  64. movl STACK_X, X
  65. movl STACK_INCX, INCX
  66. movl STACK_Y, Y
  67. movl STACK_INCY, INCY
  68. unpcklpd ALPHA, ALPHA
  69. leal (, INCX, SIZE), INCX
  70. leal (, INCY, SIZE), INCY
  71. testl M, M
  72. jle .L47
  73. cmpl $SIZE, INCX
  74. jne .L40
  75. cmpl $SIZE, INCY
  76. jne .L40
  77. testl $SIZE, Y
  78. je .L10
  79. movsd (X), %xmm0
  80. mulsd ALPHA, %xmm0
  81. addsd (Y), %xmm0
  82. movsd %xmm0, (Y)
  83. addl $1 * SIZE, X
  84. addl $1 * SIZE, Y
  85. decl M
  86. jle .L19
  87. ALIGN_4
  88. .L10:
  89. subl $-16 * SIZE, X
  90. subl $-16 * SIZE, Y
  91. testl $SIZE, X
  92. jne .L20
  93. movl M, %eax
  94. sarl $4, %eax
  95. jle .L13
  96. movaps -16 * SIZE(X), %xmm0
  97. movaps -14 * SIZE(X), %xmm1
  98. movaps -12 * SIZE(X), %xmm2
  99. movaps -10 * SIZE(X), %xmm3
  100. decl %eax
  101. jle .L12
  102. ALIGN_3
  103. .L11:
  104. #ifdef PREFETCHW
  105. PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(Y)
  106. #endif
  107. mulpd ALPHA, %xmm0
  108. addpd -16 * SIZE(Y), %xmm0
  109. movaps %xmm0, -16 * SIZE(Y)
  110. movaps -8 * SIZE(X), %xmm0
  111. mulpd ALPHA, %xmm1
  112. addpd -14 * SIZE(Y), %xmm1
  113. movaps %xmm1, -14 * SIZE(Y)
  114. movaps -6 * SIZE(X), %xmm1
  115. #ifdef PREFETCH
  116. PREFETCH (PREFETCHSIZE + 0) - PREOFFSET(X)
  117. #endif
  118. mulpd ALPHA, %xmm2
  119. addpd -12 * SIZE(Y), %xmm2
  120. movaps %xmm2, -12 * SIZE(Y)
  121. movaps -4 * SIZE(X), %xmm2
  122. mulpd ALPHA, %xmm3
  123. addpd -10 * SIZE(Y), %xmm3
  124. movaps %xmm3, -10 * SIZE(Y)
  125. movaps -2 * SIZE(X), %xmm3
  126. #if defined(PREFETCHW) && !defined(FETCH128)
  127. PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(Y)
  128. #endif
  129. mulpd ALPHA, %xmm0
  130. addpd -8 * SIZE(Y), %xmm0
  131. movaps %xmm0, -8 * SIZE(Y)
  132. movaps 0 * SIZE(X), %xmm0
  133. mulpd ALPHA, %xmm1
  134. addpd -6 * SIZE(Y), %xmm1
  135. movaps %xmm1, -6 * SIZE(Y)
  136. movaps 2 * SIZE(X), %xmm1
  137. #if defined(PREFETCH) && !defined(FETCH128)
  138. PREFETCH (PREFETCHSIZE + 64) - PREOFFSET(X)
  139. #endif
  140. mulpd ALPHA, %xmm2
  141. addpd -4 * SIZE(Y), %xmm2
  142. movaps %xmm2, -4 * SIZE(Y)
  143. movaps 4 * SIZE(X), %xmm2
  144. mulpd ALPHA, %xmm3
  145. addpd -2 * SIZE(Y), %xmm3
  146. movaps %xmm3, -2 * SIZE(Y)
  147. movaps 6 * SIZE(X), %xmm3
  148. subl $-16 * SIZE, Y
  149. subl $-16 * SIZE, X
  150. decl %eax
  151. jg .L11
  152. ALIGN_3
  153. .L12:
  154. mulpd ALPHA, %xmm0
  155. addpd -16 * SIZE(Y), %xmm0
  156. movaps %xmm0, -16 * SIZE(Y)
  157. movaps -8 * SIZE(X), %xmm0
  158. mulpd ALPHA, %xmm1
  159. addpd -14 * SIZE(Y), %xmm1
  160. movaps %xmm1, -14 * SIZE(Y)
  161. movaps -6 * SIZE(X), %xmm1
  162. mulpd ALPHA, %xmm2
  163. addpd -12 * SIZE(Y), %xmm2
  164. movaps %xmm2, -12 * SIZE(Y)
  165. movaps -4 * SIZE(X), %xmm2
  166. mulpd ALPHA, %xmm3
  167. addpd -10 * SIZE(Y), %xmm3
  168. movaps %xmm3, -10 * SIZE(Y)
  169. movaps -2 * SIZE(X), %xmm3
  170. mulpd ALPHA, %xmm0
  171. addpd -8 * SIZE(Y), %xmm0
  172. movaps %xmm0, -8 * SIZE(Y)
  173. mulpd ALPHA, %xmm1
  174. addpd -6 * SIZE(Y), %xmm1
  175. movaps %xmm1, -6 * SIZE(Y)
  176. mulpd ALPHA, %xmm2
  177. addpd -4 * SIZE(Y), %xmm2
  178. movaps %xmm2, -4 * SIZE(Y)
  179. mulpd ALPHA, %xmm3
  180. addpd -2 * SIZE(Y), %xmm3
  181. movaps %xmm3, -2 * SIZE(Y)
  182. subl $-16 * SIZE, Y
  183. subl $-16 * SIZE, X
  184. ALIGN_3
  185. .L13:
  186. movl M, %eax
  187. andl $8, %eax
  188. jle .L14
  189. ALIGN_3
  190. movaps -16 * SIZE(X), %xmm0
  191. movaps -14 * SIZE(X), %xmm1
  192. movaps -12 * SIZE(X), %xmm2
  193. movaps -10 * SIZE(X), %xmm3
  194. mulpd ALPHA, %xmm0
  195. addpd -16 * SIZE(Y), %xmm0
  196. mulpd ALPHA, %xmm1
  197. addpd -14 * SIZE(Y), %xmm1
  198. mulpd ALPHA, %xmm2
  199. addpd -12 * SIZE(Y), %xmm2
  200. mulpd ALPHA, %xmm3
  201. addpd -10 * SIZE(Y), %xmm3
  202. movaps %xmm0, -16 * SIZE(Y)
  203. movaps %xmm1, -14 * SIZE(Y)
  204. movaps %xmm2, -12 * SIZE(Y)
  205. movaps %xmm3, -10 * SIZE(Y)
  206. addl $8 * SIZE, X
  207. addl $8 * SIZE, Y
  208. ALIGN_3
  209. .L14:
  210. movl M, %eax
  211. andl $4, %eax
  212. jle .L15
  213. ALIGN_3
  214. movaps -16 * SIZE(X), %xmm0
  215. movaps -14 * SIZE(X), %xmm1
  216. mulpd ALPHA, %xmm0
  217. mulpd ALPHA, %xmm1
  218. addpd -16 * SIZE(Y), %xmm0
  219. addpd -14 * SIZE(Y), %xmm1
  220. movaps %xmm0, -16 * SIZE(Y)
  221. movaps %xmm1, -14 * SIZE(Y)
  222. addl $4 * SIZE, X
  223. addl $4 * SIZE, Y
  224. ALIGN_3
  225. .L15:
  226. movl M, %eax
  227. andl $2, %eax
  228. jle .L16
  229. ALIGN_3
  230. movaps -16 * SIZE(X), %xmm0
  231. mulpd ALPHA, %xmm0
  232. addpd -16 * SIZE(Y), %xmm0
  233. movaps %xmm0, -16 * SIZE(Y)
  234. addl $2 * SIZE, X
  235. addl $2 * SIZE, Y
  236. ALIGN_3
  237. .L16:
  238. movl M, %eax
  239. andl $1, %eax
  240. jle .L19
  241. ALIGN_3
  242. movsd -16 * SIZE(X), %xmm0
  243. mulsd ALPHA, %xmm0
  244. addsd -16 * SIZE(Y), %xmm0
  245. movsd %xmm0, -16 * SIZE(Y)
  246. ALIGN_3
  247. .L19:
  248. popl %ebp
  249. popl %ebx
  250. popl %esi
  251. popl %edi
  252. ret
  253. ALIGN_3
  254. .L20:
  255. #ifdef ALIGNED_ACCESS
  256. movhps -16 * SIZE(X), %xmm0
  257. movl M, %eax
  258. sarl $4, %eax
  259. jle .L23
  260. movaps -15 * SIZE(X), %xmm1
  261. movaps -13 * SIZE(X), %xmm2
  262. movaps -11 * SIZE(X), %xmm3
  263. decl %eax
  264. jle .L22
  265. ALIGN_4
  266. .L21:
  267. #ifdef PREFETCHW
  268. PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(Y)
  269. #endif
  270. SHUFPD_1 %xmm1, %xmm0
  271. mulpd ALPHA, %xmm0
  272. addpd -16 * SIZE(Y), %xmm0
  273. movaps %xmm0, -16 * SIZE(Y)
  274. movaps -9 * SIZE(X), %xmm0
  275. SHUFPD_1 %xmm2, %xmm1
  276. mulpd ALPHA, %xmm1
  277. addpd -14 * SIZE(Y), %xmm1
  278. movaps %xmm1, -14 * SIZE(Y)
  279. movaps -7 * SIZE(X), %xmm1
  280. #ifdef PREFETCH
  281. PREFETCH (PREFETCHSIZE + 0) - PREOFFSET(X)
  282. #endif
  283. SHUFPD_1 %xmm3, %xmm2
  284. mulpd ALPHA, %xmm2
  285. addpd -12 * SIZE(Y), %xmm2
  286. movaps %xmm2, -12 * SIZE(Y)
  287. movaps -5 * SIZE(X), %xmm2
  288. SHUFPD_1 %xmm0, %xmm3
  289. mulpd ALPHA, %xmm3
  290. addpd -10 * SIZE(Y), %xmm3
  291. movaps %xmm3, -10 * SIZE(Y)
  292. movaps -3 * SIZE(X), %xmm3
  293. #if defined(PREFETCHW) && !defined(FETCH128)
  294. PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(Y)
  295. #endif
  296. SHUFPD_1 %xmm1, %xmm0
  297. mulpd ALPHA, %xmm0
  298. addpd -8 * SIZE(Y), %xmm0
  299. movaps %xmm0, -8 * SIZE(Y)
  300. movaps -1 * SIZE(X), %xmm0
  301. SHUFPD_1 %xmm2, %xmm1
  302. mulpd ALPHA, %xmm1
  303. addpd -6 * SIZE(Y), %xmm1
  304. movaps %xmm1, -6 * SIZE(Y)
  305. movaps 1 * SIZE(X), %xmm1
  306. #if defined(PREFETCH) && !defined(FETCH128)
  307. PREFETCH (PREFETCHSIZE + 64) - PREOFFSET(X)
  308. #endif
  309. SHUFPD_1 %xmm3, %xmm2
  310. mulpd ALPHA, %xmm2
  311. addpd -4 * SIZE(Y), %xmm2
  312. movaps %xmm2, -4 * SIZE(Y)
  313. movaps 3 * SIZE(X), %xmm2
  314. SHUFPD_1 %xmm0, %xmm3
  315. mulpd ALPHA, %xmm3
  316. addpd -2 * SIZE(Y), %xmm3
  317. movaps %xmm3, -2 * SIZE(Y)
  318. movaps 5 * SIZE(X), %xmm3
  319. subl $-16 * SIZE, X
  320. subl $-16 * SIZE, Y
  321. decl %eax
  322. jg .L21
  323. ALIGN_3
  324. .L22:
  325. SHUFPD_1 %xmm1, %xmm0
  326. mulpd ALPHA, %xmm0
  327. addpd -16 * SIZE(Y), %xmm0
  328. movaps %xmm0, -16 * SIZE(Y)
  329. movaps -9 * SIZE(X), %xmm0
  330. SHUFPD_1 %xmm2, %xmm1
  331. mulpd ALPHA, %xmm1
  332. addpd -14 * SIZE(Y), %xmm1
  333. movaps %xmm1, -14 * SIZE(Y)
  334. movaps -7 * SIZE(X), %xmm1
  335. SHUFPD_1 %xmm3, %xmm2
  336. mulpd ALPHA, %xmm2
  337. addpd -12 * SIZE(Y), %xmm2
  338. movaps %xmm2, -12 * SIZE(Y)
  339. movaps -5 * SIZE(X), %xmm2
  340. SHUFPD_1 %xmm0, %xmm3
  341. mulpd ALPHA, %xmm3
  342. addpd -10 * SIZE(Y), %xmm3
  343. movaps %xmm3, -10 * SIZE(Y)
  344. movaps -3 * SIZE(X), %xmm3
  345. SHUFPD_1 %xmm1, %xmm0
  346. mulpd ALPHA, %xmm0
  347. addpd -8 * SIZE(Y), %xmm0
  348. movaps %xmm0, -8 * SIZE(Y)
  349. movaps -1 * SIZE(X), %xmm0
  350. SHUFPD_1 %xmm2, %xmm1
  351. mulpd ALPHA, %xmm1
  352. addpd -6 * SIZE(Y), %xmm1
  353. movaps %xmm1, -6 * SIZE(Y)
  354. SHUFPD_1 %xmm3, %xmm2
  355. mulpd ALPHA, %xmm2
  356. addpd -4 * SIZE(Y), %xmm2
  357. movaps %xmm2, -4 * SIZE(Y)
  358. SHUFPD_1 %xmm0, %xmm3
  359. mulpd ALPHA, %xmm3
  360. addpd -2 * SIZE(Y), %xmm3
  361. movaps %xmm3, -2 * SIZE(Y)
  362. subl $-16 * SIZE, X
  363. subl $-16 * SIZE, Y
  364. ALIGN_3
  365. .L23:
  366. movl M, %eax
  367. andl $8, %eax
  368. jle .L24
  369. ALIGN_3
  370. movaps -15 * SIZE(X), %xmm1
  371. movaps -13 * SIZE(X), %xmm2
  372. movaps -11 * SIZE(X), %xmm3
  373. movaps -9 * SIZE(X), %xmm4
  374. SHUFPD_1 %xmm1, %xmm0
  375. mulpd ALPHA, %xmm0
  376. addpd -16 * SIZE(Y), %xmm0
  377. movaps %xmm0, -16 * SIZE(Y)
  378. SHUFPD_1 %xmm2, %xmm1
  379. mulpd ALPHA, %xmm1
  380. addpd -14 * SIZE(Y), %xmm1
  381. movaps %xmm1, -14 * SIZE(Y)
  382. SHUFPD_1 %xmm3, %xmm2
  383. mulpd ALPHA, %xmm2
  384. addpd -12 * SIZE(Y), %xmm2
  385. movaps %xmm2, -12 * SIZE(Y)
  386. SHUFPD_1 %xmm4, %xmm3
  387. mulpd ALPHA, %xmm3
  388. addpd -10 * SIZE(Y), %xmm3
  389. movaps %xmm3, -10 * SIZE(Y)
  390. movaps %xmm4, %xmm0
  391. addl $8 * SIZE, X
  392. addl $8 * SIZE, Y
  393. ALIGN_3
  394. .L24:
  395. movl M, %eax
  396. andl $4, %eax
  397. jle .L25
  398. ALIGN_3
  399. movaps -15 * SIZE(X), %xmm1
  400. movaps -13 * SIZE(X), %xmm2
  401. SHUFPD_1 %xmm1, %xmm0
  402. SHUFPD_1 %xmm2, %xmm1
  403. mulpd ALPHA, %xmm0
  404. mulpd ALPHA, %xmm1
  405. addpd -16 * SIZE(Y), %xmm0
  406. addpd -14 * SIZE(Y), %xmm1
  407. movaps %xmm0, -16 * SIZE(Y)
  408. movaps %xmm1, -14 * SIZE(Y)
  409. movaps %xmm2, %xmm0
  410. addl $4 * SIZE, X
  411. addl $4 * SIZE, Y
  412. ALIGN_3
  413. .L25:
  414. movl M, %eax
  415. andl $2, %eax
  416. jle .L26
  417. ALIGN_3
  418. movaps -15 * SIZE(X), %xmm1
  419. SHUFPD_1 %xmm1, %xmm0
  420. mulpd ALPHA, %xmm0
  421. addpd -16 * SIZE(Y), %xmm0
  422. movaps %xmm0, -16 * SIZE(Y)
  423. addl $2 * SIZE, X
  424. addl $2 * SIZE, Y
  425. ALIGN_3
  426. .L26:
  427. movl M, %eax
  428. andl $1, %eax
  429. jle .L29
  430. ALIGN_3
  431. movsd -16 * SIZE(X), %xmm0
  432. mulsd ALPHA, %xmm0
  433. addsd -16 * SIZE(Y), %xmm0
  434. movsd %xmm0, -16 * SIZE(Y)
  435. ALIGN_3
  436. .L29:
  437. popl %ebp
  438. popl %ebx
  439. popl %esi
  440. popl %edi
  441. ret
  442. ALIGN_3
  443. #else
  444. movl M, %eax
  445. sarl $3, %eax
  446. jle .L23
  447. movsd -16 * SIZE(X), %xmm0
  448. movhps -15 * SIZE(X), %xmm0
  449. movsd -14 * SIZE(X), %xmm1
  450. movhps -13 * SIZE(X), %xmm1
  451. movsd -12 * SIZE(X), %xmm2
  452. movhps -11 * SIZE(X), %xmm2
  453. movsd -10 * SIZE(X), %xmm3
  454. movhps -9 * SIZE(X), %xmm3
  455. decl %eax
  456. jle .L22
  457. ALIGN_3
  458. .L21:
  459. #ifdef PREFETCHW
  460. PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(Y)
  461. #endif
  462. mulpd ALPHA, %xmm0
  463. addpd -16 * SIZE(Y), %xmm0
  464. movaps %xmm0, -16 * SIZE(Y)
  465. movsd -8 * SIZE(X), %xmm0
  466. movhps -7 * SIZE(X), %xmm0
  467. mulpd ALPHA, %xmm1
  468. addpd -14 * SIZE(Y), %xmm1
  469. movaps %xmm1, -14 * SIZE(Y)
  470. movsd -6 * SIZE(X), %xmm1
  471. movhps -5 * SIZE(X), %xmm1
  472. #ifdef PREFETCH
  473. PREFETCH (PREFETCHSIZE + 0) - PREOFFSET(X)
  474. #endif
  475. mulpd ALPHA, %xmm2
  476. addpd -12 * SIZE(Y), %xmm2
  477. movaps %xmm2, -12 * SIZE(Y)
  478. movsd -4 * SIZE(X), %xmm2
  479. movhps -3 * SIZE(X), %xmm2
  480. mulpd ALPHA, %xmm3
  481. addpd -10 * SIZE(Y), %xmm3
  482. movaps %xmm3, -10 * SIZE(Y)
  483. movsd -2 * SIZE(X), %xmm3
  484. movhps -1 * SIZE(X), %xmm3
  485. subl $-8 * SIZE, Y
  486. subl $-8 * SIZE, X
  487. decl %eax
  488. jg .L21
  489. ALIGN_3
  490. .L22:
  491. mulpd ALPHA, %xmm0
  492. addpd -16 * SIZE(Y), %xmm0
  493. movaps %xmm0, -16 * SIZE(Y)
  494. mulpd ALPHA, %xmm1
  495. addpd -14 * SIZE(Y), %xmm1
  496. movaps %xmm1, -14 * SIZE(Y)
  497. mulpd ALPHA, %xmm2
  498. addpd -12 * SIZE(Y), %xmm2
  499. movaps %xmm2, -12 * SIZE(Y)
  500. mulpd ALPHA, %xmm3
  501. addpd -10 * SIZE(Y), %xmm3
  502. movaps %xmm3, -10 * SIZE(Y)
  503. subl $-8 * SIZE, Y
  504. subl $-8 * SIZE, X
  505. ALIGN_3
  506. .L23:
  507. movl M, %eax
  508. andl $4, %eax
  509. jle .L25
  510. ALIGN_3
  511. movsd -16 * SIZE(X), %xmm0
  512. movhps -15 * SIZE(X), %xmm0
  513. movsd -14 * SIZE(X), %xmm1
  514. movhps -13 * SIZE(X), %xmm1
  515. mulpd ALPHA, %xmm0
  516. mulpd ALPHA, %xmm1
  517. addpd -16 * SIZE(Y), %xmm0
  518. addpd -14 * SIZE(Y), %xmm1
  519. movaps %xmm0, -16 * SIZE(Y)
  520. movaps %xmm1, -14 * SIZE(Y)
  521. addl $4 * SIZE, X
  522. addl $4 * SIZE, Y
  523. ALIGN_3
  524. .L25:
  525. movl M, %eax
  526. andl $2, %eax
  527. jle .L26
  528. ALIGN_3
  529. movsd -16 * SIZE(X), %xmm0
  530. movhps -15 * SIZE(X), %xmm0
  531. mulpd ALPHA, %xmm0
  532. addpd -16 * SIZE(Y), %xmm0
  533. movaps %xmm0, -16 * SIZE(Y)
  534. addl $2 * SIZE, X
  535. addl $2 * SIZE, Y
  536. ALIGN_3
  537. .L26:
  538. movl M, %eax
  539. andl $1, %eax
  540. jle .L29
  541. ALIGN_3
  542. movsd -16 * SIZE(X), %xmm0
  543. mulsd ALPHA, %xmm0
  544. addsd -16 * SIZE(Y), %xmm0
  545. movsd %xmm0, -16 * SIZE(Y)
  546. ALIGN_3
  547. .L29:
  548. popl %ebp
  549. popl %ebx
  550. popl %esi
  551. popl %edi
  552. ret
  553. ALIGN_3
  554. #endif
  555. .L40:
  556. movl Y, YY
  557. movl M, %eax
  558. //If incx==0 || incy==0, avoid unloop.
  559. cmpl $0, INCX
  560. je .L46
  561. cmpl $0, INCY
  562. je .L46
  563. sarl $3, %eax
  564. jle .L45
  565. ALIGN_3
  566. .L41:
  567. movsd 0 * SIZE(X), %xmm0
  568. addl INCX, X
  569. movhpd 0 * SIZE(X), %xmm0
  570. addl INCX, X
  571. mulpd ALPHA, %xmm0
  572. movsd 0 * SIZE(YY), %xmm6
  573. addl INCY, YY
  574. movhpd 0 * SIZE(YY), %xmm6
  575. addl INCY, YY
  576. addpd %xmm6, %xmm0
  577. movsd 0 * SIZE(X), %xmm1
  578. addl INCX, X
  579. movhpd 0 * SIZE(X), %xmm1
  580. addl INCX, X
  581. mulpd ALPHA, %xmm1
  582. movsd 0 * SIZE(YY), %xmm6
  583. addl INCY, YY
  584. movhpd 0 * SIZE(YY), %xmm6
  585. addl INCY, YY
  586. addpd %xmm6, %xmm1
  587. movsd 0 * SIZE(X), %xmm2
  588. addl INCX, X
  589. movhpd 0 * SIZE(X), %xmm2
  590. addl INCX, X
  591. mulpd ALPHA, %xmm2
  592. movsd 0 * SIZE(YY), %xmm6
  593. addl INCY, YY
  594. movhpd 0 * SIZE(YY), %xmm6
  595. addl INCY, YY
  596. addpd %xmm6, %xmm2
  597. movsd 0 * SIZE(X), %xmm3
  598. addl INCX, X
  599. movhpd 0 * SIZE(X), %xmm3
  600. addl INCX, X
  601. mulpd ALPHA, %xmm3
  602. movsd 0 * SIZE(YY), %xmm6
  603. addl INCY, YY
  604. movhpd 0 * SIZE(YY), %xmm6
  605. addl INCY, YY
  606. addpd %xmm6, %xmm3
  607. movsd %xmm0, 0 * SIZE(Y)
  608. addl INCY, Y
  609. movhpd %xmm0, 0 * SIZE(Y)
  610. addl INCY, Y
  611. movsd %xmm1, 0 * SIZE(Y)
  612. addl INCY, Y
  613. movhpd %xmm1, 0 * SIZE(Y)
  614. addl INCY, Y
  615. movsd %xmm2, 0 * SIZE(Y)
  616. addl INCY, Y
  617. movhpd %xmm2, 0 * SIZE(Y)
  618. addl INCY, Y
  619. movsd %xmm3, 0 * SIZE(Y)
  620. addl INCY, Y
  621. movhpd %xmm3, 0 * SIZE(Y)
  622. addl INCY, Y
  623. decl %eax
  624. jg .L41
  625. ALIGN_3
  626. .L45:
  627. movl M, %eax
  628. andl $7, %eax
  629. jle .L47
  630. ALIGN_3
  631. .L46:
  632. movsd (X), %xmm0
  633. addl INCX, X
  634. mulsd ALPHA, %xmm0
  635. addsd (Y), %xmm0
  636. movsd %xmm0, (Y)
  637. addl INCY, Y
  638. decl %eax
  639. jg .L46
  640. ALIGN_3
  641. .L47:
  642. popl %ebp
  643. popl %ebx
  644. popl %esi
  645. popl %edi
  646. ret
  647. EPILOGUE