
copy_sse2.S 13 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or       */
/* without modification, are permitted provided that the following  */
/* conditions are met:                                              */
/*                                                                   */
/*   1. Redistributions of source code must retain the above        */
/*      copyright notice, this list of conditions and the following */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above     */
/*      copyright notice, this list of conditions and the following */
/*      disclaimer in the documentation and/or other materials      */
/*      provided with the distribution.                             */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT      */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,       */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF      */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE      */
/*    DISCLAIMED.  IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT     */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES      */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE     */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR          */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT     */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT    */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE           */
/*    POSSIBILITY OF SUCH DAMAGE.                                   */
/*                                                                   */
/*    The views and conclusions contained in the software and       */
/*    documentation are those of the authors and should not be      */
/*    interpreted as representing official policies, either expressed */
/*    or implied, of The University of Texas at Austin.             */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#define M    ARG1   /* rdi */
#define X    ARG2   /* rsi */
#define INCX ARG3   /* rdx */
#define Y    ARG4   /* rcx */

#ifndef WINDOWS_ABI
#define INCY ARG5   /* r8  */
#else
#define INCY %r10
#endif

#include "l1param.h"

/* LOAD(offset, addr, reg): 16-byte load from offset(addr) into reg.
   On Opteron it is expressed as xorps + addpd rather than movups. */
#ifdef OPTERON
#define LOAD(OFFSET, ADDR, REG) xorps REG, REG; addpd OFFSET(ADDR), REG
#else
#define LOAD(OFFSET, ADDR, REG) movups OFFSET(ADDR), REG
#endif
        PROLOGUE
        PROFCODE

#ifdef WINDOWS_ABI
        movq 40(%rsp), INCY
#endif

        SAVEREGISTERS

        /* Convert the strides from elements to bytes. */
        leaq (, INCX, SIZE), INCX
        leaq (, INCY, SIZE), INCY

        /* Take the general strided path unless both strides are 1. */
        cmpq $SIZE, INCX
        jne .L40
        cmpq $SIZE, INCY
        jne .L40

        /* Copy one leading element if needed so the main loop starts on a
           16-byte boundary. */
#ifdef ALIGNED_ACCESS
        testq $SIZE, Y
#else
        testq $SIZE, X
#endif
        je .L10

        movsd (X), %xmm0
        movsd %xmm0, (Y)

        addq $1 * SIZE, X
        addq $1 * SIZE, Y
        decq M
        jle .L19
        ALIGN_4
.L10:
        subq $-16 * SIZE, X
        subq $-16 * SIZE, Y

#ifdef ALIGNED_ACCESS
        testq $SIZE, X
#else
        testq $SIZE, Y
#endif
        jne .L20

        /* Unit stride, X and Y equally aligned: copy 16 doubles per
           iteration of the main loop. */
        movq M, %rax
        sarq $4, %rax
        jle .L13

        movups -16 * SIZE(X), %xmm0
        movups -14 * SIZE(X), %xmm1
        movups -12 * SIZE(X), %xmm2
        movups -10 * SIZE(X), %xmm3
        movups -8 * SIZE(X), %xmm4
        movups -6 * SIZE(X), %xmm5
        movups -4 * SIZE(X), %xmm6
        movups -2 * SIZE(X), %xmm7

        decq %rax
        jle .L12
        ALIGN_3

.L11:
#ifdef PREFETCHW
        PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(Y)
#endif

        movups %xmm0, -16 * SIZE(Y)
        LOAD( 0 * SIZE, X, %xmm0)
        movups %xmm1, -14 * SIZE(Y)
        LOAD( 2 * SIZE, X, %xmm1)

#ifdef PREFETCH
        PREFETCH (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif

        movups %xmm2, -12 * SIZE(Y)
        LOAD( 4 * SIZE, X, %xmm2)
        movups %xmm3, -10 * SIZE(Y)
        LOAD( 6 * SIZE, X, %xmm3)

#if defined(PREFETCHW) && !defined(FETCH128)
        PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(Y)
#endif

        movups %xmm4, -8 * SIZE(Y)
        LOAD( 8 * SIZE, X, %xmm4)
        movups %xmm5, -6 * SIZE(Y)
        LOAD(10 * SIZE, X, %xmm5)

#if defined(PREFETCH) && !defined(FETCH128)
        PREFETCH (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif

        movups %xmm6, -4 * SIZE(Y)
        LOAD(12 * SIZE, X, %xmm6)
        movups %xmm7, -2 * SIZE(Y)
        LOAD(14 * SIZE, X, %xmm7)

        subq $-16 * SIZE, Y
        subq $-16 * SIZE, X
        decq %rax
        jg .L11
        ALIGN_3

.L12:
        /* Drain the last set of registers loaded by the main loop. */
        movups %xmm0, -16 * SIZE(Y)
        movups %xmm1, -14 * SIZE(Y)
        movups %xmm2, -12 * SIZE(Y)
        movups %xmm3, -10 * SIZE(Y)
        movups %xmm4, -8 * SIZE(Y)
        movups %xmm5, -6 * SIZE(Y)
        movups %xmm6, -4 * SIZE(Y)
        movups %xmm7, -2 * SIZE(Y)

        subq $-16 * SIZE, Y
        subq $-16 * SIZE, X
        ALIGN_3

.L13:
        /* Remainder: copy 8, 4, 2, then 1 element as needed. */
        testq $8, M
        jle .L14
        ALIGN_3

        movups -16 * SIZE(X), %xmm0
        movups -14 * SIZE(X), %xmm1
        movups -12 * SIZE(X), %xmm2
        movups -10 * SIZE(X), %xmm3

        movups %xmm0, -16 * SIZE(Y)
        movups %xmm1, -14 * SIZE(Y)
        movups %xmm2, -12 * SIZE(Y)
        movups %xmm3, -10 * SIZE(Y)

        addq $8 * SIZE, X
        addq $8 * SIZE, Y
        ALIGN_3

.L14:
        testq $4, M
        jle .L15
        ALIGN_3

        movups -16 * SIZE(X), %xmm0
        movups -14 * SIZE(X), %xmm1

        movups %xmm0, -16 * SIZE(Y)
        movups %xmm1, -14 * SIZE(Y)

        addq $4 * SIZE, X
        addq $4 * SIZE, Y
        ALIGN_3

.L15:
        testq $2, M
        jle .L16
        ALIGN_3

        movups -16 * SIZE(X), %xmm0
        movups %xmm0, -16 * SIZE(Y)

        addq $2 * SIZE, X
        addq $2 * SIZE, Y
        ALIGN_3

.L16:
        testq $1, M
        jle .L19
        ALIGN_3

        movsd -16 * SIZE(X), %xmm0
        movsd %xmm0, -16 * SIZE(Y)
        ALIGN_3

.L19:
        xorq %rax, %rax
        RESTOREREGISTERS
        ret
        ALIGN_3
.L20:
#ifdef ALIGNED_ACCESS
        /* X and Y are misaligned relative to each other: load from X and
           recombine halves with SHUFPD so each store to Y writes a full
           16-byte pair. */
        movhps -16 * SIZE(X), %xmm0

        movq M, %rax
        sarq $4, %rax
        jle .L23

        movups -15 * SIZE(X), %xmm1
        movups -13 * SIZE(X), %xmm2
        movups -11 * SIZE(X), %xmm3
        movups -9 * SIZE(X), %xmm4
        movups -7 * SIZE(X), %xmm5
        movups -5 * SIZE(X), %xmm6
        movups -3 * SIZE(X), %xmm7

        decq %rax
        jle .L22
        ALIGN_4

.L21:
#ifdef PREFETCHW
        PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(Y)
#endif

        SHUFPD_1 %xmm1, %xmm0
        movups %xmm0, -16 * SIZE(Y)
        LOAD(-1 * SIZE, X, %xmm0)

        SHUFPD_1 %xmm2, %xmm1
        movups %xmm1, -14 * SIZE(Y)
        LOAD( 1 * SIZE, X, %xmm1)

#ifdef PREFETCH
        PREFETCH (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif

        SHUFPD_1 %xmm3, %xmm2
        movups %xmm2, -12 * SIZE(Y)
        LOAD( 3 * SIZE, X, %xmm2)

        SHUFPD_1 %xmm4, %xmm3
        movups %xmm3, -10 * SIZE(Y)
        LOAD( 5 * SIZE, X, %xmm3)

#if defined(PREFETCHW) && !defined(FETCH128)
        PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(Y)
#endif

        SHUFPD_1 %xmm5, %xmm4
        movups %xmm4, -8 * SIZE(Y)
        LOAD( 7 * SIZE, X, %xmm4)

        SHUFPD_1 %xmm6, %xmm5
        movups %xmm5, -6 * SIZE(Y)
        LOAD( 9 * SIZE, X, %xmm5)

#if defined(PREFETCH) && !defined(FETCH128)
        PREFETCH (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif

        SHUFPD_1 %xmm7, %xmm6
        movups %xmm6, -4 * SIZE(Y)
        LOAD(11 * SIZE, X, %xmm6)

        SHUFPD_1 %xmm0, %xmm7
        movups %xmm7, -2 * SIZE(Y)
        LOAD(13 * SIZE, X, %xmm7)

        subq $-16 * SIZE, X
        subq $-16 * SIZE, Y
        decq %rax
        jg .L21
        ALIGN_3

.L22:
        SHUFPD_1 %xmm1, %xmm0
        movups %xmm0, -16 * SIZE(Y)
        LOAD(-1 * SIZE, X, %xmm0)

        SHUFPD_1 %xmm2, %xmm1
        movups %xmm1, -14 * SIZE(Y)
        SHUFPD_1 %xmm3, %xmm2
        movups %xmm2, -12 * SIZE(Y)
        SHUFPD_1 %xmm4, %xmm3
        movups %xmm3, -10 * SIZE(Y)
        SHUFPD_1 %xmm5, %xmm4
        movups %xmm4, -8 * SIZE(Y)
        SHUFPD_1 %xmm6, %xmm5
        movups %xmm5, -6 * SIZE(Y)
        SHUFPD_1 %xmm7, %xmm6
        movups %xmm6, -4 * SIZE(Y)
        SHUFPD_1 %xmm0, %xmm7
        movups %xmm7, -2 * SIZE(Y)

        subq $-16 * SIZE, X
        subq $-16 * SIZE, Y
        ALIGN_3

.L23:
        /* Remainder of the shifted path: 8, 4, 2, then 1 element. */
        testq $8, M
        jle .L24
        ALIGN_3

        movups -15 * SIZE(X), %xmm1
        movups -13 * SIZE(X), %xmm2
        movups -11 * SIZE(X), %xmm3
        movups -9 * SIZE(X), %xmm8

        SHUFPD_1 %xmm1, %xmm0
        movups %xmm0, -16 * SIZE(Y)
        SHUFPD_1 %xmm2, %xmm1
        movups %xmm1, -14 * SIZE(Y)
        SHUFPD_1 %xmm3, %xmm2
        movups %xmm2, -12 * SIZE(Y)
        SHUFPD_1 %xmm8, %xmm3
        movups %xmm3, -10 * SIZE(Y)

        movups %xmm8, %xmm0

        addq $8 * SIZE, X
        addq $8 * SIZE, Y
        ALIGN_3

.L24:
        testq $4, M
        jle .L25
        ALIGN_3

        movups -15 * SIZE(X), %xmm1
        movups -13 * SIZE(X), %xmm2

        SHUFPD_1 %xmm1, %xmm0
        SHUFPD_1 %xmm2, %xmm1

        movups %xmm0, -16 * SIZE(Y)
        movups %xmm1, -14 * SIZE(Y)
        movups %xmm2, %xmm0

        addq $4 * SIZE, X
        addq $4 * SIZE, Y
        ALIGN_3

.L25:
        testq $2, M
        jle .L26
        ALIGN_3

        movups -15 * SIZE(X), %xmm1
        SHUFPD_1 %xmm1, %xmm0
        movups %xmm0, -16 * SIZE(Y)

        addq $2 * SIZE, X
        addq $2 * SIZE, Y
        ALIGN_3

.L26:
        testq $1, M
        jle .L29
        ALIGN_3

        movsd -16 * SIZE(X), %xmm0
        movsd %xmm0, -16 * SIZE(Y)
        ALIGN_3

.L29:
        xorq %rax, %rax
        RESTOREREGISTERS
        ret
        ALIGN_3
#else
        /* ALIGNED_ACCESS is not defined: write each pair as two 8-byte
           halves (movlps/movhps), so store alignment does not matter. */
        movq M, %rax
        sarq $4, %rax
        jle .L23

        movups -16 * SIZE(X), %xmm0
        movups -14 * SIZE(X), %xmm1
        movups -12 * SIZE(X), %xmm2
        movups -10 * SIZE(X), %xmm3
        movups -8 * SIZE(X), %xmm4
        movups -6 * SIZE(X), %xmm5
        movups -4 * SIZE(X), %xmm6
        movups -2 * SIZE(X), %xmm7

        decq %rax
        jle .L22
        ALIGN_3

.L21:
#ifdef PREFETCHW
        PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(Y)
#endif

        movlps %xmm0, -16 * SIZE(Y)
        movhps %xmm0, -15 * SIZE(Y)
        LOAD( 0 * SIZE, X, %xmm0)
        movlps %xmm1, -14 * SIZE(Y)
        movhps %xmm1, -13 * SIZE(Y)
        LOAD( 2 * SIZE, X, %xmm1)

#ifdef PREFETCH
        PREFETCH (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif

        movlps %xmm2, -12 * SIZE(Y)
        movhps %xmm2, -11 * SIZE(Y)
        LOAD( 4 * SIZE, X, %xmm2)
        movlps %xmm3, -10 * SIZE(Y)
        movhps %xmm3, -9 * SIZE(Y)
        LOAD( 6 * SIZE, X, %xmm3)

#if defined(PREFETCHW) && !defined(FETCH128)
        PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(Y)
#endif

        movlps %xmm4, -8 * SIZE(Y)
        movhps %xmm4, -7 * SIZE(Y)
        LOAD( 8 * SIZE, X, %xmm4)
        movlps %xmm5, -6 * SIZE(Y)
        movhps %xmm5, -5 * SIZE(Y)
        LOAD(10 * SIZE, X, %xmm5)

#if defined(PREFETCH) && !defined(FETCH128)
        PREFETCH (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif

        movlps %xmm6, -4 * SIZE(Y)
        movhps %xmm6, -3 * SIZE(Y)
        LOAD(12 * SIZE, X, %xmm6)
        movlps %xmm7, -2 * SIZE(Y)
        movhps %xmm7, -1 * SIZE(Y)
        LOAD(14 * SIZE, X, %xmm7)

        subq $-16 * SIZE, Y
        subq $-16 * SIZE, X
        decq %rax
        jg .L21
        ALIGN_3

.L22:
        movlps %xmm0, -16 * SIZE(Y)
        movhps %xmm0, -15 * SIZE(Y)
        movlps %xmm1, -14 * SIZE(Y)
        movhps %xmm1, -13 * SIZE(Y)
        movlps %xmm2, -12 * SIZE(Y)
        movhps %xmm2, -11 * SIZE(Y)
        movlps %xmm3, -10 * SIZE(Y)
        movhps %xmm3, -9 * SIZE(Y)
        movlps %xmm4, -8 * SIZE(Y)
        movhps %xmm4, -7 * SIZE(Y)
        movlps %xmm5, -6 * SIZE(Y)
        movhps %xmm5, -5 * SIZE(Y)
        movlps %xmm6, -4 * SIZE(Y)
        movhps %xmm6, -3 * SIZE(Y)
        movlps %xmm7, -2 * SIZE(Y)
        movhps %xmm7, -1 * SIZE(Y)

        subq $-16 * SIZE, Y
        subq $-16 * SIZE, X
        ALIGN_3

.L23:
        /* Remainder: 8, 4, 2, then 1 element. */
        testq $8, M
        jle .L24
        ALIGN_3

        movups -16 * SIZE(X), %xmm0
        movlps %xmm0, -16 * SIZE(Y)
        movhps %xmm0, -15 * SIZE(Y)
        movups -14 * SIZE(X), %xmm1
        movlps %xmm1, -14 * SIZE(Y)
        movhps %xmm1, -13 * SIZE(Y)
        movups -12 * SIZE(X), %xmm2
        movlps %xmm2, -12 * SIZE(Y)
        movhps %xmm2, -11 * SIZE(Y)
        movups -10 * SIZE(X), %xmm3
        movlps %xmm3, -10 * SIZE(Y)
        movhps %xmm3, -9 * SIZE(Y)

        addq $8 * SIZE, X
        addq $8 * SIZE, Y
        ALIGN_3

.L24:
        testq $4, M
        jle .L25
        ALIGN_3

        movups -16 * SIZE(X), %xmm0
        movlps %xmm0, -16 * SIZE(Y)
        movhps %xmm0, -15 * SIZE(Y)
        movups -14 * SIZE(X), %xmm1
        movlps %xmm1, -14 * SIZE(Y)
        movhps %xmm1, -13 * SIZE(Y)

        addq $4 * SIZE, X
        addq $4 * SIZE, Y
        ALIGN_3

.L25:
        testq $2, M
        jle .L26
        ALIGN_3

        movups -16 * SIZE(X), %xmm0
        movlps %xmm0, -16 * SIZE(Y)
        movhps %xmm0, -15 * SIZE(Y)

        addq $2 * SIZE, X
        addq $2 * SIZE, Y
        ALIGN_3

.L26:
        testq $1, M
        jle .L29
        ALIGN_3

        movsd -16 * SIZE(X), %xmm0
        movsd %xmm0, -16 * SIZE(Y)
        ALIGN_3

.L29:
        xorq %rax, %rax
        RESTOREREGISTERS
        ret
        ALIGN_3
#endif
.L40:
        /* General strided copy (INCX and/or INCY != 1): move 8 elements
           per iteration, then handle the remainder one at a time. */
        movq M, %rax
        sarq $3, %rax
        jle .L45
        ALIGN_3

.L41:
        movsd (X), %xmm0
        addq INCX, X
        movhps (X), %xmm0
        addq INCX, X
        movsd (X), %xmm1
        addq INCX, X
        movhps (X), %xmm1
        addq INCX, X
        movsd (X), %xmm2
        addq INCX, X
        movhps (X), %xmm2
        addq INCX, X
        movsd (X), %xmm3
        addq INCX, X
        movhps (X), %xmm3
        addq INCX, X

        movlps %xmm0, (Y)
        addq INCY, Y
        movhps %xmm0, (Y)
        addq INCY, Y
        movlps %xmm1, (Y)
        addq INCY, Y
        movhps %xmm1, (Y)
        addq INCY, Y
        movlps %xmm2, (Y)
        addq INCY, Y
        movhps %xmm2, (Y)
        addq INCY, Y
        movlps %xmm3, (Y)
        addq INCY, Y
        movhps %xmm3, (Y)
        addq INCY, Y

        decq %rax
        jg .L41
        ALIGN_3

.L45:
        /* Remaining M % 8 elements. */
        movq M, %rax
        andq $7, %rax
        jle .L47
        ALIGN_3

.L46:
        movsd (X), %xmm0
        addq INCX, X
        movlps %xmm0, (Y)
        addq INCY, Y
        decq %rax
        jg .L46
        ALIGN_3

.L47:
        xorq %rax, %rax
        RESTOREREGISTERS
        ret

        EPILOGUE
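For readers who don't follow x86-64 assembly, the observable behavior of this kernel reduces to a strided element copy; the unrolled SSE2 paths only change how the data is moved, not what is computed. The C below is a minimal reference sketch of that semantics, not the OpenBLAS kernel entry point; the function name and signature are illustrative assumptions.

```c
#include <stddef.h>

/* Illustrative sketch (not the actual OpenBLAS interface): copy m doubles
 * from x, stepping by incx elements, into y, stepping by incy elements.
 * The assembly above does the same thing, with a 16-element unrolled loop
 * for the unit-stride case and an 8-element loop for arbitrary strides. */
static void copy_ref(ptrdiff_t m, const double *x, ptrdiff_t incx,
                     double *y, ptrdiff_t incy)
{
    for (ptrdiff_t i = 0; i < m; i++) {
        *y = *x;        /* copy one element */
        x += incx;      /* advance by the source stride */
        y += incy;      /* advance by the destination stride */
    }
}
```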