You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

gemv_t_atom.S — 12 kB
  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #ifdef ATOM
  41. #define PREFETCH prefetchnta
  42. #define PREFETCHW prefetcht0
  43. #define PREFETCHSIZE (8 * 6)
  44. #endif
  45. #define STACKSIZE 16
  46. #define M 4 + STACKSIZE(%esp)
  47. #define N 8 + STACKSIZE(%esp)
  48. #define ALPHA 16 + STACKSIZE(%esp)
  49. #define A 24 + STACKSIZE(%esp)
  50. #define STACK_LDA 28 + STACKSIZE(%esp)
  51. #define STACK_X 32 + STACKSIZE(%esp)
  52. #define STACK_INCX 36 + STACKSIZE(%esp)
  53. #define Y 40 + STACKSIZE(%esp)
  54. #define STACK_INCY 44 + STACKSIZE(%esp)
  55. #define BUFFER 48 + STACKSIZE(%esp)
  56. #define I %eax
  57. #define J %ebx
  58. #define INCX J
  59. #define INCY %ecx
  60. #define A1 %esi
  61. #define X %edx
  62. #define Y1 %edi
  63. #define LDA %ebp
  64. PROLOGUE
  65. pushl %ebp
  66. pushl %edi
  67. pushl %esi
  68. pushl %ebx
  69. PROFCODE
  70. movl STACK_LDA, LDA
  71. movl STACK_X, X
  72. movl STACK_INCX, INCX
  73. movl STACK_INCY, INCY
  74. leal (,INCX, SIZE), INCX
  75. leal (,INCY, SIZE), INCY
  76. leal (,LDA, SIZE), LDA
  77. subl $-16 * SIZE, A
  78. cmpl $0, N
  79. jle .L999
  80. cmpl $0, M
  81. jle .L999
  82. movl BUFFER, Y1
  83. movl M, I
  84. sarl $3, I
  85. jle .L05
  86. ALIGN_4
  87. .L02:
  88. movsd (X), %xmm0
  89. addl INCX, X
  90. movhpd (X), %xmm0
  91. addl INCX, X
  92. movsd (X), %xmm1
  93. addl INCX, X
  94. movhpd (X), %xmm1
  95. addl INCX, X
  96. movsd (X), %xmm2
  97. addl INCX, X
  98. movhpd (X), %xmm2
  99. addl INCX, X
  100. movsd (X), %xmm3
  101. addl INCX, X
  102. movhpd (X), %xmm3
  103. addl INCX, X
  104. movapd %xmm0, 0 * SIZE(Y1)
  105. movapd %xmm1, 2 * SIZE(Y1)
  106. movapd %xmm2, 4 * SIZE(Y1)
  107. movapd %xmm3, 6 * SIZE(Y1)
  108. addl $8 * SIZE, Y1
  109. decl I
  110. jg .L02
  111. ALIGN_4
  112. .L05:
  113. movl M, I
  114. andl $7, I
  115. jle .L10
  116. ALIGN_2
  117. .L06:
  118. movsd (X), %xmm0
  119. addl INCX, X
  120. movsd %xmm0, 0 * SIZE(Y1)
  121. addl $SIZE, Y1
  122. decl I
  123. jg .L06
  124. ALIGN_4
  125. .L10:
  126. movl Y, Y1
  127. movl N, J
  128. sarl $1, J
  129. jle .L20
  130. ALIGN_3
  131. .L11:
  132. movl BUFFER, X
  133. addl $16 * SIZE, X
  134. movl A, A1
  135. leal (A1, LDA, 2), %eax
  136. movl %eax, A
  137. xorps %xmm0, %xmm0
  138. xorps %xmm1, %xmm1
  139. movsd -16 * SIZE(X), %xmm2
  140. movsd -15 * SIZE(X), %xmm3
  141. movl M, I
  142. sarl $3, I
  143. jle .L15
  144. movsd -16 * SIZE(A1), %xmm4
  145. movsd -16 * SIZE(A1, LDA), %xmm5
  146. movsd -15 * SIZE(A1), %xmm6
  147. movsd -15 * SIZE(A1, LDA), %xmm7
  148. mulsd %xmm2, %xmm4
  149. mulsd %xmm2, %xmm5
  150. movsd -14 * SIZE(X), %xmm2
  151. decl I
  152. jle .L13
  153. ALIGN_4
  154. .L12:
  155. #ifdef PREFETCH
  156. PREFETCH PREFETCHSIZE * SIZE(A1)
  157. #endif
  158. mulsd %xmm3, %xmm6
  159. addsd %xmm4, %xmm0
  160. movsd -14 * SIZE(A1), %xmm4
  161. mulsd %xmm3, %xmm7
  162. movsd -13 * SIZE(X), %xmm3
  163. addsd %xmm5, %xmm1
  164. movsd -14 * SIZE(A1, LDA), %xmm5
  165. mulsd %xmm2, %xmm4
  166. addsd %xmm6, %xmm0
  167. movsd -13 * SIZE(A1), %xmm6
  168. mulsd %xmm2, %xmm5
  169. movsd -12 * SIZE(X), %xmm2
  170. addsd %xmm7, %xmm1
  171. movsd -13 * SIZE(A1, LDA), %xmm7
  172. mulsd %xmm3, %xmm6
  173. addsd %xmm4, %xmm0
  174. movsd -12 * SIZE(A1), %xmm4
  175. mulsd %xmm3, %xmm7
  176. movsd -11 * SIZE(X), %xmm3
  177. addsd %xmm5, %xmm1
  178. movsd -12 * SIZE(A1, LDA), %xmm5
  179. mulsd %xmm2, %xmm4
  180. addsd %xmm6, %xmm0
  181. movsd -11 * SIZE(A1), %xmm6
  182. mulsd %xmm2, %xmm5
  183. movsd -10 * SIZE(X), %xmm2
  184. addsd %xmm7, %xmm1
  185. movsd -11 * SIZE(A1, LDA), %xmm7
  186. #ifdef PREFETCH
  187. PREFETCH PREFETCHSIZE * SIZE(A1, LDA)
  188. #endif
  189. mulsd %xmm3, %xmm6
  190. addsd %xmm4, %xmm0
  191. movsd -10 * SIZE(A1), %xmm4
  192. mulsd %xmm3, %xmm7
  193. movsd -9 * SIZE(X), %xmm3
  194. addsd %xmm5, %xmm1
  195. movsd -10 * SIZE(A1, LDA), %xmm5
  196. mulsd %xmm2, %xmm4
  197. addsd %xmm6, %xmm0
  198. movsd -9 * SIZE(A1), %xmm6
  199. mulsd %xmm2, %xmm5
  200. movsd -8 * SIZE(X), %xmm2
  201. addsd %xmm7, %xmm1
  202. movsd -9 * SIZE(A1, LDA), %xmm7
  203. mulsd %xmm3, %xmm6
  204. addsd %xmm4, %xmm0
  205. movsd -8 * SIZE(A1), %xmm4
  206. mulsd %xmm3, %xmm7
  207. movsd -7 * SIZE(X), %xmm3
  208. addsd %xmm5, %xmm1
  209. movsd -8 * SIZE(A1, LDA), %xmm5
  210. mulsd %xmm2, %xmm4
  211. addsd %xmm6, %xmm0
  212. movsd -7 * SIZE(A1), %xmm6
  213. mulsd %xmm2, %xmm5
  214. movsd -6 * SIZE(X), %xmm2
  215. addsd %xmm7, %xmm1
  216. movsd -7 * SIZE(A1, LDA), %xmm7
  217. addl $8 * SIZE, A1
  218. addl $8 * SIZE, X
  219. decl I
  220. jg .L12
  221. ALIGN_4
  222. .L13:
  223. mulsd %xmm3, %xmm6
  224. addsd %xmm4, %xmm0
  225. movsd -14 * SIZE(A1), %xmm4
  226. mulsd %xmm3, %xmm7
  227. movsd -13 * SIZE(X), %xmm3
  228. addsd %xmm5, %xmm1
  229. movsd -14 * SIZE(A1, LDA), %xmm5
  230. mulsd %xmm2, %xmm4
  231. addsd %xmm6, %xmm0
  232. movsd -13 * SIZE(A1), %xmm6
  233. mulsd %xmm2, %xmm5
  234. movsd -12 * SIZE(X), %xmm2
  235. addsd %xmm7, %xmm1
  236. movsd -13 * SIZE(A1, LDA), %xmm7
  237. mulsd %xmm3, %xmm6
  238. addsd %xmm4, %xmm0
  239. movsd -12 * SIZE(A1), %xmm4
  240. mulsd %xmm3, %xmm7
  241. movsd -11 * SIZE(X), %xmm3
  242. addsd %xmm5, %xmm1
  243. movsd -12 * SIZE(A1, LDA), %xmm5
  244. mulsd %xmm2, %xmm4
  245. addsd %xmm6, %xmm0
  246. movsd -11 * SIZE(A1), %xmm6
  247. mulsd %xmm2, %xmm5
  248. movsd -10 * SIZE(X), %xmm2
  249. addsd %xmm7, %xmm1
  250. movsd -11 * SIZE(A1, LDA), %xmm7
  251. mulsd %xmm3, %xmm6
  252. addsd %xmm4, %xmm0
  253. movsd -10 * SIZE(A1), %xmm4
  254. mulsd %xmm3, %xmm7
  255. movsd -9 * SIZE(X), %xmm3
  256. addsd %xmm5, %xmm1
  257. movsd -10 * SIZE(A1, LDA), %xmm5
  258. mulsd %xmm2, %xmm4
  259. addsd %xmm6, %xmm0
  260. movsd -9 * SIZE(A1), %xmm6
  261. mulsd %xmm2, %xmm5
  262. movsd -8 * SIZE(X), %xmm2
  263. addsd %xmm7, %xmm1
  264. movsd -9 * SIZE(A1, LDA), %xmm7
  265. mulsd %xmm3, %xmm6
  266. addsd %xmm4, %xmm0
  267. mulsd %xmm3, %xmm7
  268. movsd -7 * SIZE(X), %xmm3
  269. addsd %xmm5, %xmm1
  270. addsd %xmm6, %xmm0
  271. addl $8 * SIZE, A1
  272. addsd %xmm7, %xmm1
  273. addl $8 * SIZE, X
  274. ALIGN_4
  275. .L15:
  276. testl $4, M
  277. jle .L16
  278. movsd -16 * SIZE(A1), %xmm4
  279. movsd -16 * SIZE(A1, LDA), %xmm5
  280. movsd -15 * SIZE(A1), %xmm6
  281. movsd -15 * SIZE(A1, LDA), %xmm7
  282. mulsd %xmm2, %xmm4
  283. mulsd %xmm2, %xmm5
  284. movsd -14 * SIZE(X), %xmm2
  285. mulsd %xmm3, %xmm6
  286. addsd %xmm4, %xmm0
  287. movsd -14 * SIZE(A1), %xmm4
  288. mulsd %xmm3, %xmm7
  289. movsd -13 * SIZE(X), %xmm3
  290. addsd %xmm5, %xmm1
  291. movsd -14 * SIZE(A1, LDA), %xmm5
  292. mulsd %xmm2, %xmm4
  293. addsd %xmm6, %xmm0
  294. movsd -13 * SIZE(A1), %xmm6
  295. mulsd %xmm2, %xmm5
  296. movsd -12 * SIZE(X), %xmm2
  297. addsd %xmm7, %xmm1
  298. movsd -13 * SIZE(A1, LDA), %xmm7
  299. mulsd %xmm3, %xmm6
  300. addsd %xmm4, %xmm0
  301. mulsd %xmm3, %xmm7
  302. movsd -11 * SIZE(X), %xmm3
  303. addsd %xmm5, %xmm1
  304. addsd %xmm6, %xmm0
  305. addsd %xmm7, %xmm1
  306. addl $4 * SIZE, A1
  307. addl $4 * SIZE, X
  308. ALIGN_4
  309. .L16:
  310. testl $2, M
  311. jle .L17
  312. movsd -16 * SIZE(A1), %xmm4
  313. movsd -16 * SIZE(A1, LDA), %xmm5
  314. movsd -15 * SIZE(A1), %xmm6
  315. movsd -15 * SIZE(A1, LDA), %xmm7
  316. mulsd %xmm2, %xmm4
  317. mulsd %xmm2, %xmm5
  318. movsd -14 * SIZE(X), %xmm2
  319. mulsd %xmm3, %xmm6
  320. addsd %xmm4, %xmm0
  321. mulsd %xmm3, %xmm7
  322. addsd %xmm5, %xmm1
  323. addsd %xmm6, %xmm0
  324. addsd %xmm7, %xmm1
  325. addl $2 * SIZE, A1
  326. ALIGN_4
  327. .L17:
  328. testl $1, M
  329. jle .L18
  330. movsd -16 * SIZE(A1), %xmm4
  331. movsd -16 * SIZE(A1, LDA), %xmm5
  332. mulsd %xmm2, %xmm4
  333. mulsd %xmm2, %xmm5
  334. addsd %xmm4, %xmm0
  335. addsd %xmm5, %xmm1
  336. ALIGN_4
  337. .L18:
  338. movsd ALPHA, %xmm7
  339. mulpd %xmm7, %xmm0
  340. mulpd %xmm7, %xmm1
  341. addsd (Y1), %xmm0
  342. addsd (Y1, INCY), %xmm1
  343. movsd %xmm0, (Y1)
  344. movsd %xmm1, (Y1, INCY)
  345. leal (Y1, INCY, 2), Y1
  346. decl J
  347. jg .L11
  348. ALIGN_4
  349. .L20:
  350. testl $1, N
  351. jle .L999
  352. movl BUFFER, X
  353. addl $16 * SIZE, X
  354. movl A, A1
  355. leal (A1, LDA, 2), %eax
  356. movl %eax, A
  357. xorps %xmm0, %xmm0
  358. xorps %xmm1, %xmm1
  359. movsd -16 * SIZE(X), %xmm2
  360. movsd -15 * SIZE(X), %xmm3
  361. movl M, I
  362. sarl $3, I
  363. jle .L25
  364. movsd -16 * SIZE(A1), %xmm4
  365. movsd -15 * SIZE(A1), %xmm5
  366. movsd -14 * SIZE(A1), %xmm6
  367. movsd -13 * SIZE(A1), %xmm7
  368. mulsd %xmm2, %xmm4
  369. movsd -14 * SIZE(X), %xmm2
  370. mulsd %xmm3, %xmm5
  371. movsd -13 * SIZE(X), %xmm3
  372. decl I
  373. jle .L23
  374. ALIGN_4
  375. .L22:
  376. #ifdef PREFETCH
  377. PREFETCH PREFETCHSIZE * SIZE(A1)
  378. #endif
  379. mulsd %xmm2, %xmm6
  380. movsd -12 * SIZE(X), %xmm2
  381. addsd %xmm4, %xmm0
  382. movsd -12 * SIZE(A1), %xmm4
  383. mulsd %xmm3, %xmm7
  384. movsd -11 * SIZE(X), %xmm3
  385. addsd %xmm5, %xmm1
  386. movsd -11 * SIZE(A1), %xmm5
  387. addsd %xmm6, %xmm0
  388. movsd -10 * SIZE(A1), %xmm6
  389. mulsd %xmm2, %xmm4
  390. movsd -10 * SIZE(X), %xmm2
  391. addsd %xmm7, %xmm1
  392. movsd -9 * SIZE(A1), %xmm7
  393. mulsd %xmm3, %xmm5
  394. movsd -9 * SIZE(X), %xmm3
  395. mulsd %xmm2, %xmm6
  396. movsd -8 * SIZE(X), %xmm2
  397. addsd %xmm4, %xmm0
  398. movsd -8 * SIZE(A1), %xmm4
  399. mulsd %xmm3, %xmm7
  400. movsd -7 * SIZE(X), %xmm3
  401. addsd %xmm5, %xmm1
  402. movsd -7 * SIZE(A1), %xmm5
  403. addsd %xmm6, %xmm0
  404. movsd -6 * SIZE(A1), %xmm6
  405. mulsd %xmm2, %xmm4
  406. movsd -6 * SIZE(X), %xmm2
  407. addsd %xmm7, %xmm1
  408. movsd -5 * SIZE(A1), %xmm7
  409. mulsd %xmm3, %xmm5
  410. movsd -5 * SIZE(X), %xmm3
  411. addl $8 * SIZE, A1
  412. addl $8 * SIZE, X
  413. decl I
  414. jg .L22
  415. ALIGN_4
  416. .L23:
  417. mulsd %xmm2, %xmm6
  418. movsd -12 * SIZE(X), %xmm2
  419. addsd %xmm4, %xmm0
  420. movsd -12 * SIZE(A1), %xmm4
  421. mulsd %xmm3, %xmm7
  422. movsd -11 * SIZE(X), %xmm3
  423. addsd %xmm5, %xmm1
  424. movsd -11 * SIZE(A1), %xmm5
  425. addsd %xmm6, %xmm0
  426. movsd -10 * SIZE(A1), %xmm6
  427. mulsd %xmm2, %xmm4
  428. movsd -10 * SIZE(X), %xmm2
  429. addsd %xmm7, %xmm1
  430. movsd -9 * SIZE(A1), %xmm7
  431. mulsd %xmm3, %xmm5
  432. movsd -9 * SIZE(X), %xmm3
  433. mulsd %xmm2, %xmm6
  434. movsd -8 * SIZE(X), %xmm2
  435. addsd %xmm4, %xmm0
  436. mulsd %xmm3, %xmm7
  437. movsd -7 * SIZE(X), %xmm3
  438. addsd %xmm5, %xmm1
  439. addsd %xmm6, %xmm0
  440. addsd %xmm7, %xmm1
  441. addl $8 * SIZE, A1
  442. addl $8 * SIZE, X
  443. ALIGN_4
  444. .L25:
  445. testl $4, M
  446. jle .L26
  447. movsd -16 * SIZE(A1), %xmm4
  448. movsd -15 * SIZE(A1), %xmm5
  449. movsd -14 * SIZE(A1), %xmm6
  450. movsd -13 * SIZE(A1), %xmm7
  451. mulsd %xmm2, %xmm4
  452. movsd -14 * SIZE(X), %xmm2
  453. mulsd %xmm3, %xmm5
  454. movsd -13 * SIZE(X), %xmm3
  455. mulsd %xmm2, %xmm6
  456. movsd -12 * SIZE(X), %xmm2
  457. addsd %xmm4, %xmm0
  458. mulsd %xmm3, %xmm7
  459. movsd -11 * SIZE(X), %xmm3
  460. addsd %xmm5, %xmm1
  461. addsd %xmm6, %xmm0
  462. addsd %xmm7, %xmm1
  463. addl $4 * SIZE, A1
  464. addl $4 * SIZE, X
  465. ALIGN_4
  466. .L26:
  467. testl $2, M
  468. jle .L27
  469. movsd -16 * SIZE(A1), %xmm4
  470. movsd -15 * SIZE(A1), %xmm5
  471. mulsd %xmm2, %xmm4
  472. movsd -14 * SIZE(X), %xmm2
  473. mulsd %xmm3, %xmm5
  474. addsd %xmm4, %xmm0
  475. addsd %xmm5, %xmm1
  476. addl $2 * SIZE, A1
  477. ALIGN_4
  478. .L27:
  479. testl $1, M
  480. jle .L28
  481. movsd -16 * SIZE(A1), %xmm4
  482. mulsd %xmm2, %xmm4
  483. addsd %xmm4, %xmm0
  484. ALIGN_4
  485. .L28:
  486. movsd ALPHA, %xmm7
  487. addsd %xmm1, %xmm0
  488. mulpd %xmm7, %xmm0
  489. addsd (Y1), %xmm0
  490. movsd %xmm0, (Y1)
  491. ALIGN_4
  492. .L999:
  493. popl %ebx
  494. popl %esi
  495. popl %edi
  496. popl %ebp
  497. ret
  498. EPILOGUE