You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

gemv_t.S 12 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583
  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #ifdef PENTIUM
  41. #define P 88
  42. #endif
  43. #ifndef P
  44. #define P 1000
  45. #endif
  46. #define STACK 16
  47. #define ARGS 24
  48. #define NLDA 0 + STACK(%esp)
  49. #define XP 4 + STACK(%esp)
  50. #define MIN_M 8 + STACK(%esp)
  51. #define J 12 + STACK(%esp)
  52. #define IS 16 + STACK(%esp)
  53. #define M 4 + STACK + ARGS(%esp)
  54. #define N 8 + STACK + ARGS(%esp)
  55. #define K 12 + STACK + ARGS(%esp)
  56. #define ALPHA 16 + STACK + ARGS(%esp)
  57. #ifdef DOUBLE
  58. #define A 24 + STACK + ARGS(%esp)
  59. #define LDA 28 + STACK + ARGS(%esp)
  60. #define X 32 + STACK + ARGS(%esp)
  61. #define INCX 36 + STACK + ARGS(%esp)
  62. #define Y 40 + STACK + ARGS(%esp)
  63. #define INCY 44 + STACK + ARGS(%esp)
  64. #define BUFFER 48 + STACK + ARGS(%esp)
  65. #else
  66. #define A 20 + STACK + ARGS(%esp)
  67. #define LDA 24 + STACK + ARGS(%esp)
  68. #define X 28 + STACK + ARGS(%esp)
  69. #define INCX 32 + STACK + ARGS(%esp)
  70. #define Y 36 + STACK + ARGS(%esp)
  71. #define INCY 40 + STACK + ARGS(%esp)
  72. #define BUFFER 44 + STACK + ARGS(%esp)
  73. #endif
  74. PROLOGUE
  75. subl $ARGS, %esp
  76. pushl %ebp
  77. pushl %edi
  78. pushl %esi
  79. pushl %ebx
  80. PROFCODE
  81. FLD ALPHA
  82. movl X, %edi # X
  83. movl $0, IS
  84. movl M, %ebx
  85. movl N, %eax
  86. testl %ebx, %ebx
  87. jle .L79
  88. testl %eax, %eax
  89. jle .L79
  90. movl INCX, %esi
  91. leal (,%esi,SIZE), %esi
  92. movl %esi, INCX
  93. movl INCY, %esi
  94. leal (, %esi, SIZE), %esi
  95. movl %esi, INCY
  96. movl LDA, %ebx
  97. imull %ebx, %eax
  98. movl $P, %esi
  99. subl %eax, %esi
  100. leal (, %esi, SIZE), %esi
  101. movl %esi, NLDA
  102. leal (,%ebx,SIZE), %esi
  103. movl %esi, LDA
  104. ALIGN_2
  105. .L32:
  106. movl IS, %esi
  107. movl $P, %edx
  108. movl M, %eax
  109. subl %esi, %eax
  110. cmpl %edx, %eax
  111. #ifdef PENTIUM
  112. jle .L33
  113. movl %edx, %eax
  114. .L33:
  115. #else
  116. cmovg %edx, %eax
  117. #endif
  118. movl %eax, MIN_M
  119. movl IS, %ecx
  120. leal (%edi,%ecx,SIZE), %ecx # xp = x + is
  121. movl INCX, %ebx
  122. movl %ecx, XP
  123. cmpl $SIZE, %ebx
  124. je .L34
  125. movl BUFFER, %esi
  126. movl MIN_M, %ecx
  127. movl %esi, XP
  128. sarl $2, %ecx
  129. jle .L35
  130. ALIGN_3
  131. .L36:
  132. FLD (%edi)
  133. addl %ebx, %edi
  134. FST 0 * SIZE(%esi)
  135. FLD (%edi)
  136. addl %ebx, %edi
  137. FST 1 * SIZE(%esi)
  138. FLD (%edi)
  139. addl %ebx, %edi
  140. FST 2 * SIZE(%esi)
  141. FLD (%edi)
  142. addl %ebx, %edi
  143. FST 3 * SIZE(%esi)
  144. addl $4 * SIZE, %esi
  145. decl %ecx
  146. jg .L36
  147. ALIGN_3
  148. .L35:
  149. movl MIN_M, %ecx
  150. andl $3,%ecx
  151. jle .L34
  152. ALIGN_2
  153. .L42:
  154. FLD (%edi)
  155. addl %ebx, %edi
  156. FST (%esi)
  157. addl $SIZE, %esi
  158. decl %ecx
  159. jg .L42
  160. ALIGN_3
  161. /* Main Routine */
  162. .L34:
  163. movl Y, %ebp # coffset = y
  164. movl N, %esi
  165. sarl $2, %esi
  166. movl %esi, J
  167. jle .L47
  168. ALIGN_3
  169. .L48:
  170. movl A, %ebx # a_offset = a
  171. fldz
  172. movl LDA, %edx
  173. fldz
  174. leal (%ebx, %edx), %ecx # a_offset2 = a + lda
  175. fldz
  176. leal (%ebx, %edx, 4), %eax
  177. fldz
  178. movl %eax, A
  179. movl XP, %esi
  180. FLD (%esi)
  181. movl MIN_M, %eax
  182. sarl $2,%eax
  183. jle .L51
  184. ALIGN_3
  185. #define PRESIZE 8
  186. .L80:
  187. #ifdef PENTIUM3
  188. prefetcht0 PRESIZE * SIZE(%ebx, %edx, 2)
  189. FLD 0 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  190. fmul %st(1),%st # at1 *= bt1
  191. prefetcht0 PRESIZE * SIZE(%ecx)
  192. faddp %st,%st(2) # ct1 += at1
  193. FLD 0 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  194. prefetcht0 PRESIZE * SIZE(%ecx, %edx, 2)
  195. fmul %st(1),%st # at1 *= bt1
  196. faddp %st,%st(3) # ct2 += at1
  197. prefetcht0 PRESIZE * SIZE(%ebx)
  198. FLD 0 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  199. fmul %st(1),%st
  200. faddp %st,%st(4)
  201. FLD 0 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  202. fmulp %st, %st(1)
  203. faddp %st,%st(4)
  204. FLD 1 * SIZE(%esi)
  205. FLD 1 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  206. fmul %st(1),%st # at1 *= bt1
  207. faddp %st,%st(2) # ct1 += at1
  208. FLD 1 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  209. fmul %st(1),%st # at1 *= bt1
  210. faddp %st,%st(3) # ct2 += at1
  211. FLD 1 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  212. fmul %st(1),%st
  213. faddp %st,%st(4)
  214. FLD 1 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  215. fmulp %st, %st(1)
  216. faddp %st,%st(4)
  217. FLD 2 * SIZE(%esi)
  218. FLD 2 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  219. fmul %st(1),%st # at1 *= bt1
  220. faddp %st,%st(2) # ct1 += at1
  221. FLD 2 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  222. fmul %st(1),%st # at1 *= bt1
  223. faddp %st,%st(3) # ct2 += at1
  224. FLD 2 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  225. fmul %st(1),%st
  226. faddp %st,%st(4)
  227. FLD 2 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  228. fmulp %st, %st(1)
  229. faddp %st,%st(4)
  230. FLD 3 * SIZE(%esi)
  231. FLD 3 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  232. fmul %st(1),%st # at1 *= bt1
  233. faddp %st,%st(2) # ct1 += at1
  234. FLD 3 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  235. fmul %st(1),%st # at1 *= bt1
  236. faddp %st,%st(3) # ct2 += at1
  237. FLD 3 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  238. fmul %st(1),%st
  239. faddp %st,%st(4)
  240. FLD 3 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  241. fmulp %st, %st(1)
  242. addl $4 * SIZE, %ebx
  243. faddp %st,%st(4)
  244. addl $4 * SIZE, %ecx
  245. FLD 4 * SIZE(%esi)
  246. addl $4 * SIZE, %esi
  247. #else
  248. #if defined(HAS_PREFETCH)
  249. prefetcht0 PRESIZE * SIZE(%ebx)
  250. prefetcht0 PRESIZE * SIZE(%ebx, %edx, 2)
  251. prefetcht0 PRESIZE * SIZE(%ecx)
  252. prefetcht0 PRESIZE * SIZE(%ecx, %edx, 2)
  253. #endif
  254. FLD 0 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  255. fmul %st(1),%st # at1 *= bt1
  256. faddp %st,%st(2) # ct1 += at1
  257. FLD 0 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  258. fmul %st(1),%st # at1 *= bt1
  259. faddp %st,%st(3) # ct2 += at1
  260. FLD 0 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  261. fmul %st(1),%st
  262. faddp %st,%st(4)
  263. FMUL 0 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  264. faddp %st,%st(4)
  265. FLD 1 * SIZE(%esi)
  266. FLD 1 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  267. fmul %st(1),%st # at1 *= bt1
  268. faddp %st,%st(2) # ct1 += at1
  269. FLD 1 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  270. fmul %st(1),%st # at1 *= bt1
  271. faddp %st,%st(3) # ct2 += at1
  272. FLD 1 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  273. fmul %st(1),%st
  274. faddp %st,%st(4)
  275. FMUL 1 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  276. faddp %st,%st(4)
  277. FLD 2 * SIZE(%esi)
  278. FLD 2 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  279. fmul %st(1),%st # at1 *= bt1
  280. faddp %st,%st(2) # ct1 += at1
  281. FLD 2 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  282. fmul %st(1),%st # at1 *= bt1
  283. faddp %st,%st(3) # ct2 += at1
  284. FLD 2 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  285. fmul %st(1),%st
  286. faddp %st,%st(4)
  287. FMUL 2 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  288. faddp %st,%st(4)
  289. FLD 3 * SIZE(%esi)
  290. FLD 3 * SIZE(%ebx) # at = *(a_offset + 0 * lda)
  291. fmul %st(1),%st # at1 *= bt1
  292. faddp %st,%st(2) # ct1 += at1
  293. FLD 3 * SIZE(%ecx) # at1 = *(a_offset2 + 0 * lda)
  294. fmul %st(1),%st # at1 *= bt1
  295. faddp %st,%st(3) # ct2 += at1
  296. FLD 3 * SIZE(%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  297. fmul %st(1),%st
  298. faddp %st,%st(4)
  299. FMUL 3 * SIZE(%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  300. faddp %st,%st(4)
  301. FLD 4 * SIZE(%esi)
  302. addl $4 * SIZE, %ebx
  303. addl $4 * SIZE, %ecx
  304. addl $4 * SIZE, %esi
  305. #endif
  306. decl %eax
  307. jg .L80
  308. ALIGN_3
  309. .L51:
  310. movl MIN_M, %eax
  311. andl $3, %eax
  312. je .L81
  313. ALIGN_3
  314. .L52:
  315. FLD (%ebx) # at = *(a_offset + 0 * lda)
  316. fmul %st(1),%st # at1 *= bt1
  317. faddp %st,%st(2) # ct1 += at1
  318. FLD (%ecx) # at1 = *(a_offset2 + 0 * lda)
  319. fmul %st(1),%st # at1 *= bt1
  320. faddp %st,%st(3) # ct2 += at1
  321. FLD (%ebx, %edx, 2) # at = *(a_offset + 2 * lda)
  322. fmul %st(1),%st
  323. faddp %st,%st(4)
  324. FMUL (%ecx, %edx, 2) # at1 = *(a_offset2 + 2 * lda)
  325. faddp %st,%st(4)
  326. FLD 1 * SIZE(%esi)
  327. addl $SIZE, %ebx
  328. addl $SIZE, %ecx
  329. addl $SIZE, %esi
  330. decl %eax
  331. jg .L52
  332. ALIGN_3
  333. .L81:
  334. #ifndef C_SUN
  335. ffreep %st(0)
  336. #else
  337. .byte 0xdf
  338. .byte 0xc0
  339. #endif
  340. fxch %st(4)
  341. fmul %st, %st(4)
  342. fmul %st, %st(1)
  343. fmul %st, %st(2)
  344. fmul %st, %st(3)
  345. fxch %st(4)
  346. movl INCY, %eax
  347. FADD (%ebp)
  348. FST (%ebp)
  349. addl %eax, %ebp
  350. FADD (%ebp)
  351. FST (%ebp)
  352. addl %eax, %ebp
  353. FADD (%ebp)
  354. FST (%ebp)
  355. addl %eax, %ebp
  356. FADD (%ebp)
  357. FST (%ebp)
  358. addl %eax, %ebp
  359. decl J
  360. jg .L48
  361. ALIGN_3
  362. .L47:
  363. movl N, %esi
  364. andl $3,%esi
  365. movl %esi, J
  366. jle .L60
  367. ALIGN_2
  368. .L61:
  369. movl A, %ebx # a_offset = a
  370. fldz # ct1 = ZERO
  371. movl LDA, %edx
  372. fldz # ct1 = ZERO
  373. addl %ebx, %edx
  374. fldz # ct1 = ZERO
  375. movl %edx, A
  376. fldz # ct1 = ZERO
  377. movl XP, %esi
  378. movl MIN_M, %eax
  379. sarl $3,%eax
  380. jle .L64
  381. ALIGN_3
  382. .L65:
  383. #ifdef HAS_PREFETCH
  384. prefetcht0 PRESIZE * 2 * SIZE(%ebx)
  385. prefetcht0 PRESIZE * 2 * SIZE(%ebx)
  386. #endif
  387. FLD 0 * SIZE(%esi)
  388. FMUL 0 * SIZE(%ebx)
  389. faddp %st,%st(1)
  390. FLD 1 * SIZE(%esi)
  391. FMUL 1 * SIZE(%ebx)
  392. faddp %st,%st(2)
  393. FLD 2 * SIZE(%esi)
  394. FMUL 2 * SIZE(%ebx)
  395. faddp %st,%st(3)
  396. FLD 3 * SIZE(%esi)
  397. FMUL 3 * SIZE(%ebx)
  398. faddp %st,%st(4)
  399. FLD 4 * SIZE(%esi)
  400. FMUL 4 * SIZE(%ebx)
  401. faddp %st,%st(1)
  402. FLD 5 * SIZE(%esi)
  403. FMUL 5 * SIZE(%ebx)
  404. faddp %st,%st(2)
  405. FLD 6 * SIZE(%esi)
  406. FMUL 6 * SIZE(%ebx)
  407. faddp %st,%st(3)
  408. FLD 7 * SIZE(%esi)
  409. FMUL 7 * SIZE(%ebx)
  410. faddp %st,%st(4)
  411. addl $8 * SIZE, %esi
  412. addl $8 * SIZE, %ebx
  413. decl %eax
  414. jg .L65
  415. ALIGN_3
  416. .L64:
  417. movl MIN_M, %eax
  418. andl $7, %eax
  419. jle .L70
  420. ALIGN_3
  421. .L71:
  422. FLD (%esi)
  423. FMUL (%ebx)
  424. faddp %st,%st(1)
  425. addl $SIZE, %esi
  426. addl $SIZE, %ebx
  427. decl %eax
  428. jg .L71
  429. ALIGN_3
  430. .L70:
  431. faddp %st, %st(1)
  432. faddp %st, %st(1)
  433. faddp %st, %st(1)
  434. fmul %st(1),%st
  435. FADD (%ebp)
  436. FST (%ebp)
  437. addl INCY, %ebp
  438. decl J
  439. jg .L61
  440. ALIGN_3
  441. .L60:
  442. movl A, %ebx
  443. addl NLDA, %ebx
  444. movl %ebx, A
  445. addl $P, IS
  446. movl M, %esi
  447. cmpl %esi, IS
  448. jl .L32
  449. ALIGN_3
  450. .L79:
  451. #ifndef C_SUN
  452. ffreep %st(0)
  453. #else
  454. .byte 0xdf
  455. .byte 0xc0
  456. #endif
  457. popl %ebx
  458. popl %esi
  459. popl %edi
  460. popl %ebp
  461. addl $ARGS, %esp
  462. ret
  463. EPILOGUE