
zgemv_t.S 8.4 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT       */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

/* P is the row-panel blocking size: at most P rows of A are processed per pass */
#ifdef PENTIUM
#define P 88
#endif

#ifndef P
#define P 400
#endif

#define STACK 16
#define ARGS  24

/* scratch locals kept in the stack area reserved by "subl $ARGS, %esp" */
#define NLDA   0 + STACK(%esp)
#define XP     4 + STACK(%esp)
#define MIN_M  8 + STACK(%esp)
#define J     12 + STACK(%esp)
#define IS    16 + STACK(%esp)

/* incoming arguments (above the saved registers and the return address) */
#define M      4 + STACK + ARGS(%esp)
#define N      8 + STACK + ARGS(%esp)
#define K     12 + STACK + ARGS(%esp)

#ifdef DOUBLE
#define ALPHA_R 16 + STACK + ARGS(%esp)
#define ALPHA_I 24 + STACK + ARGS(%esp)
#define A       32 + STACK + ARGS(%esp)
#define LDA     36 + STACK + ARGS(%esp)
#define X       40 + STACK + ARGS(%esp)
#define INCX    44 + STACK + ARGS(%esp)
#define Y       48 + STACK + ARGS(%esp)
#define INCY    52 + STACK + ARGS(%esp)
#define BUFFER  56 + STACK + ARGS(%esp)
#else
#define ALPHA_R 16 + STACK + ARGS(%esp)
#define ALPHA_I 20 + STACK + ARGS(%esp)
#define A       24 + STACK + ARGS(%esp)
#define LDA     28 + STACK + ARGS(%esp)
#define X       32 + STACK + ARGS(%esp)
#define INCX    36 + STACK + ARGS(%esp)
#define Y       40 + STACK + ARGS(%esp)
#define INCY    44 + STACK + ARGS(%esp)
#define BUFFER  48 + STACK + ARGS(%esp)
#endif
        PROLOGUE

        subl    $ARGS, %esp
        pushl   %ebp
        pushl   %edi
        pushl   %esi
        pushl   %ebx

        PROFCODE

        FLD     ALPHA_I                 # keep alpha on the x87 stack:
        FLD     ALPHA_R                 # st(0) = alpha_r, st(1) = alpha_i

        movl    X,  %edi                # X
        movl    $0, IS

        movl    M, %ebx
        movl    N, %ecx
        testl   %ebx, %ebx
        jle     .L79
        testl   %ecx, %ecx
        jle     .L79

        movl    INCX, %esi              # incx *= 2 * SIZE (bytes per complex element)
        addl    %esi, %esi
        leal    (,%esi,SIZE), %esi
        movl    %esi, INCX

        movl    INCY, %esi              # incy *= 2 * SIZE
        addl    %esi, %esi
        leal    (, %esi, SIZE), %esi
        movl    %esi, INCY

        movl    LDA, %ebx

        movl    N, %eax                 # nlda = (P - n * lda) * 2 * SIZE
        imull   %ebx, %eax              # (byte offset from the end of one row panel
        movl    $P, %esi                #  of A to the start of the next one)
        subl    %eax, %esi
        leal    (, %esi, SIZE), %esi
        addl    %esi, %esi
        movl    %esi, NLDA

        leal    (,%ebx,SIZE), %esi      # lda *= 2 * SIZE
        addl    %esi, %esi
        movl    %esi, LDA
        ALIGN_2
.L32:
        movl    IS, %esi

        movl    $P, %edx                # min_m = min(m - is, P)
        movl    M, %eax
        subl    %esi, %eax
        cmpl    %edx, %eax
#ifdef PENTIUM
        jle     .L33
        movl    %edx, %eax
.L33:
#else
        cmovg   %edx, %eax
#endif
        movl    %eax, MIN_M

        movl    IS, %ecx
        addl    %ecx, %ecx
        leal    (%edi,%ecx,SIZE), %ecx  # xp = x + is
        movl    INCX, %ebx
        movl    %ecx, XP
        cmpl    $2 * SIZE, %ebx
        je      .L34

        /* incx != 1: repack this panel of x contiguously into BUFFER */
        movl    BUFFER, %esi
        movl    MIN_M, %eax
        movl    %esi, XP
        sarl    $1, %eax
        jle     .L35
        ALIGN_3

.L36:
        FLD     0 * SIZE(%edi)
        FLD     1 * SIZE(%edi)
        addl    %ebx, %edi              # x += incx
        FLD     0 * SIZE(%edi)
        FLD     1 * SIZE(%edi)
        addl    %ebx, %edi              # x += incx

        FST     3 * SIZE(%esi)
        FST     2 * SIZE(%esi)
        FST     1 * SIZE(%esi)
        FST     0 * SIZE(%esi)

        addl    $4 * SIZE, %esi         # xp += 4
        decl    %eax
        jg      .L36
        ALIGN_3

.L35:
        movl    MIN_M, %eax             # copy the odd remaining element, if any
        andl    $1, %eax
        jle     .L34

        FLD     0 * SIZE(%edi)
        FLD     1 * SIZE(%edi)
        addl    %ebx, %edi              # x += incx
        FST     1 * SIZE(%esi)
        FST     0 * SIZE(%esi)
        ALIGN_3
/* Main Routine */
.L34:
        movl    Y, %ebp                 # coffset = y

        movl    N, %ecx
        testl   %ecx, %ecx
        jle     .L60
        ALIGN_2

.L61:
        movl    A, %ebx                 # a_offset = a
        fldz                            # ct1 = ZERO
        movl    LDA, %edx
        fldz                            # ct2 = ZERO
        addl    %ebx, %edx
        fldz                            # ct3 = ZERO
        movl    %edx, A                 # a += lda
        fldz                            # ct4 = ZERO
        movl    XP, %esi
        FLD     (%esi)                  # bt1 = *(b_offset + 0)

        movl    MIN_M, %eax
        sarl    $1, %eax
        jle     .L64
        ALIGN_3

#define PRESIZE 8

.L65:
#ifdef HAS_PREFETCH
        prefetcht0      PRESIZE * SIZE(%ebx)
        prefetcht0      PRESIZE * SIZE(%esi)
#endif

        FLD     0 * SIZE(%ebx)          # at1 = *(a_offset + 0)
        fmul    %st(1), %st             # at1 *= bt1
        faddp   %st, %st(2)             # ct1 += at1
        FMUL    1 * SIZE(%ebx)          # bt1 *= *(a_offset + 1)
#ifndef CONJ
        faddp   %st, %st(2)             # ct2 += bt1
#else
        fsubrp  %st, %st(2)             # ct2 -= bt1
#endif

        FLD     1 * SIZE(%esi)          # bt1 = *(b_offset + 1)
        FLD     0 * SIZE(%ebx)          # at1 = *(a_offset + 0)
        fmul    %st(1), %st             # at1 *= bt1
        faddp   %st, %st(4)             # ct3 += at1
        FMUL    1 * SIZE(%ebx)          # bt1 *= *(a_offset + 1)
        faddp   %st, %st(4)             # ct4 += bt1

        FLD     2 * SIZE(%esi)          # bt1 = *(b_offset + 2)
        FLD     2 * SIZE(%ebx)          # at1 = *(a_offset + 2)
        fmul    %st(1), %st             # at1 *= bt1
        faddp   %st, %st(2)             # ct1 += at1
        FMUL    3 * SIZE(%ebx)          # bt1 *= *(a_offset + 3)
#ifndef CONJ
        faddp   %st, %st(2)             # ct2 += bt1
#else
        fsubrp  %st, %st(2)             # ct2 -= bt1
#endif

        FLD     3 * SIZE(%esi)          # bt1 = *(b_offset + 3)
        FLD     2 * SIZE(%ebx)          # at1 = *(a_offset + 2)
        fmul    %st(1), %st             # at1 *= bt1
        faddp   %st, %st(4)             # ct3 += at1
        FMUL    3 * SIZE(%ebx)          # bt1 *= *(a_offset + 3)
        faddp   %st, %st(4)             # ct4 += bt1

        FLD     4 * SIZE(%esi)          # bt1 = *(b_offset + 4)
        addl    $4 * SIZE, %esi
        addl    $4 * SIZE, %ebx
        decl    %eax
        jg      .L65
        ALIGN_3
.L64:
        movl    MIN_M, %eax             # handle the odd remaining row, if any
        andl    $1, %eax
        jle     .L70
        ALIGN_3

.L71:
        FLD     0 * SIZE(%ebx)          # at1 = *(a_offset + 0)
        fmul    %st(1), %st             # at1 *= bt1
        faddp   %st, %st(2)             # ct1 += at1
        FMUL    1 * SIZE(%ebx)          # bt1 *= *(a_offset + 1)
#ifndef CONJ
        faddp   %st, %st(2)             # ct2 += bt1
#else
        fsubrp  %st, %st(2)             # ct2 -= bt1
#endif

        FLD     1 * SIZE(%esi)          # bt1 = *(b_offset + 1)
        FLD     0 * SIZE(%ebx)          # at1 = *(a_offset + 0)
        fmul    %st(1), %st             # at1 *= bt1
        faddp   %st, %st(4)             # ct3 += at1
        FMUL    1 * SIZE(%ebx)          # bt1 *= *(a_offset + 1)
        faddp   %st, %st(4)             # ct4 += bt1
        fldz                            # dummy so the stack depth matches the even path
        ALIGN_3
.L70:
#ifndef C_SUN
        ffreep  %st(0)                  # drop the leftover bt1 / dummy on top
#else
        .byte   0xdf                    # ffreep %st(0) encoded by hand
        .byte   0xc0
#endif

        /* fold ct1..ct4 into the real and imaginary parts of the dot product */
#ifndef XCONJ
#ifndef CONJ
        fsubp   %st, %st(3)
        faddp   %st, %st(1)
#else
        faddp   %st, %st(3)
        faddp   %st, %st(1)
#endif
#else
#ifndef CONJ
        faddp   %st, %st(3)
        fsubp   %st, %st(1)
#else
        fsubp   %st, %st(3)
        fsubp   %st, %st(1)
#endif
#endif

        /* *coffset += alpha * (dot product) */
        fld     %st(0)                  # ct4 = ct2
        fmul    %st(4), %st
        fld     %st(2)
        fmul    %st(4), %st
        fsubp   %st, %st(1)
        FADD    0 * SIZE(%ebp)
        FST     0 * SIZE(%ebp)

        fmul    %st(2), %st
        fxch    %st(1)
        fmul    %st(3), %st
        faddp   %st, %st(1)
        FADD    1 * SIZE(%ebp)
        FST     1 * SIZE(%ebp)

        addl    INCY, %ebp              # coffset += incy
        decl    %ecx
        jg      .L61
        ALIGN_3
.L60:
        movl    A, %ebx                 # a += nlda: advance to the next row panel
        addl    NLDA, %ebx
        movl    %ebx, A

        addl    $P, IS                  # is += P
        movl    M, %esi
        cmpl    %esi, IS
        jl      .L32
        ALIGN_3

.L79:
#ifndef C_SUN
        ffreep  %st(0)                  # pop alpha_r and alpha_i
        ffreep  %st(0)
#else
        .byte   0xdf                    # ffreep %st(0), twice, encoded by hand
        .byte   0xc0
        .byte   0xdf
        .byte   0xc0
#endif

        popl    %ebx
        popl    %esi
        popl    %edi
        popl    %ebp
        addl    $ARGS, %esp
        ret
        EPILOGUE
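
For reference, here is a minimal C sketch of what this kernel computes: the transposed complex matrix-vector update y += alpha * A^T x, one column dot product per element of y, with the P-row blocking, the x repacking through BUFFER, and the CONJ/XCONJ sign variants omitted. The argument order is inferred from the stack-offset macros above; the function name, integer types, and the assumption that the third integer argument is unused are placeholders for illustration, not the project's actual interface.

#include <stddef.h>

typedef double FLOAT;   /* SIZE == 8 when DOUBLE is defined; float otherwise */

/* hypothetical reference version of the kernel, no conjugation, no blocking */
void zgemv_t_sketch(int m, int n, int dummy,
                    FLOAT alpha_r, FLOAT alpha_i,
                    const FLOAT *a, int lda,
                    const FLOAT *x, int incx,
                    FLOAT *y, int incy,
                    FLOAT *buffer)
{
    (void) dummy;
    (void) buffer;      /* the assembly uses it only to repack x when incx != 1 */

    for (int j = 0; j < n; j++) {
        FLOAT sr = 0.0, si = 0.0;                   /* the ct1..ct4 accumulators, folded */
        const FLOAT *col = a + 2 * (size_t) j * lda; /* column j, interleaved re/im */

        for (int i = 0; i < m; i++) {               /* complex dot product with x */
            FLOAT ar = col[2 * i],      ai = col[2 * i + 1];
            FLOAT xr = x[2 * i * incx], xi = x[2 * i * incx + 1];
            sr += ar * xr - ai * xi;                /* CONJ / XCONJ flip these signs */
            si += ar * xi + ai * xr;
        }

        /* y[j] += alpha * (column j . x) */
        y[2 * j * incy]     += alpha_r * sr - alpha_i * si;
        y[2 * j * incy + 1] += alpha_r * si + alpha_i * sr;
    }
}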