
zscal.S 8.9 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT           */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,           */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF          */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT          */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,        */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES          */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE         */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR              */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF        */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT         */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT        */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE               */
/* POSSIBILITY OF SUCH DAMAGE.                                       */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#define N       r3
#define XX      r4
#define PREA    r5

/* X and INCX arrive in different argument registers depending on the ABI. */
#if defined(linux) || defined(__FreeBSD__)
#ifndef __64BIT__
#define X       r6
#define INCX    r7
#else
#define X       r8
#define INCX    r9
#endif
#define FLAG    r11
#endif

#if defined(_AIX) || defined(__APPLE__)
#if !defined(__64BIT__) && defined(DOUBLE)
#define X       r10
#define INCX    r8
#else
#define X       r8
#define INCX    r9
#endif
#define FLAG    r11
#endif

#define FZERO   f0
#define ALPHA_R f1
#define ALPHA_I f2
        PROLOGUE
        PROFCODE

        addi    SP, SP, -8
        li      r0, 0

        stw     r0, 0(SP)
        lfs     FZERO, 0(SP)              /* FZERO = 0.0, materialized via the stack */
        addi    SP, SP, 8

#if (defined(_AIX) || defined(__APPLE__)) && !defined(__64BIT__) && defined(DOUBLE)
        lwz     INCX, 56(SP)
#endif

        slwi    INCX, INCX, ZBASE_SHIFT   /* stride in bytes: incx * sizeof(complex) */
        li      PREA, L1_PREFETCHSIZE

        cmpwi   cr0, N, 0
        blelr-  cr0                       /* nothing to do if n <= 0 */

        fcmpu   cr0, FZERO, ALPHA_R
        bne-    cr0, LL(A1I1)
        fcmpu   cr0, FZERO, ALPHA_I
        bne-    cr0, LL(A1I1)             /* alpha != 0: take the multiply path */

        LDLONG  FLAG, 104(SP)
        cmpwi   cr0, FLAG, 1
        beq-    cr0, LL(A1I1)             /* flag set: multiply even though alpha == 0 */

        cmpwi   cr0, INCX, 2 * SIZE
        bne-    cr0, LL(A0IN)             /* alpha == 0, non-unit stride */

        srawi.  r0, N, 3
        mtspr   CTR, r0
        beq-    cr0, LL(A0I1_Remain)
        .align 4

LL(A0I1_kernel):
        STFD    FZERO,  0 * SIZE(X)
        STFD    FZERO,  1 * SIZE(X)
        STFD    FZERO,  2 * SIZE(X)
        STFD    FZERO,  3 * SIZE(X)
        STFD    FZERO,  4 * SIZE(X)
        STFD    FZERO,  5 * SIZE(X)
        STFD    FZERO,  6 * SIZE(X)
        STFD    FZERO,  7 * SIZE(X)
        STFD    FZERO,  8 * SIZE(X)
        STFD    FZERO,  9 * SIZE(X)
        STFD    FZERO, 10 * SIZE(X)
        STFD    FZERO, 11 * SIZE(X)
        STFD    FZERO, 12 * SIZE(X)
        STFD    FZERO, 13 * SIZE(X)
        STFD    FZERO, 14 * SIZE(X)
        STFD    FZERO, 15 * SIZE(X)
        addi    X, X, 16 * SIZE
        bdnz    LL(A0I1_kernel)
        .align 4

LL(A0I1_Remain):
        andi.   r0, N, 7
        mtspr   CTR, r0
        beqlr+
        .align 4

LL(A0I1_RemainKernel):
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        addi    X, X, 2 * SIZE
        bdnz    LL(A0I1_RemainKernel)
        blr
        .align 4

LL(A0IN):                                 /* alpha == 0, general stride: store zeros */
        srawi.  r0, N, 3
        mtspr   CTR, r0
        beq-    LL(A0IN_Remain)
        .align 4

LL(A0IN_Kernel):
        dcbtst  X, PREA
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        bdnz    LL(A0IN_Kernel)
        .align 4

LL(A0IN_Remain):
        andi.   r0, N, 7
        mtspr   CTR, r0
        beqlr+
        .align 4

LL(A0IN_RemainKernel):
        STFD    FZERO, 0 * SIZE(X)
        STFD    FZERO, 1 * SIZE(X)
        add     X, X, INCX
        bdnz    LL(A0IN_RemainKernel)
        blr
        .align 4

LL(A1I1):
        cmpwi   cr0, INCX, 2 * SIZE
        bne-    LL(A1IN)

        mr      XX, X

        srawi.  r0, N, 3
        mtspr   CTR, r0
        beq+    LL(A1I1_Remain)
        .align 4

LL(A1I1_kernel):                          /* alpha != 0, incx == 1: 8 complex elements per iteration */
        LFD     f3,  0 * SIZE(X)
        LFD     f4,  1 * SIZE(X)
        LFD     f5,  2 * SIZE(X)
        LFD     f6,  3 * SIZE(X)
        LFD     f7,  4 * SIZE(X)
        LFD     f8,  5 * SIZE(X)
        LFD     f9,  6 * SIZE(X)
        LFD     f10, 7 * SIZE(X)

        /* per element: re' = ALPHA_R * re - ALPHA_I * im, im' = ALPHA_R * im + ALPHA_I * re */
        FMUL    f0,  ALPHA_I, f4
        FMUL    f4,  ALPHA_R, f4
        FMUL    f11, ALPHA_I, f6
        FMUL    f6,  ALPHA_R, f6
        FMUL    f12, ALPHA_I, f8
        FMUL    f8,  ALPHA_R, f8
        FMUL    f13, ALPHA_I, f10
        FMUL    f10, ALPHA_R, f10

        FMADD   f4,  ALPHA_I, f3, f4
        FMSUB   f3,  ALPHA_R, f3, f0
        FMADD   f6,  ALPHA_I, f5, f6
        FMSUB   f5,  ALPHA_R, f5, f11
        FMADD   f8,  ALPHA_I, f7, f8
        FMSUB   f7,  ALPHA_R, f7, f12
        FMADD   f10, ALPHA_I, f9, f10
        FMSUB   f9,  ALPHA_R, f9, f13

        STFD    f3,  0 * SIZE(X)
        STFD    f4,  1 * SIZE(X)
        STFD    f5,  2 * SIZE(X)
        STFD    f6,  3 * SIZE(X)
        STFD    f7,  4 * SIZE(X)
        STFD    f8,  5 * SIZE(X)
        STFD    f9,  6 * SIZE(X)
        STFD    f10, 7 * SIZE(X)

        LFD     f3,  8 * SIZE(X)
        LFD     f4,  9 * SIZE(X)
        LFD     f5, 10 * SIZE(X)
        LFD     f6, 11 * SIZE(X)
        LFD     f7, 12 * SIZE(X)
        LFD     f8, 13 * SIZE(X)
        LFD     f9, 14 * SIZE(X)
        LFD     f10, 15 * SIZE(X)

        FMUL    f0,  ALPHA_I, f4
        FMUL    f4,  ALPHA_R, f4
        FMUL    f11, ALPHA_I, f6
        FMUL    f6,  ALPHA_R, f6
        FMUL    f12, ALPHA_I, f8
        FMUL    f8,  ALPHA_R, f8
        FMUL    f13, ALPHA_I, f10
        FMUL    f10, ALPHA_R, f10

        FMADD   f4,  ALPHA_I, f3, f4
        FMSUB   f3,  ALPHA_R, f3, f0
        FMADD   f6,  ALPHA_I, f5, f6
        FMSUB   f5,  ALPHA_R, f5, f11
        FMADD   f8,  ALPHA_I, f7, f8
        FMSUB   f7,  ALPHA_R, f7, f12
        FMADD   f10, ALPHA_I, f9, f10
        FMSUB   f9,  ALPHA_R, f9, f13

        STFD    f3,  8 * SIZE(X)
        STFD    f4,  9 * SIZE(X)
        STFD    f5, 10 * SIZE(X)
        STFD    f6, 11 * SIZE(X)
        STFD    f7, 12 * SIZE(X)
        STFD    f8, 13 * SIZE(X)
        STFD    f9, 14 * SIZE(X)
        STFD    f10, 15 * SIZE(X)

        addi    X, X, 16 * SIZE
        dcbtst  X, PREA
        bdnz    LL(A1I1_kernel)
        .align 4

LL(A1I1_Remain):
        andi.   r0, N, 7
        mtspr   CTR, r0
        beqlr+
        .align 4

LL(A1I1_RemainKernel):                    /* one complex element per iteration */
        LFD     f3, 0 * SIZE(X)
        LFD     f4, 1 * SIZE(X)
        FMUL    f5, ALPHA_I, f4
        FMUL    f4, ALPHA_R, f4
        FMADD   f4, ALPHA_I, f3, f4
        FMSUB   f3, ALPHA_R, f3, f5
        STFD    f3, 0 * SIZE(X)
        STFD    f4, 1 * SIZE(X)
        addi    X, X, 2 * SIZE
        bdnz    LL(A1I1_RemainKernel)
        blr
        .align 4

LL(A1IN):                                 /* alpha != 0, general stride: 4 complex elements per iteration */
        mr      XX, X

        srawi.  r0, N, 2
        mtspr   CTR, r0
        beq-    LL(A1IN_Remain)
        .align 4

LL(A1IN_Kernel):
        LFD     f3,  0 * SIZE(XX)
        LFD     f4,  1 * SIZE(XX)
        add     XX, XX, INCX
        LFD     f5,  0 * SIZE(XX)
        LFD     f6,  1 * SIZE(XX)
        add     XX, XX, INCX
        LFD     f7,  0 * SIZE(XX)
        LFD     f8,  1 * SIZE(XX)
        add     XX, XX, INCX
        LFD     f9,  0 * SIZE(XX)
        LFD     f10, 1 * SIZE(XX)
        add     XX, XX, INCX

        FMUL    f0,  ALPHA_I, f4
        FMUL    f4,  ALPHA_R, f4
        FMUL    f11, ALPHA_I, f6
        FMUL    f6,  ALPHA_R, f6
        FMUL    f12, ALPHA_I, f8
        FMUL    f8,  ALPHA_R, f8
        FMUL    f13, ALPHA_I, f10
        FMUL    f10, ALPHA_R, f10

        FMADD   f4,  ALPHA_I, f3, f4
        FMSUB   f3,  ALPHA_R, f3, f0
        FMADD   f6,  ALPHA_I, f5, f6
        FMSUB   f5,  ALPHA_R, f5, f11
        FMADD   f8,  ALPHA_I, f7, f8
        FMSUB   f7,  ALPHA_R, f7, f12
        FMADD   f10, ALPHA_I, f9, f10
        FMSUB   f9,  ALPHA_R, f9, f13

        STFD    f3,  0 * SIZE(X)
        STFD    f4,  1 * SIZE(X)
        add     X, X, INCX
        STFD    f5,  0 * SIZE(X)
        STFD    f6,  1 * SIZE(X)
        add     X, X, INCX
        STFD    f7,  0 * SIZE(X)
        STFD    f8,  1 * SIZE(X)
        add     X, X, INCX
        STFD    f9,  0 * SIZE(X)
        STFD    f10, 1 * SIZE(X)
        add     X, X, INCX

        dcbtst  X, PREA
        bdnz    LL(A1IN_Kernel)
        .align 4

LL(A1IN_Remain):
        andi.   r0, N, 3
        mtspr   CTR, r0
        beqlr+
        .align 4

LL(A1IN_RemainKernel):
        LFD     f3, 0 * SIZE(XX)
        LFD     f4, 1 * SIZE(XX)
        add     XX, XX, INCX
        FMUL    f5, ALPHA_I, f4
        FMUL    f4, ALPHA_R, f4
        FMADD   f4, ALPHA_I, f3, f4
        FMSUB   f3, ALPHA_R, f3, f5
        STFD    f3, 0 * SIZE(X)
        STFD    f4, 1 * SIZE(X)
        add     X, X, INCX
        bdnz    LL(A1IN_RemainKernel)
        blr

        EPILOGUE
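
For orientation, here is a minimal C sketch of the operation this kernel performs. It is not the library's actual entry point: the function name, argument list, and the flag parameter are illustrative stand-ins (the real interface is declared through common.h), while the arithmetic mirrors the FMUL/FMADD/FMSUB sequences above and the zero branch mirrors the LL(A0I1)/LL(A0IN) paths.

#include <stddef.h>

/* Reference sketch only (hypothetical name and signature): scale a complex
 * vector in place, x[i] = alpha * x[i], with x stored as interleaved (re, im)
 * doubles and incx given in complex elements (assumed positive here). */
static void zscal_ref(size_t n, double alpha_r, double alpha_i,
                      double *x, ptrdiff_t incx, int flag)
{
    if (n == 0)
        return;

    /* Fast path taken by the assembly when alpha == 0 and the flag is not 1:
     * store zeros directly instead of multiplying. */
    if (alpha_r == 0.0 && alpha_i == 0.0 && flag != 1) {
        for (size_t i = 0; i < n; i++, x += 2 * incx) {
            x[0] = 0.0;
            x[1] = 0.0;
        }
        return;
    }

    /* General path: complex multiply, matching the FMSUB (real part) and
     * FMADD (imaginary part) instructions in the kernels above. */
    for (size_t i = 0; i < n; i++, x += 2 * incx) {
        double re = x[0], im = x[1];
        x[0] = alpha_r * re - alpha_i * im;
        x[1] = alpha_r * im + alpha_i * re;
    }
}

For example, zscal_ref(n, 2.0, 0.0, x, 1, 0) would double every element of a contiguous complex vector of length n; the assembly's unrolled loops do the same work eight (unit stride) or four (general stride) elements at a time.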