
zscal.S 8.8 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT       */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#define N	r3
#define XX	r4
#define PREA	r5

#if defined(linux) || defined(__FreeBSD__)
#ifndef __64BIT__
#define X	r6
#define INCX	r7
#else
#define X	r8
#define INCX	r9
#endif
#endif

#if defined(_AIX) || defined(__APPLE__)
#if !defined(__64BIT__) && defined(DOUBLE)
#define X	r10
#define INCX	r8
#else
#define X	r8
#define INCX	r9
#endif
#endif

#define FZERO	f0
#define ALPHA_R	f1
#define ALPHA_I	f2
	PROLOGUE
	PROFCODE

	/* Materialize the constant 0.0 in FZERO via a scratch stack slot */
	addi	SP, SP, -8
	li	r0, 0
	stw	r0, 0(SP)
	lfs	FZERO, 0(SP)
	addi	SP, SP, 8

#if (defined(_AIX) || defined(__APPLE__)) && !defined(__64BIT__) && defined(DOUBLE)
	lwz	INCX, 56(SP)	/* INCX arrives on the stack in this case */
#endif

	slwi	INCX, INCX, ZBASE_SHIFT	/* complex element stride -> byte stride */
	li	PREA, L1_PREFETCHSIZE

	cmpwi	cr0, N, 0
	blelr-	cr0			/* return if N <= 0 */

	fcmpu	cr0, FZERO, ALPHA_R
	bne-	cr0, LL(A1I1)		/* alpha_r != 0: general scaling path */
	fcmpu	cr0, FZERO, ALPHA_I
	bne-	cr0, LL(A1I1)		/* alpha_i != 0: general scaling path */

	cmpwi	cr0, INCX, 2 * SIZE
	bne-	cr0, LL(A0IN)		/* alpha == 0 with non-unit stride */

	srawi.	r0, N, 3		/* alpha == 0, unit stride: 8 elements/iteration */
	mtspr	CTR, r0
	beq-	cr0, LL(A0I1_Remain)
	.align 4

LL(A0I1_kernel):
	/* alpha == 0, unit stride: clear 8 complex (16 real) values per iteration */
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	STFD	FZERO, 2 * SIZE(X)
	STFD	FZERO, 3 * SIZE(X)
	STFD	FZERO, 4 * SIZE(X)
	STFD	FZERO, 5 * SIZE(X)
	STFD	FZERO, 6 * SIZE(X)
	STFD	FZERO, 7 * SIZE(X)
	STFD	FZERO, 8 * SIZE(X)
	STFD	FZERO, 9 * SIZE(X)
	STFD	FZERO, 10 * SIZE(X)
	STFD	FZERO, 11 * SIZE(X)
	STFD	FZERO, 12 * SIZE(X)
	STFD	FZERO, 13 * SIZE(X)
	STFD	FZERO, 14 * SIZE(X)
	STFD	FZERO, 15 * SIZE(X)
	addi	X, X, 16 * SIZE
	bdnz	LL(A0I1_kernel)
	.align 4

LL(A0I1_Remain):
	andi.	r0, N, 7		/* 0..7 leftover complex elements */
	mtspr	CTR, r0
	beqlr+
	.align 4

LL(A0I1_RemainKernel):
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	addi	X, X, 2 * SIZE
	bdnz	LL(A0I1_RemainKernel)
	blr
	.align 4
LL(A0IN):
	/* alpha == 0, non-unit stride: clear 8 complex elements per iteration */
	srawi.	r0, N, 3
	mtspr	CTR, r0
	beq-	LL(A0IN_Remain)
	.align 4

LL(A0IN_Kernel):
	dcbtst	X, PREA			/* prefetch ahead of the stores */
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	bdnz	LL(A0IN_Kernel)
	.align 4

LL(A0IN_Remain):
	andi.	r0, N, 7
	mtspr	CTR, r0
	beqlr+
	.align 4

LL(A0IN_RemainKernel):
	STFD	FZERO, 0 * SIZE(X)
	STFD	FZERO, 1 * SIZE(X)
	add	X, X, INCX
	bdnz	LL(A0IN_RemainKernel)
	blr
	.align 4
LL(A1I1):
	/* alpha != 0: check the stride, then scale in place */
	cmpwi	cr0, INCX, 2 * SIZE
	bne-	LL(A1IN)

	mr	XX, X
	srawi.	r0, N, 3		/* 8 complex elements per iteration */
	mtspr	CTR, r0
	beq+	LL(A1I1_Remain)
	.align 4

LL(A1I1_kernel):
	/* Per element: re' = alpha_r*re - alpha_i*im, im' = alpha_i*re + alpha_r*im */
	LFD	f3, 0 * SIZE(X)
	LFD	f4, 1 * SIZE(X)
	LFD	f5, 2 * SIZE(X)
	LFD	f6, 3 * SIZE(X)
	LFD	f7, 4 * SIZE(X)
	LFD	f8, 5 * SIZE(X)
	LFD	f9, 6 * SIZE(X)
	LFD	f10, 7 * SIZE(X)

	FMUL	f0, ALPHA_I, f4
	FMUL	f4, ALPHA_R, f4
	FMUL	f11, ALPHA_I, f6
	FMUL	f6, ALPHA_R, f6
	FMUL	f12, ALPHA_I, f8
	FMUL	f8, ALPHA_R, f8
	FMUL	f13, ALPHA_I, f10
	FMUL	f10, ALPHA_R, f10

	FMADD	f4, ALPHA_I, f3, f4
	FMSUB	f3, ALPHA_R, f3, f0
	FMADD	f6, ALPHA_I, f5, f6
	FMSUB	f5, ALPHA_R, f5, f11
	FMADD	f8, ALPHA_I, f7, f8
	FMSUB	f7, ALPHA_R, f7, f12
	FMADD	f10, ALPHA_I, f9, f10
	FMSUB	f9, ALPHA_R, f9, f13

	STFD	f3, 0 * SIZE(X)
	STFD	f4, 1 * SIZE(X)
	STFD	f5, 2 * SIZE(X)
	STFD	f6, 3 * SIZE(X)
	STFD	f7, 4 * SIZE(X)
	STFD	f8, 5 * SIZE(X)
	STFD	f9, 6 * SIZE(X)
	STFD	f10, 7 * SIZE(X)

	LFD	f3, 8 * SIZE(X)
	LFD	f4, 9 * SIZE(X)
	LFD	f5, 10 * SIZE(X)
	LFD	f6, 11 * SIZE(X)
	LFD	f7, 12 * SIZE(X)
	LFD	f8, 13 * SIZE(X)
	LFD	f9, 14 * SIZE(X)
	LFD	f10, 15 * SIZE(X)

	FMUL	f0, ALPHA_I, f4
	FMUL	f4, ALPHA_R, f4
	FMUL	f11, ALPHA_I, f6
	FMUL	f6, ALPHA_R, f6
	FMUL	f12, ALPHA_I, f8
	FMUL	f8, ALPHA_R, f8
	FMUL	f13, ALPHA_I, f10
	FMUL	f10, ALPHA_R, f10

	FMADD	f4, ALPHA_I, f3, f4
	FMSUB	f3, ALPHA_R, f3, f0
	FMADD	f6, ALPHA_I, f5, f6
	FMSUB	f5, ALPHA_R, f5, f11
	FMADD	f8, ALPHA_I, f7, f8
	FMSUB	f7, ALPHA_R, f7, f12
	FMADD	f10, ALPHA_I, f9, f10
	FMSUB	f9, ALPHA_R, f9, f13

	STFD	f3, 8 * SIZE(X)
	STFD	f4, 9 * SIZE(X)
	STFD	f5, 10 * SIZE(X)
	STFD	f6, 11 * SIZE(X)
	STFD	f7, 12 * SIZE(X)
	STFD	f8, 13 * SIZE(X)
	STFD	f9, 14 * SIZE(X)
	STFD	f10, 15 * SIZE(X)

	addi	X, X, 16 * SIZE
	dcbtst	X, PREA
	bdnz	LL(A1I1_kernel)
	.align 4

LL(A1I1_Remain):
	andi.	r0, N, 7
	mtspr	CTR, r0
	beqlr+
	.align 4

LL(A1I1_RemainKernel):
	LFD	f3, 0 * SIZE(X)
	LFD	f4, 1 * SIZE(X)
	FMUL	f5, ALPHA_I, f4
	FMUL	f4, ALPHA_R, f4
	FMADD	f4, ALPHA_I, f3, f4	/* im' = alpha_i*re + alpha_r*im */
	FMSUB	f3, ALPHA_R, f3, f5	/* re' = alpha_r*re - alpha_i*im */
	STFD	f3, 0 * SIZE(X)
	STFD	f4, 1 * SIZE(X)
	addi	X, X, 2 * SIZE
	bdnz	LL(A1I1_RemainKernel)
	blr
	.align 4
LL(A1IN):
	/* alpha != 0, non-unit stride: 4 complex elements per iteration;
	   loads advance XX, stores advance X over the same vector */
	mr	XX, X
	srawi.	r0, N, 2
	mtspr	CTR, r0
	beq-	LL(A1IN_Remain)
	.align 4

LL(A1IN_Kernel):
	LFD	f3, 0 * SIZE(XX)
	LFD	f4, 1 * SIZE(XX)
	add	XX, XX, INCX
	LFD	f5, 0 * SIZE(XX)
	LFD	f6, 1 * SIZE(XX)
	add	XX, XX, INCX
	LFD	f7, 0 * SIZE(XX)
	LFD	f8, 1 * SIZE(XX)
	add	XX, XX, INCX
	LFD	f9, 0 * SIZE(XX)
	LFD	f10, 1 * SIZE(XX)
	add	XX, XX, INCX

	FMUL	f0, ALPHA_I, f4
	FMUL	f4, ALPHA_R, f4
	FMUL	f11, ALPHA_I, f6
	FMUL	f6, ALPHA_R, f6
	FMUL	f12, ALPHA_I, f8
	FMUL	f8, ALPHA_R, f8
	FMUL	f13, ALPHA_I, f10
	FMUL	f10, ALPHA_R, f10

	FMADD	f4, ALPHA_I, f3, f4
	FMSUB	f3, ALPHA_R, f3, f0
	FMADD	f6, ALPHA_I, f5, f6
	FMSUB	f5, ALPHA_R, f5, f11
	FMADD	f8, ALPHA_I, f7, f8
	FMSUB	f7, ALPHA_R, f7, f12
	FMADD	f10, ALPHA_I, f9, f10
	FMSUB	f9, ALPHA_R, f9, f13

	STFD	f3, 0 * SIZE(X)
	STFD	f4, 1 * SIZE(X)
	add	X, X, INCX
	STFD	f5, 0 * SIZE(X)
	STFD	f6, 1 * SIZE(X)
	add	X, X, INCX
	STFD	f7, 0 * SIZE(X)
	STFD	f8, 1 * SIZE(X)
	add	X, X, INCX
	STFD	f9, 0 * SIZE(X)
	STFD	f10, 1 * SIZE(X)
	add	X, X, INCX

	dcbtst	X, PREA
	bdnz	LL(A1IN_Kernel)
	.align 4

LL(A1IN_Remain):
	andi.	r0, N, 3
	mtspr	CTR, r0
	beqlr+
	.align 4

LL(A1IN_RemainKernel):
	LFD	f3, 0 * SIZE(XX)
	LFD	f4, 1 * SIZE(XX)
	add	XX, XX, INCX
	FMUL	f5, ALPHA_I, f4
	FMUL	f4, ALPHA_R, f4
	FMADD	f4, ALPHA_I, f3, f4
	FMSUB	f3, ALPHA_R, f3, f5
	STFD	f3, 0 * SIZE(X)
	STFD	f4, 1 * SIZE(X)
	add	X, X, INCX
	bdnz	LL(A1IN_RemainKernel)
	blr

	EPILOGUE
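
For reference, the FMUL/FMADD/FMSUB groups above compute x[i] = alpha * x[i] for a double-complex vector, with alpha = ALPHA_R + i*ALPHA_I. The plain-C sketch below is illustrative only and not part of this file; the function name zscal_ref and its signature are hypothetical. It mirrors the FMSUB (new real part) and FMADD (new imaginary part) pair; the assembly additionally special-cases alpha == 0 by simply storing zeros instead of multiplying.

/* Illustrative sketch only -- not the OpenBLAS implementation. */
#include <stddef.h>

/* Scale n double-complex elements, stored as interleaved (re, im) pairs,
 * by alpha = alpha_r + i*alpha_i.  incx is the stride in complex elements,
 * matching the byte stride computed with ZBASE_SHIFT in the assembly.      */
static void zscal_ref(size_t n, double alpha_r, double alpha_i,
                      double *x, size_t incx)
{
    for (size_t i = 0; i < n; i++, x += 2 * incx) {
        double re = x[0];
        double im = x[1];
        x[0] = alpha_r * re - alpha_i * im;   /* FMSUB f3, ALPHA_R, f3, f0  */
        x[1] = alpha_i * re + alpha_r * im;   /* FMADD f4, ALPHA_I, f3, f4  */
    }
}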