
scal_lasx.S

/***************************************************************************
Copyright (c) 2023, The OpenBLAS Project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
3. Neither the name of the OpenBLAS project nor the names of
its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/
#define ASSEMBLER

#include "common.h"

#define N      $r4
#define ALPHA  $f0
#define X      $r7
#define INCX   $r8
#define I      $r12
#define TEMP   $r13
#define t1     $r14
#define t2     $r18
#define t3     $r15
#define t4     $r17
#define XX     $r16
#define VX0    $xr12
#define VX1    $xr13
#define VT0    $xr14
#define VT1    $xr15
#define VALPHA $xr19
#define a1     $f8
#define a2     $f23
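
/* SCAL kernel for LoongArch64 LASX: scales the vector X of length N and
 * stride INCX by the scalar ALPHA, i.e. X[i*INCX] *= ALPHA.
 * ALPHA == 0 (overwrite with zeros) and ALPHA == 1 (no-op) are special-cased,
 * and separate paths handle contiguous (INCX == 1) and strided vectors. */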

    PROLOGUE

    bge     $r0, N, .L999           // nothing to do for N <= 0
    bge     $r0, INCX, .L999        // return immediately for INCX <= 0
    li.d    TEMP, 1
    movgr2fr.d  a1, $r0
    FFINT   a1, a1                  // a1 = 0.0
    movgr2fr.d  a2, TEMP
    FFINT   a2, a2                  // a2 = 1.0
    slli.d  TEMP, TEMP, BASE_SHIFT  // TEMP = element size in bytes
    slli.d  INCX, INCX, BASE_SHIFT  // INCX is now a byte stride
    CMPEQ   $fcc0, ALPHA, a1
    bcnez   $fcc0, .L20             // ALPHA == 0
    CMPEQ   $fcc0, ALPHA, a2
    bcnez   $fcc0, .L999            // ALPHA == 1, return
    srai.d  I, N, 3                 // I = number of 8-element blocks
    beq     INCX, TEMP, .L30        // ALPHA != 0|1 and INCX == 1
    MTG     TEMP, ALPHA
#ifdef DOUBLE
    xvreplgr2vr.d   VALPHA, TEMP    // broadcast ALPHA into all vector lanes
#else
    xvreplgr2vr.w   VALPHA, TEMP
#endif
    move    XX, X
    .align 3
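
// Strided case (INCX != 1) with a general ALPHA: gather eight elements of X
// into LASX registers, multiply by the broadcast VALPHA, and scatter the
// products back element by element through the shadow pointer XX.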
.L10: // ALPHA != 0|1 and INCX != 1
    bge     $r0, I, .L32
    .align 3

.L11:
#ifdef DOUBLE
    ld.d    t1, X, 0 * SIZE
    add.d   X, X, INCX
    ld.d    t2, X, 0 * SIZE
    add.d   X, X, INCX
    ld.d    t3, X, 0 * SIZE
    add.d   X, X, INCX
    ld.d    t4, X, 0 * SIZE
    add.d   X, X, INCX
    xvinsgr2vr.d    VX0, t1, 0
    xvinsgr2vr.d    VX0, t2, 1
    xvinsgr2vr.d    VX0, t3, 2
    xvinsgr2vr.d    VX0, t4, 3
    ld.d    t1, X, 0 * SIZE
    add.d   X, X, INCX
    ld.d    t2, X, 0 * SIZE
    add.d   X, X, INCX
    xvfmul.d    VT0, VX0, VALPHA
    ld.d    t3, X, 0 * SIZE
    add.d   X, X, INCX
    ld.d    t4, X, 0 * SIZE
    add.d   X, X, INCX
    xvinsgr2vr.d    VX1, t1, 0
    xvinsgr2vr.d    VX1, t2, 1
    xvinsgr2vr.d    VX1, t3, 2
    xvinsgr2vr.d    VX1, t4, 3
    xvstelm.d   VT0, XX, 0, 0
    add.d   XX, XX, INCX
    xvstelm.d   VT0, XX, 0, 1
    add.d   XX, XX, INCX
    xvstelm.d   VT0, XX, 0, 2
    add.d   XX, XX, INCX
    xvstelm.d   VT0, XX, 0, 3
    add.d   XX, XX, INCX
    xvfmul.d    VT1, VX1, VALPHA
    xvstelm.d   VT1, XX, 0, 0
    add.d   XX, XX, INCX
    xvstelm.d   VT1, XX, 0, 1
    add.d   XX, XX, INCX
    xvstelm.d   VT1, XX, 0, 2
    add.d   XX, XX, INCX
    xvstelm.d   VT1, XX, 0, 3
#else
    ld.w    t1, X, 0 * SIZE
    add.d   X, X, INCX
    ld.w    t2, X, 0 * SIZE
    add.d   X, X, INCX
    ld.w    t3, X, 0 * SIZE
    add.d   X, X, INCX
    ld.w    t4, X, 0 * SIZE
    add.d   X, X, INCX
    xvinsgr2vr.w    VX0, t1, 0
    xvinsgr2vr.w    VX0, t2, 1
    xvinsgr2vr.w    VX0, t3, 2
    xvinsgr2vr.w    VX0, t4, 3
    ld.w    t1, X, 0 * SIZE
    add.d   X, X, INCX
    ld.w    t2, X, 0 * SIZE
    add.d   X, X, INCX
    ld.w    t3, X, 0 * SIZE
    add.d   X, X, INCX
    ld.w    t4, X, 0 * SIZE
    add.d   X, X, INCX
    xvinsgr2vr.w    VX0, t1, 4
    xvinsgr2vr.w    VX0, t2, 5
    xvinsgr2vr.w    VX0, t3, 6
    xvinsgr2vr.w    VX0, t4, 7
    xvfmul.s    VT0, VX0, VALPHA
    xvstelm.w   VT0, XX, 0, 0
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 1
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 2
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 3
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 4
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 5
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 6
    add.d   XX, XX, INCX
    xvstelm.w   VT0, XX, 0, 7
#endif
    add.d   XX, XX, INCX
    addi.d  I, I, -1
    blt     $r0, I, .L11
    b       .L32
    .align 3
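
// ALPHA == 0: no multiply is needed, just overwrite X with zeros
// (.L21/.L23 handle the strided case, .L25/.L27 the contiguous case).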
.L20:
    srai.d  I, N, 3
    beq     INCX, TEMP, .L24
    bge     $r0, I, .L22
    .align 3

.L21:
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    ST      a1, X, 0
    add.d   X, X, INCX
    addi.d  I, I, -1
    blt     $r0, I, .L21
    .align 3

.L22:
    andi    I, N, 7
    bge     $r0, I, .L999
    .align 3

.L23:
    ST      a1, X, 0 * SIZE
    addi.d  I, I, -1
    add.d   X, X, INCX
    blt     $r0, I, .L23
    jirl    $r0, $r1, 0
    .align 3

.L24:
    bge     $r0, I, .L26    /* N < 8, INCX == 1 */
    .align 3

.L25:
    xvxor.v VX0, VX0, VX0
    xvst    VX0, X, 0 * SIZE
#ifdef DOUBLE
    xvst    VX0, X, 4 * SIZE
#endif
    addi.d  I, I, -1
    addi.d  X, X, 8 * SIZE
    blt     $r0, I, .L25
    .align 3

.L26:
    andi    I, N, 7
    bge     $r0, I, .L999
    .align 3

.L27:
    ST      a1, X, 0 * SIZE
    addi.d  I, I, -1
    addi.d  X, X, SIZE
    blt     $r0, I, .L27
    jirl    $r0, $r1, 0
    .align 3
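
// ALPHA != 0 and ALPHA != 1 with INCX == 1: contiguous path, processing
// eight elements per iteration with full-width LASX loads and stores.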
.L30:
    bge     $r0, I, .L32    /* N < 8, INCX == 1 */
    MTG     TEMP, ALPHA
#ifdef DOUBLE
    xvreplgr2vr.d   VALPHA, TEMP
#else
    xvreplgr2vr.w   VALPHA, TEMP
#endif
    .align 3

.L31:
    xvld    VX0, X, 0 * SIZE
#ifdef DOUBLE
    xvld    VX1, X, 4 * SIZE
    xvfmul.d    VT0, VX0, VALPHA
    xvfmul.d    VT1, VX1, VALPHA
    xvst    VT0, X, 0 * SIZE
    xvst    VT1, X, 4 * SIZE
#else
    xvfmul.s    VT0, VX0, VALPHA
    xvst    VT0, X, 0 * SIZE
#endif
    addi.d  I, I, -1
    addi.d  X, X, 8 * SIZE
    blt     $r0, I, .L31
    .align 3
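
// Tail: handle the remaining N % 8 elements (and the N < 8 case) one at a
// time with a scalar load, multiply, and store.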
.L32:
    andi    I, N, 7
    bge     $r0, I, .L999
    .align 3

.L33:
    LD      a1, X, 0 * SIZE
    addi.d  I, I, -1
    MUL     a1, ALPHA, a1
    ST      a1, X, 0 * SIZE
    add.d   X, X, INCX
    blt     $r0, I, .L33
    jirl    $r0, $r1, 0
    .align 3

.L999:
    jirl    $r0, $r1, 0x0

    EPILOGUE