You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

ztrsm_kernel_LT_1x2_penryn.S 17 kB

  1. /*********************************************************************/
  2. /* Copyright 2009, 2010 The University of Texas at Austin. */
  3. /* All rights reserved. */
  4. /* */
  5. /* Redistribution and use in source and binary forms, with or */
  6. /* without modification, are permitted provided that the following */
  7. /* conditions are met: */
  8. /* */
  9. /* 1. Redistributions of source code must retain the above */
  10. /* copyright notice, this list of conditions and the following */
  11. /* disclaimer. */
  12. /* */
  13. /* 2. Redistributions in binary form must reproduce the above */
  14. /* copyright notice, this list of conditions and the following */
  15. /* disclaimer in the documentation and/or other materials */
  16. /* provided with the distribution. */
  17. /* */
  18. /* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
  19. /* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
  20. /* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
  21. /* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
  22. /* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
  23. /* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
  24. /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
  25. /* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
  26. /* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
  27. /* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
  28. /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
  29. /* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
  30. /* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
  31. /* POSSIBILITY OF SUCH DAMAGE. */
  32. /* */
  33. /* The views and conclusions contained in the software and */
  34. /* documentation are those of the authors and should not be */
  35. /* interpreted as representing official policies, either expressed */
  36. /* or implied, of The University of Texas at Austin. */
  37. /*********************************************************************/
  38. #define ASSEMBLER
  39. #include "common.h"
  40. #define STACK 16
  41. #define ARGS 16
  42. #define M 4 + STACK + ARGS(%esp)
  43. #define N 8 + STACK + ARGS(%esp)
  44. #define K 12 + STACK + ARGS(%esp)
  45. #define ALPHA_R 16 + STACK + ARGS(%esp)
  46. #define ALPHA_I 24 + STACK + ARGS(%esp)
  47. #define A 32 + STACK + ARGS(%esp)
  48. #define ARG_B 36 + STACK + ARGS(%esp)
  49. #define C 40 + STACK + ARGS(%esp)
  50. #define ARG_LDC 44 + STACK + ARGS(%esp)
  51. #define OFFSET 48 + STACK + ARGS(%esp)
  52. #define J 0 + STACK(%esp)
  53. #define KK 4 + STACK(%esp)
  54. #define KKK 8 + STACK(%esp)
  55. #define AORIG 12 + STACK(%esp)
  56. #if defined(PENRYN) || defined(DUNNINGTON)
  57. #define PREFETCH prefetcht1
  58. #define PREFETCHSIZE 84
  59. #endif
  60. #if defined(NEHALEM) || defined(NEHALEM_OPTIMIZATION)
  61. #define PREFETCH prefetcht1
  62. #define PREFETCHSIZE 84
  63. #endif
  64. #ifdef NANO
  65. #define PREFETCH prefetcht0
  66. #define PREFETCHSIZE (8 * 2)
  67. #endif
  68. #define AA %edx
  69. #define BB %ecx
  70. #define LDC %ebp
  71. #define B %edi
  72. #define CO1 %esi
  73. #define ADD1 addpd
  74. #define ADD2 addpd
  75. PROLOGUE
  76. subl $ARGS, %esp
  77. pushl %ebp
  78. pushl %edi
  79. pushl %esi
  80. pushl %ebx
  81. PROFCODE
  82. movl ARG_B, B
  83. movl ARG_LDC, LDC
  84. movl OFFSET, %eax
  85. #ifdef RN
  86. negl %eax
  87. #endif
  88. movl %eax, KK
  89. movl M, %ebx
  90. testl %ebx, %ebx
  91. jle .L999
  92. subl $-16 * SIZE, A
  93. subl $-16 * SIZE, B
  94. sall $ZBASE_SHIFT, LDC
  95. #ifdef LN
  96. movl M, %eax
  97. sall $ZBASE_SHIFT, %eax
  98. addl %eax, C
  99. imull K, %eax
  100. addl %eax, A
  101. #endif
  102. #ifdef RT
  103. movl N, %eax
  104. sall $ZBASE_SHIFT, %eax
  105. imull K, %eax
  106. addl %eax, B
  107. movl N, %eax
  108. imull LDC, %eax
  109. addl %eax, C
  110. #endif
  111. #ifdef RT
  112. movl N, %eax
  113. subl OFFSET, %eax
  114. movl %eax, KK
  115. #endif
  116. movl N, %eax
  117. sarl $1, %eax
  118. movl %eax, J # j = n
  119. jle .L100
  120. ALIGN_4
  121. .L01:
  122. #if defined(LT) || defined(RN)
  123. movl A, AA
  124. #else
  125. movl A, %eax
  126. movl %eax, AORIG
  127. #endif
  128. #ifdef RT
  129. movl K, %eax
  130. sall $1 + ZBASE_SHIFT, %eax
  131. subl %eax, B
  132. #endif
  133. leal (, LDC, 2), %eax
  134. #ifdef RT
  135. subl %eax, C
  136. #endif
  137. movl C, CO1
  138. #ifndef RT
  139. addl %eax, C
  140. #endif
  141. #ifdef LN
  142. movl OFFSET, %eax
  143. addl M, %eax
  144. movl %eax, KK
  145. #endif
  146. #ifdef LT
  147. movl OFFSET, %eax
  148. movl %eax, KK
  149. #endif
  150. movl M, %ebx
  151. ALIGN_4
  152. .L10:
  153. #ifdef LN
  154. movl K, %eax
  155. sall $ZBASE_SHIFT, %eax
  156. subl %eax, AORIG
  157. #endif
  158. #if defined(LN) || defined(RT)
  159. movl KK, %eax
  160. movl AORIG, AA
  161. sall $ZBASE_SHIFT, %eax
  162. addl %eax, AA
  163. #endif
  164. movl B, BB
  165. #if defined(LN) || defined(RT)
  166. movl KK, %eax
  167. sall $1 + ZBASE_SHIFT, %eax
  168. addl %eax, BB
  169. #endif
  170. movaps -16 * SIZE(AA), %xmm0
  171. pxor %xmm2, %xmm2
  172. movaps -16 * SIZE(BB), %xmm1
  173. pxor %xmm3, %xmm3
  174. #ifdef LN
  175. pxor %xmm4, %xmm4
  176. prefetcht0 -2 * SIZE(CO1)
  177. pxor %xmm5, %xmm5
  178. prefetcht0 -2 * SIZE(CO1, LDC)
  179. #else
  180. pxor %xmm4, %xmm4
  181. prefetcht0 1 * SIZE(CO1)
  182. pxor %xmm5, %xmm5
  183. prefetcht0 1 * SIZE(CO1, LDC)
  184. #endif
  185. pxor %xmm6, %xmm6
  186. pxor %xmm7, %xmm7
  187. #if defined(LT) || defined(RN)
  188. movl KK, %eax
  189. #else
  190. movl K, %eax
  191. subl KK, %eax
  192. #endif
  193. sarl $3, %eax
  194. je .L15
  195. ALIGN_4
  196. .L12:
  197. PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
  198. ADD1 %xmm3, %xmm6
  199. movaps -14 * SIZE(BB), %xmm3
  200. ADD2 %xmm2, %xmm7
  201. pshufd $0x4e, %xmm1, %xmm2
  202. mulpd %xmm0, %xmm1
  203. mulpd %xmm0, %xmm2
  204. ADD1 %xmm1, %xmm4
  205. movaps -12 * SIZE(BB), %xmm1
  206. ADD2 %xmm2, %xmm5
  207. pshufd $0x4e, %xmm3, %xmm2
  208. mulpd %xmm0, %xmm3
  209. mulpd %xmm0, %xmm2
  210. movaps -14 * SIZE(AA), %xmm0
  211. ADD1 %xmm3, %xmm6
  212. movaps -10 * SIZE(BB), %xmm3
  213. ADD2 %xmm2, %xmm7
  214. pshufd $0x4e, %xmm1, %xmm2
  215. mulpd %xmm0, %xmm1
  216. mulpd %xmm0, %xmm2
  217. ADD1 %xmm1, %xmm4
  218. movaps -8 * SIZE(BB), %xmm1
  219. ADD2 %xmm2, %xmm5
  220. pshufd $0x4e, %xmm3, %xmm2
  221. mulpd %xmm0, %xmm3
  222. mulpd %xmm0, %xmm2
  223. movaps -12 * SIZE(AA), %xmm0
  224. ADD1 %xmm3, %xmm6
  225. movaps -6 * SIZE(BB), %xmm3
  226. ADD2 %xmm2, %xmm7
  227. pshufd $0x4e, %xmm1, %xmm2
  228. mulpd %xmm0, %xmm1
  229. mulpd %xmm0, %xmm2
  230. ADD1 %xmm1, %xmm4
  231. movaps -4 * SIZE(BB), %xmm1
  232. ADD2 %xmm2, %xmm5
  233. pshufd $0x4e, %xmm3, %xmm2
  234. mulpd %xmm0, %xmm3
  235. mulpd %xmm0, %xmm2
  236. movaps -10 * SIZE(AA), %xmm0
  237. ADD1 %xmm3, %xmm6
  238. movaps -2 * SIZE(BB), %xmm3
  239. ADD2 %xmm2, %xmm7
  240. pshufd $0x4e, %xmm1, %xmm2
  241. mulpd %xmm0, %xmm1
  242. mulpd %xmm0, %xmm2
  243. ADD1 %xmm1, %xmm4
  244. movaps 0 * SIZE(BB), %xmm1
  245. ADD2 %xmm2, %xmm5
  246. pshufd $0x4e, %xmm3, %xmm2
  247. mulpd %xmm0, %xmm3
  248. mulpd %xmm0, %xmm2
  249. movaps -8 * SIZE(AA), %xmm0
  250. PREFETCH (PREFETCHSIZE + 8) * SIZE(AA)
  251. ADD1 %xmm3, %xmm6
  252. movaps 2 * SIZE(BB), %xmm3
  253. ADD2 %xmm2, %xmm7
  254. pshufd $0x4e, %xmm1, %xmm2
  255. mulpd %xmm0, %xmm1
  256. mulpd %xmm0, %xmm2
  257. ADD1 %xmm1, %xmm4
  258. movaps 4 * SIZE(BB), %xmm1
  259. ADD2 %xmm2, %xmm5
  260. pshufd $0x4e, %xmm3, %xmm2
  261. mulpd %xmm0, %xmm3
  262. mulpd %xmm0, %xmm2
  263. movaps -6 * SIZE(AA), %xmm0
  264. ADD1 %xmm3, %xmm6
  265. movaps 6 * SIZE(BB), %xmm3
  266. ADD2 %xmm2, %xmm7
  267. pshufd $0x4e, %xmm1, %xmm2
  268. mulpd %xmm0, %xmm1
  269. mulpd %xmm0, %xmm2
  270. ADD1 %xmm1, %xmm4
  271. movaps 8 * SIZE(BB), %xmm1
  272. ADD2 %xmm2, %xmm5
  273. pshufd $0x4e, %xmm3, %xmm2
  274. mulpd %xmm0, %xmm3
  275. mulpd %xmm0, %xmm2
  276. movaps -4 * SIZE(AA), %xmm0
  277. ADD1 %xmm3, %xmm6
  278. movaps 10 * SIZE(BB), %xmm3
  279. ADD2 %xmm2, %xmm7
  280. pshufd $0x4e, %xmm1, %xmm2
  281. mulpd %xmm0, %xmm1
  282. mulpd %xmm0, %xmm2
  283. ADD1 %xmm1, %xmm4
  284. movaps 12 * SIZE(BB), %xmm1
  285. ADD2 %xmm2, %xmm5
  286. pshufd $0x4e, %xmm3, %xmm2
  287. mulpd %xmm0, %xmm3
  288. mulpd %xmm0, %xmm2
  289. movaps -2 * SIZE(AA), %xmm0
  290. ADD1 %xmm3, %xmm6
  291. movaps 14 * SIZE(BB), %xmm3
  292. ADD2 %xmm2, %xmm7
  293. pshufd $0x4e, %xmm1, %xmm2
  294. mulpd %xmm0, %xmm1
  295. mulpd %xmm0, %xmm2
  296. ADD1 %xmm1, %xmm4
  297. movaps 16 * SIZE(BB), %xmm1
  298. ADD2 %xmm2, %xmm5
  299. pshufd $0x4e, %xmm3, %xmm2
  300. mulpd %xmm0, %xmm3
  301. subl $-32 * SIZE, BB
  302. mulpd %xmm0, %xmm2
  303. movaps 0 * SIZE(AA), %xmm0
  304. subl $-16 * SIZE, AA
  305. subl $1, %eax
  306. jne .L12
  307. ALIGN_4
  308. .L15:
  309. #if defined(LT) || defined(RN)
  310. movl KK, %eax
  311. #else
  312. movl K, %eax
  313. subl KK, %eax
  314. #endif
  315. andl $7, %eax # if (k & 1)
  316. BRANCH
  317. je .L18
  318. ALIGN_4
  319. .L16:
  320. ADD1 %xmm3, %xmm6
  321. movaps -14 * SIZE(BB), %xmm3
  322. ADD2 %xmm2, %xmm7
  323. pshufd $0x4e, %xmm1, %xmm2
  324. mulpd %xmm0, %xmm1
  325. mulpd %xmm0, %xmm2
  326. ADD1 %xmm1, %xmm4
  327. movaps -12 * SIZE(BB), %xmm1
  328. ADD2 %xmm2, %xmm5
  329. pshufd $0x4e, %xmm3, %xmm2
  330. mulpd %xmm0, %xmm3
  331. mulpd %xmm0, %xmm2
  332. movaps -14 * SIZE(AA), %xmm0
  333. addl $2 * SIZE, AA
  334. addl $4 * SIZE, BB
  335. decl %eax
  336. jg .L16
  337. ALIGN_4
  338. .L18:
  339. #if defined(LN) || defined(RT)
  340. movl KK, %eax
  341. #ifdef LN
  342. subl $1, %eax
  343. #else
  344. subl $2, %eax
  345. #endif
  346. movl AORIG, AA
  347. sall $ZBASE_SHIFT, %eax
  348. leal (AA, %eax, 1), AA
  349. leal (B, %eax, 2), BB
  350. #endif
  351. ADD1 %xmm3, %xmm6
  352. pcmpeqb %xmm1, %xmm1
  353. ADD2 %xmm2, %xmm7
  354. psllq $63, %xmm1
  355. #ifndef CONJ
  356. pshufd $0x40, %xmm1, %xmm0
  357. shufps $0x04, %xmm1, %xmm1
  358. pxor %xmm0, %xmm4
  359. pxor %xmm0, %xmm6
  360. #else
  361. #if defined(LN) || defined(LT)
  362. pshufd $0x40, %xmm1, %xmm0
  363. #else
  364. pshufd $0x04, %xmm1, %xmm0
  365. #endif
  366. shufps $0x40, %xmm1, %xmm1
  367. pxor %xmm0, %xmm5
  368. pxor %xmm0, %xmm7
  369. #endif
  370. haddpd %xmm5, %xmm4
  371. haddpd %xmm7, %xmm6
  372. #if defined(LN) || defined(LT)
  373. movapd -16 * SIZE(BB), %xmm5
  374. movapd -14 * SIZE(BB), %xmm7
  375. subpd %xmm4, %xmm5
  376. subpd %xmm6, %xmm7
  377. #else
  378. movapd -16 * SIZE(AA), %xmm5
  379. movapd -14 * SIZE(AA), %xmm7
  380. subpd %xmm4, %xmm5
  381. subpd %xmm6, %xmm7
  382. #endif
  383. #if defined(LN) || defined(LT)
  384. movddup -16 * SIZE(AA), %xmm2
  385. movddup -15 * SIZE(AA), %xmm3
  386. pshufd $0x4e, %xmm5, %xmm4
  387. pshufd $0x4e, %xmm7, %xmm6
  388. xorpd %xmm1, %xmm4
  389. xorpd %xmm1, %xmm6
  390. mulpd %xmm2, %xmm5
  391. mulpd %xmm3, %xmm4
  392. mulpd %xmm2, %xmm7
  393. mulpd %xmm3, %xmm6
  394. addpd %xmm4, %xmm5
  395. addpd %xmm6, %xmm7
  396. #endif
  397. #ifdef RN
  398. movddup -16 * SIZE(BB), %xmm2
  399. movddup -15 * SIZE(BB), %xmm3
  400. pshufd $0x4e, %xmm5, %xmm4
  401. xorpd %xmm1, %xmm4
  402. mulpd %xmm2, %xmm5
  403. mulpd %xmm3, %xmm4
  404. addpd %xmm4, %xmm5
  405. movddup -14 * SIZE(BB), %xmm2
  406. movddup -13 * SIZE(BB), %xmm3
  407. movapd %xmm5, %xmm4
  408. pshufd $0x4e, %xmm5, %xmm6
  409. xorpd %xmm1, %xmm6
  410. mulpd %xmm2, %xmm4
  411. mulpd %xmm3, %xmm6
  412. subpd %xmm4, %xmm7
  413. subpd %xmm6, %xmm7
  414. movddup -10 * SIZE(BB), %xmm2
  415. movddup -9 * SIZE(BB), %xmm3
  416. pshufd $0x4e, %xmm7, %xmm6
  417. xorpd %xmm1, %xmm6
  418. mulpd %xmm2, %xmm7
  419. mulpd %xmm3, %xmm6
  420. addpd %xmm6, %xmm7
  421. #endif
  422. #ifdef RT
  423. movddup -10 * SIZE(BB), %xmm2
  424. movddup -9 * SIZE(BB), %xmm3
  425. pshufd $0x4e, %xmm7, %xmm6
  426. xorpd %xmm1, %xmm6
  427. mulpd %xmm2, %xmm7
  428. mulpd %xmm3, %xmm6
  429. addpd %xmm6, %xmm7
  430. movddup -12 * SIZE(BB), %xmm2
  431. movddup -11 * SIZE(BB), %xmm3
  432. movapd %xmm7, %xmm4
  433. pshufd $0x4e, %xmm7, %xmm6
  434. xorpd %xmm1, %xmm6
  435. mulpd %xmm2, %xmm4
  436. mulpd %xmm3, %xmm6
  437. subpd %xmm4, %xmm5
  438. subpd %xmm6, %xmm5
  439. movddup -16 * SIZE(BB), %xmm2
  440. movddup -15 * SIZE(BB), %xmm3
  441. pshufd $0x4e, %xmm5, %xmm4
  442. xorpd %xmm1, %xmm4
  443. mulpd %xmm2, %xmm5
  444. mulpd %xmm3, %xmm4
  445. addpd %xmm4, %xmm5
  446. #endif
  447. #ifdef LN
  448. subl $2 * SIZE, CO1
  449. #endif
  450. movlpd %xmm5, 0 * SIZE(CO1)
  451. movhpd %xmm5, 1 * SIZE(CO1)
  452. movlpd %xmm7, 0 * SIZE(CO1, LDC)
  453. movhpd %xmm7, 1 * SIZE(CO1, LDC)
  454. #if defined(LN) || defined(LT)
  455. movapd %xmm5, -16 * SIZE(BB)
  456. movapd %xmm7, -14 * SIZE(BB)
  457. #else
  458. movapd %xmm5, -16 * SIZE(AA)
  459. movapd %xmm7, -14 * SIZE(AA)
  460. #endif
  461. #ifndef LN
  462. addl $2 * SIZE, CO1
  463. #endif
  464. #if defined(LT) || defined(RN)
  465. movl K, %eax
  466. subl KK, %eax
  467. sall $ZBASE_SHIFT, %eax
  468. addl %eax, AA
  469. leal (BB, %eax, 2), BB
  470. #endif
  471. #ifdef LN
  472. subl $1, KK
  473. #endif
  474. #ifdef LT
  475. addl $1, KK
  476. #endif
  477. #ifdef RT
  478. movl K, %eax
  479. sall $ZBASE_SHIFT, %eax
  480. addl %eax, AORIG
  481. #endif
  482. decl %ebx # i --
  483. jg .L10
  484. ALIGN_4
  485. .L99:
  486. #ifdef LN
  487. movl K, %eax
  488. sall $1 + ZBASE_SHIFT, %eax
  489. addl %eax, B
  490. #endif
  491. #if defined(LT) || defined(RN)
  492. movl BB, B
  493. #endif
  494. #ifdef RN
  495. addl $2, KK
  496. #endif
  497. #ifdef RT
  498. subl $2, KK
  499. #endif
  500. decl J # j --
  501. jg .L01
  502. ALIGN_4
  503. .L100:
  504. movl N, %eax
  505. testl $1, %eax
  506. jle .L999
  507. #if defined(LT) || defined(RN)
  508. movl A, AA
  509. #else
  510. movl A, %eax
  511. movl %eax, AORIG
  512. #endif
  513. #ifdef RT
  514. movl K, %eax
  515. sall $ZBASE_SHIFT, %eax
  516. subl %eax, B
  517. #endif
  518. #ifdef RT
  519. subl LDC, C
  520. #endif
  521. movl C, CO1
  522. #ifndef RT
  523. addl LDC, C
  524. #endif
  525. #ifdef LN
  526. movl OFFSET, %eax
  527. addl M, %eax
  528. movl %eax, KK
  529. #endif
  530. #ifdef LT
  531. movl OFFSET, %eax
  532. movl %eax, KK
  533. #endif
  534. movl M, %ebx
  535. ALIGN_4
  536. L110:
  537. #ifdef LN
  538. movl K, %eax
  539. sall $ZBASE_SHIFT, %eax
  540. subl %eax, AORIG
  541. #endif
  542. #if defined(LN) || defined(RT)
  543. movl KK, %eax
  544. movl AORIG, AA
  545. sall $ZBASE_SHIFT, %eax
  546. addl %eax, AA
  547. #endif
  548. movl B, BB
  549. #if defined(LN) || defined(RT)
  550. movl KK, %eax
  551. sall $ZBASE_SHIFT, %eax
  552. addl %eax, BB
  553. #endif
  554. movaps -16 * SIZE(AA), %xmm0
  555. pxor %xmm2, %xmm2
  556. movaps -16 * SIZE(BB), %xmm1
  557. pxor %xmm3, %xmm3
  558. pxor %xmm4, %xmm4
  559. #ifdef LN
  560. prefetcht0 -2 * SIZE(CO1)
  561. #else
  562. prefetcht0 1 * SIZE(CO1)
  563. #endif
  564. pxor %xmm5, %xmm5
  565. pxor %xmm6, %xmm6
  566. pxor %xmm7, %xmm7
  567. #if defined(LT) || defined(RN)
  568. movl KK, %eax
  569. #else
  570. movl K, %eax
  571. subl KK, %eax
  572. #endif
  573. sarl $3, %eax
  574. je L115
  575. ALIGN_4
  576. L112:
  577. PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
  578. pshufd $0x4e, %xmm1, %xmm2
  579. mulpd %xmm0, %xmm1
  580. mulpd %xmm0, %xmm2
  581. movaps -14 * SIZE(AA), %xmm0
  582. ADD1 %xmm1, %xmm4
  583. movaps -14 * SIZE(BB), %xmm1
  584. ADD2 %xmm2, %xmm5
  585. pshufd $0x4e, %xmm1, %xmm2
  586. mulpd %xmm0, %xmm1
  587. mulpd %xmm0, %xmm2
  588. movaps -12 * SIZE(AA), %xmm0
  589. ADD1 %xmm1, %xmm6
  590. movaps -12 * SIZE(BB), %xmm1
  591. ADD2 %xmm2, %xmm7
  592. pshufd $0x4e, %xmm1, %xmm2
  593. mulpd %xmm0, %xmm1
  594. mulpd %xmm0, %xmm2
  595. movaps -10 * SIZE(AA), %xmm0
  596. ADD1 %xmm1, %xmm4
  597. movaps -10 * SIZE(BB), %xmm1
  598. ADD2 %xmm2, %xmm5
  599. pshufd $0x4e, %xmm1, %xmm2
  600. mulpd %xmm0, %xmm1
  601. mulpd %xmm0, %xmm2
  602. movaps -8 * SIZE(AA), %xmm0
  603. ADD1 %xmm1, %xmm6
  604. movaps -8 * SIZE(BB), %xmm1
  605. ADD2 %xmm2, %xmm7
  606. PREFETCH (PREFETCHSIZE + 8) * SIZE(AA)
  607. pshufd $0x4e, %xmm1, %xmm2
  608. mulpd %xmm0, %xmm1
  609. mulpd %xmm0, %xmm2
  610. movaps -6 * SIZE(AA), %xmm0
  611. ADD1 %xmm1, %xmm4
  612. movaps -6 * SIZE(BB), %xmm1
  613. ADD2 %xmm2, %xmm5
  614. pshufd $0x4e, %xmm1, %xmm2
  615. mulpd %xmm0, %xmm1
  616. mulpd %xmm0, %xmm2
  617. movaps -4 * SIZE(AA), %xmm0
  618. ADD1 %xmm1, %xmm6
  619. movaps -4 * SIZE(BB), %xmm1
  620. ADD2 %xmm2, %xmm7
  621. pshufd $0x4e, %xmm1, %xmm2
  622. mulpd %xmm0, %xmm1
  623. mulpd %xmm0, %xmm2
  624. movaps -2 * SIZE(AA), %xmm0
  625. ADD1 %xmm1, %xmm4
  626. movaps -2 * SIZE(BB), %xmm1
  627. ADD2 %xmm2, %xmm5
  628. pshufd $0x4e, %xmm1, %xmm2
  629. mulpd %xmm0, %xmm1
  630. mulpd %xmm0, %xmm2
  631. movaps 0 * SIZE(AA), %xmm0
  632. ADD1 %xmm1, %xmm6
  633. movaps 0 * SIZE(BB), %xmm1
  634. ADD2 %xmm2, %xmm7
  635. subl $-16 * SIZE, AA
  636. subl $-16 * SIZE, BB
  637. subl $1, %eax
  638. jne L112
  639. ALIGN_4
  640. L115:
  641. #if defined(LT) || defined(RN)
  642. movl KK, %eax
  643. #else
  644. movl K, %eax
  645. subl KK, %eax
  646. #endif
  647. andl $7, %eax # if (k & 1)
  648. BRANCH
  649. je L118
  650. ALIGN_4
  651. L116:
  652. pshufd $0x4e, %xmm1, %xmm2
  653. mulpd %xmm0, %xmm1
  654. mulpd %xmm0, %xmm2
  655. movaps -14 * SIZE(AA), %xmm0
  656. ADD1 %xmm1, %xmm4
  657. movaps -14 * SIZE(BB), %xmm1
  658. ADD2 %xmm2, %xmm5
  659. addl $2 * SIZE, AA
  660. addl $2 * SIZE, BB
  661. decl %eax
  662. jg L116
  663. ALIGN_4
  664. L118:
  665. #if defined(LN) || defined(RT)
  666. movl KK, %eax
  667. #ifdef LN
  668. subl $1, %eax
  669. #else
  670. subl $1, %eax
  671. #endif
  672. movl AORIG, AA
  673. sall $ZBASE_SHIFT, %eax
  674. leal (AA, %eax, 1), AA
  675. leal (B, %eax, 1), BB
  676. #endif
  677. addpd %xmm6, %xmm4
  678. pcmpeqb %xmm1, %xmm1
  679. addpd %xmm7, %xmm5
  680. psllq $63, %xmm1
  681. #ifndef CONJ
  682. pshufd $0x40, %xmm1, %xmm0
  683. shufps $0x04, %xmm1, %xmm1
  684. pxor %xmm0, %xmm4
  685. #else
  686. #if defined(LN) || defined(LT)
  687. pshufd $0x40, %xmm1, %xmm0
  688. #else
  689. pshufd $0x04, %xmm1, %xmm0
  690. #endif
  691. shufps $0x40, %xmm1, %xmm1
  692. pxor %xmm0, %xmm5
  693. #endif
  694. haddpd %xmm5, %xmm4
  695. #if defined(LN) || defined(LT)
  696. movapd -16 * SIZE(BB), %xmm5
  697. subpd %xmm4, %xmm5
  698. #else
  699. movapd -16 * SIZE(AA), %xmm5
  700. subpd %xmm4, %xmm5
  701. #endif
  702. #if defined(LN) || defined(LT)
  703. movddup -16 * SIZE(AA), %xmm2
  704. movddup -15 * SIZE(AA), %xmm3
  705. pshufd $0x4e, %xmm5, %xmm4
  706. xorpd %xmm1, %xmm4
  707. mulpd %xmm2, %xmm5
  708. mulpd %xmm3, %xmm4
  709. addpd %xmm4, %xmm5
  710. #endif
  711. #if defined(RN) || defined(RT)
  712. movddup -16 * SIZE(BB), %xmm2
  713. movddup -15 * SIZE(BB), %xmm3
  714. pshufd $0x4e, %xmm5, %xmm4
  715. xorpd %xmm1, %xmm4
  716. mulpd %xmm2, %xmm5
  717. mulpd %xmm3, %xmm4
  718. addpd %xmm4, %xmm5
  719. #endif
  720. #ifdef LN
  721. subl $2 * SIZE, CO1
  722. #endif
  723. movlpd %xmm5, 0 * SIZE(CO1)
  724. movhpd %xmm5, 1 * SIZE(CO1)
  725. #if defined(LN) || defined(LT)
  726. movapd %xmm5, -16 * SIZE(BB)
  727. #else
  728. movapd %xmm5, -16 * SIZE(AA)
  729. #endif
  730. #ifndef LN
  731. addl $2 * SIZE, CO1
  732. #endif
  733. #if defined(LT) || defined(RN)
  734. movl K, %eax
  735. subl KK, %eax
  736. sall $ZBASE_SHIFT, %eax
  737. addl %eax, AA
  738. addl %eax, BB
  739. #endif
  740. #ifdef LN
  741. subl $1, KK
  742. #endif
  743. #ifdef LT
  744. addl $1, KK
  745. #endif
  746. #ifdef RT
  747. movl K, %eax
  748. sall $ZBASE_SHIFT, %eax
  749. addl %eax, AORIG
  750. #endif
  751. decl %ebx # i --
  752. jg L110
  753. #ifdef LN
  754. movl K, %eax
  755. sall $ZBASE_SHIFT, %eax
  756. addl %eax, B
  757. #endif
  758. #if defined(LT) || defined(RN)
  759. movl BB, B
  760. #endif
  761. #ifdef RN
  762. addl $1, KK
  763. #endif
  764. #ifdef RT
  765. subl $1, KK
  766. #endif
  767. ALIGN_4
  768. .L999:
  769. popl %ebx
  770. popl %esi
  771. popl %edi
  772. popl %ebp
  773. addl $ARGS, %esp
  774. ret
  775. EPILOGUE