
ztrsm_kernel_RT_1x2_penryn.S 17 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT       */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
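
/* ZTRSM kernel (double-complex triangular solve) with 1x2 register
   blocking, tuned for Penryn-class SSE3 cores.  The LN/LT/RN/RT and
   CONJ preprocessor macros select the side/transpose/conjugation
   variant at assembly time. */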
#define ASSEMBLER
#include "common.h"

#define STACK	16
#define ARGS	16

#define M	 4 + STACK + ARGS(%esp)
#define N	 8 + STACK + ARGS(%esp)
#define K	12 + STACK + ARGS(%esp)
#define ALPHA_R	16 + STACK + ARGS(%esp)
#define ALPHA_I	24 + STACK + ARGS(%esp)
#define A	32 + STACK + ARGS(%esp)
#define ARG_B	36 + STACK + ARGS(%esp)
#define C	40 + STACK + ARGS(%esp)
#define ARG_LDC	44 + STACK + ARGS(%esp)
#define OFFSET	48 + STACK + ARGS(%esp)

#define J	 0 + STACK(%esp)
#define KK	 4 + STACK(%esp)
#define KKK	 8 + STACK(%esp)
#define AORIG	12 + STACK(%esp)
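
/* The macros above split into two groups: the caller's arguments,
   addressed past the four saved registers (STACK = 16) and the local
   area (ARGS = 16), and the locals J/KK/KKK/AORIG kept inside that
   local area. */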
#if defined(PENRYN) || defined(DUNNINGTON)
#define PREFETCH prefetcht1
#define PREFETCHSIZE 84
#endif

#if defined(NEHALEM) || defined(SANDYBRIDGE) || defined(HASWELL) || defined (SKYLAKEX) || defined (COOPERLAKE)
#define PREFETCH prefetcht1
#define PREFETCHSIZE 84
#endif

#ifdef NANO
#define PREFETCH prefetcht0
#define PREFETCHSIZE (8 * 2)
#endif

#define AA  %edx
#define BB  %ecx
#define LDC %ebp
#define B   %edi
#define CO1 %esi

#define ADD1 addpd
#define ADD2 addpd
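
/* Register roles: AA and BB walk the packed A and B panels, B keeps
   the base of the current B panel, CO1 points into C, and LDC is the
   row stride of C (converted to bytes below).  ADD1/ADD2 accumulate
   the two halves of each complex product; on this target both are
   plain addpd. */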
	PROLOGUE

	subl $ARGS, %esp

	pushl %ebp
	pushl %edi
	pushl %esi
	pushl %ebx

	PROFCODE

	movl ARG_B, B
	movl ARG_LDC, LDC
	movl OFFSET, %eax
#ifdef RN
	negl %eax
#endif
	movl %eax, KK

	movl M, %ebx
	testl %ebx, %ebx
	jle .L999
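
/* Bias A and B by 16 elements so the inner loops can use negative
   displacements, and scale LDC from elements to bytes (ZBASE_SHIFT is
   the log2 size of one double-complex element, 16 bytes). */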
	subl $-16 * SIZE, A
	subl $-16 * SIZE, B
	sall $ZBASE_SHIFT, LDC

#ifdef LN
	movl M, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, C
	imull K, %eax
	addl %eax, A
#endif

#ifdef RT
	movl N, %eax
	sall $ZBASE_SHIFT, %eax
	imull K, %eax
	addl %eax, B
	movl N, %eax
	imull LDC, %eax
	addl %eax, C
#endif

#ifdef RT
	movl N, %eax
	subl OFFSET, %eax
	movl %eax, KK
#endif

	movl N, %eax
	testl $1, %eax
	jle .L100
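
/* If N is odd, handle that single column of B and C first with a
   one-wide version of the kernel; the 1x2 main loop below then covers
   the remaining even number of columns. */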
#if defined(LT) || defined(RN)
	movl A, AA
#else
	movl A, %eax
	movl %eax, AORIG
#endif

#ifdef RT
	movl K, %eax
	sall $ZBASE_SHIFT, %eax
	subl %eax, B
#endif

#ifdef RT
	subl LDC, C
#endif
	movl C, CO1
#ifndef RT
	addl LDC, C
#endif

#ifdef LN
	movl OFFSET, %eax
	addl M, %eax
	movl %eax, KK
#endif

#ifdef LT
	movl OFFSET, %eax
	movl %eax, KK
#endif

	movl M, %ebx
	ALIGN_4
.L110:
#ifdef LN
	movl K, %eax
	sall $ZBASE_SHIFT, %eax
	subl %eax, AORIG
#endif

#if defined(LN) || defined(RT)
	movl KK, %eax
	movl AORIG, AA
	sall $ZBASE_SHIFT, %eax
	addl %eax, AA
#endif

	movl B, BB
#if defined(LN) || defined(RT)
	movl KK, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, BB
#endif

	movaps -16 * SIZE(AA), %xmm0
	pxor %xmm2, %xmm2
	movaps -16 * SIZE(BB), %xmm1
	pxor %xmm3, %xmm3

	pxor %xmm4, %xmm4
#ifdef LN
	prefetcht0 -2 * SIZE(CO1)
#else
	prefetcht0  1 * SIZE(CO1)
#endif
	pxor %xmm5, %xmm5
	pxor %xmm6, %xmm6
	pxor %xmm7, %xmm7

#if defined(LT) || defined(RN)
	movl KK, %eax
#else
	movl K, %eax
	subl KK, %eax
#endif
	sarl $3, %eax
	je .L115
	ALIGN_4
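
/* Inner K loop, unrolled 8x.  Each step is one double-complex
   multiply-accumulate: pshufd $0x4e swaps the two halves of the B
   value, so xmm4 collects the (re*re, im*im) products and xmm5 the
   (re*im, im*re) cross products; the sign fix-up and haddpd after the
   loop combine them into real and imaginary parts. */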
.L112:
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -14 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm4
	movaps -14 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -12 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm6
	movaps -12 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm7

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -10 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm4
	movaps -10 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -8 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm6
	movaps -8 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm7

	PREFETCH (PREFETCHSIZE + 8) * SIZE(AA)

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -6 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm4
	movaps -6 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -4 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm6
	movaps -4 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm7

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -2 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm4
	movaps -2 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5

	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps 0 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm6
	movaps 0 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm7

	subl $-16 * SIZE, AA
	subl $-16 * SIZE, BB
	subl $1, %eax
	jne .L112
	ALIGN_4
.L115:
#if defined(LT) || defined(RN)
	movl KK, %eax
#else
	movl K, %eax
	subl KK, %eax
#endif
	andl $7, %eax		# k & 7
	BRANCH
	je .L118
	ALIGN_4
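
/* Remainder loop: one complex multiply-accumulate per iteration for
   the K % 8 leftover updates. */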
.L116:
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2
	movaps -14 * SIZE(AA), %xmm0
	ADD1 %xmm1, %xmm4
	movaps -14 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5

	addl $2 * SIZE, AA
	addl $2 * SIZE, BB
	decl %eax
	jg .L116
	ALIGN_4

.L118:
#if defined(LN) || defined(RT)
	movl KK, %eax
#ifdef LN
	subl $1, %eax
#else
	subl $1, %eax
#endif
	movl AORIG, AA
	sall $ZBASE_SHIFT, %eax
	leal (AA, %eax, 1), AA
	leal (B,  %eax, 1), BB
#endif
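
/* Combine the partial accumulators, then build a 64-bit sign-bit mask
   (pcmpeqb/psllq $63) and place it with pshufd/shufps so the pxor
   negates exactly one lane; together with haddpd this yields
   re = a.re*b.re - a.im*b.im and im = a.re*b.im + a.im*b.re
   (the lane choice differs for the conjugated variants). */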
	addpd %xmm6, %xmm4
	pcmpeqb %xmm1, %xmm1
	addpd %xmm7, %xmm5
	psllq $63, %xmm1

#ifndef CONJ
	pshufd $0x40, %xmm1, %xmm0
	shufps $0x04, %xmm1, %xmm1

	pxor %xmm0, %xmm4
#else
#if defined(LN) || defined(LT)
	pshufd $0x40, %xmm1, %xmm0
#else
	pshufd $0x04, %xmm1, %xmm0
#endif
	shufps $0x40, %xmm1, %xmm1

	pxor %xmm0, %xmm5
#endif

	haddpd %xmm5, %xmm4

#if defined(LN) || defined(LT)
	movapd -16 * SIZE(BB), %xmm5
	subpd %xmm4, %xmm5
#else
	movapd -16 * SIZE(AA), %xmm5
	subpd %xmm4, %xmm5
#endif
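
/* Substitution step for the 1x1 diagonal block: a complex multiply of
   the right-hand side by the diagonal element, whose real and
   imaginary parts are broadcast into xmm2/xmm3.  The TRSM packing
   routines appear to store diagonal elements pre-inverted, which is
   why this is a multiply rather than a divide. */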
#if defined(LN) || defined(LT)
	movddup -16 * SIZE(AA), %xmm2
	movddup -15 * SIZE(AA), %xmm3

	pshufd $0x4e, %xmm5, %xmm4
	xorpd %xmm1, %xmm4
	mulpd %xmm2, %xmm5
	mulpd %xmm3, %xmm4
	addpd %xmm4, %xmm5
#endif

#if defined(RN) || defined(RT)
	movddup -16 * SIZE(BB), %xmm2
	movddup -15 * SIZE(BB), %xmm3

	pshufd $0x4e, %xmm5, %xmm4
	xorpd %xmm1, %xmm4
	mulpd %xmm2, %xmm5
	mulpd %xmm3, %xmm4
	addpd %xmm4, %xmm5
#endif

#ifdef LN
	subl $2 * SIZE, CO1
#endif

	movlpd %xmm5, 0 * SIZE(CO1)
	movhpd %xmm5, 1 * SIZE(CO1)

#if defined(LN) || defined(LT)
	movapd %xmm5, -16 * SIZE(BB)
#else
	movapd %xmm5, -16 * SIZE(AA)
#endif

#ifndef LN
	addl $2 * SIZE, CO1
#endif

#if defined(LT) || defined(RN)
	movl K, %eax
	subl KK, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, AA
	addl %eax, BB
#endif

#ifdef LN
	subl $1, KK
#endif

#ifdef LT
	addl $1, KK
#endif

#ifdef RT
	movl K, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, AORIG
#endif

	decl %ebx			# i --
	jg .L110
#ifdef LN
	movl K, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, B
#endif

#if defined(LT) || defined(RN)
	movl BB, B
#endif

#ifdef RN
	addl $1, KK
#endif

#ifdef RT
	subl $1, KK
#endif
	ALIGN_4
.L100:
	movl N, %eax
	sarl $1, %eax
	movl %eax, J		# j = n / 2
	jle .L999
	ALIGN_4
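
/* Main loop over pairs of columns: J = N/2 iterations, each computing
   a 1x2 strip of C (one element in each of two adjacent columns). */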
.L01:
#if defined(LT) || defined(RN)
	movl A, AA
#else
	movl A, %eax
	movl %eax, AORIG
#endif

#ifdef RT
	movl K, %eax
	sall $1 + ZBASE_SHIFT, %eax
	subl %eax, B
#endif

	leal (, LDC, 2), %eax

#ifdef RT
	subl %eax, C
#endif
	movl C, CO1
#ifndef RT
	addl %eax, C
#endif

#ifdef LN
	movl OFFSET, %eax
	addl M, %eax
	movl %eax, KK
#endif

#ifdef LT
	movl OFFSET, %eax
	movl %eax, KK
#endif

	movl M, %ebx
	ALIGN_4

.L10:
#ifdef LN
	movl K, %eax
	sall $ZBASE_SHIFT, %eax
	subl %eax, AORIG
#endif

#if defined(LN) || defined(RT)
	movl KK, %eax
	movl AORIG, AA
	sall $ZBASE_SHIFT, %eax
	addl %eax, AA
#endif

	movl B, BB
#if defined(LN) || defined(RT)
	movl KK, %eax
	sall $1 + ZBASE_SHIFT, %eax
	addl %eax, BB
#endif

	movaps -16 * SIZE(AA), %xmm0
	pxor %xmm2, %xmm2
	movaps -16 * SIZE(BB), %xmm1
	pxor %xmm3, %xmm3

#ifdef LN
	pxor %xmm4, %xmm4
	prefetcht0 -2 * SIZE(CO1)
	pxor %xmm5, %xmm5
	prefetcht0 -2 * SIZE(CO1, LDC)
#else
	pxor %xmm4, %xmm4
	prefetcht0  1 * SIZE(CO1)
	pxor %xmm5, %xmm5
	prefetcht0  1 * SIZE(CO1, LDC)
#endif
	pxor %xmm6, %xmm6
	pxor %xmm7, %xmm7

#if defined(LT) || defined(RN)
	movl KK, %eax
#else
	movl K, %eax
	subl KK, %eax
#endif
	sarl $3, %eax
	je .L15
	ALIGN_4
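
/* 8x-unrolled K loop for the two-column case: xmm1 and xmm3 stream the
   two packed B columns while xmm0 holds the A element; xmm4/xmm5
   accumulate column 0 and xmm6/xmm7 column 1. */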
.L12:
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)

	ADD1 %xmm3, %xmm6
	movaps -14 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps -12 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -14 * SIZE(AA), %xmm0

	ADD1 %xmm3, %xmm6
	movaps -10 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps -8 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -12 * SIZE(AA), %xmm0

	ADD1 %xmm3, %xmm6
	movaps -6 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps -4 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -10 * SIZE(AA), %xmm0

	ADD1 %xmm3, %xmm6
	movaps -2 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps 0 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -8 * SIZE(AA), %xmm0

	PREFETCH (PREFETCHSIZE + 8) * SIZE(AA)

	ADD1 %xmm3, %xmm6
	movaps 2 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps 4 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -6 * SIZE(AA), %xmm0

	ADD1 %xmm3, %xmm6
	movaps 6 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps 8 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -4 * SIZE(AA), %xmm0

	ADD1 %xmm3, %xmm6
	movaps 10 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps 12 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -2 * SIZE(AA), %xmm0

	ADD1 %xmm3, %xmm6
	movaps 14 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps 16 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	subl $-32 * SIZE, BB
	mulpd %xmm0, %xmm2
	movaps 0 * SIZE(AA), %xmm0

	subl $-16 * SIZE, AA
	subl $1, %eax
	jne .L12
	ALIGN_4
.L15:
#if defined(LT) || defined(RN)
	movl KK, %eax
#else
	movl K, %eax
	subl KK, %eax
#endif
	andl $7, %eax		# k & 7
	BRANCH
	je .L18
	ALIGN_4
.L16:
	ADD1 %xmm3, %xmm6
	movaps -14 * SIZE(BB), %xmm3
	ADD2 %xmm2, %xmm7
	pshufd $0x4e, %xmm1, %xmm2
	mulpd %xmm0, %xmm1
	mulpd %xmm0, %xmm2

	ADD1 %xmm1, %xmm4
	movaps -12 * SIZE(BB), %xmm1
	ADD2 %xmm2, %xmm5
	pshufd $0x4e, %xmm3, %xmm2
	mulpd %xmm0, %xmm3
	mulpd %xmm0, %xmm2
	movaps -14 * SIZE(AA), %xmm0

	addl $2 * SIZE, AA
	addl $4 * SIZE, BB
	decl %eax
	jg .L16
	ALIGN_4

.L18:
#if defined(LN) || defined(RT)
	movl KK, %eax
#ifdef LN
	subl $1, %eax
#else
	subl $2, %eax
#endif
	movl AORIG, AA
	sall $ZBASE_SHIFT, %eax
	leal (AA, %eax, 1), AA
	leal (B,  %eax, 2), BB
#endif

	ADD1 %xmm3, %xmm6
	pcmpeqb %xmm1, %xmm1
	ADD2 %xmm2, %xmm7
	psllq $63, %xmm1

#ifndef CONJ
	pshufd $0x40, %xmm1, %xmm0
	shufps $0x04, %xmm1, %xmm1

	pxor %xmm0, %xmm4
	pxor %xmm0, %xmm6
#else
#if defined(LN) || defined(LT)
	pshufd $0x40, %xmm1, %xmm0
#else
	pshufd $0x04, %xmm1, %xmm0
#endif
	shufps $0x40, %xmm1, %xmm1

	pxor %xmm0, %xmm5
	pxor %xmm0, %xmm7
#endif

	haddpd %xmm5, %xmm4
	haddpd %xmm7, %xmm6

#if defined(LN) || defined(LT)
	movapd -16 * SIZE(BB), %xmm5
	movapd -14 * SIZE(BB), %xmm7
	subpd %xmm4, %xmm5
	subpd %xmm6, %xmm7
#else
	movapd -16 * SIZE(AA), %xmm5
	movapd -14 * SIZE(AA), %xmm7
	subpd %xmm4, %xmm5
	subpd %xmm6, %xmm7
#endif

#if defined(LN) || defined(LT)
	movddup -16 * SIZE(AA), %xmm2
	movddup -15 * SIZE(AA), %xmm3

	pshufd $0x4e, %xmm5, %xmm4
	pshufd $0x4e, %xmm7, %xmm6
	xorpd %xmm1, %xmm4
	xorpd %xmm1, %xmm6
	mulpd %xmm2, %xmm5
	mulpd %xmm3, %xmm4
	mulpd %xmm2, %xmm7
	mulpd %xmm3, %xmm6
	addpd %xmm4, %xmm5
	addpd %xmm6, %xmm7
#endif
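
/* RN: forward substitution through the 2x2 triangular block of B;
   solve column 0 (xmm5), eliminate it from column 1, then solve
   column 1 (xmm7).  Each movddup pair loads the real and imaginary
   parts of one packed B entry. */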
#ifdef RN
	movddup -16 * SIZE(BB), %xmm2
	movddup -15 * SIZE(BB), %xmm3

	pshufd $0x4e, %xmm5, %xmm4
	xorpd %xmm1, %xmm4
	mulpd %xmm2, %xmm5
	mulpd %xmm3, %xmm4
	addpd %xmm4, %xmm5

	movddup -14 * SIZE(BB), %xmm2
	movddup -13 * SIZE(BB), %xmm3

	movapd %xmm5, %xmm4
	pshufd $0x4e, %xmm5, %xmm6
	xorpd %xmm1, %xmm6
	mulpd %xmm2, %xmm4
	mulpd %xmm3, %xmm6
	subpd %xmm4, %xmm7
	subpd %xmm6, %xmm7

	movddup -10 * SIZE(BB), %xmm2
	movddup  -9 * SIZE(BB), %xmm3

	pshufd $0x4e, %xmm7, %xmm6
	xorpd %xmm1, %xmm6
	mulpd %xmm2, %xmm7
	mulpd %xmm3, %xmm6
	addpd %xmm6, %xmm7
#endif
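
/* RT: the same 2x2 block solved in reverse order (backward
   substitution): solve column 1 first, eliminate it from column 0,
   then solve column 0. */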
#ifdef RT
	movddup -10 * SIZE(BB), %xmm2
	movddup  -9 * SIZE(BB), %xmm3

	pshufd $0x4e, %xmm7, %xmm6
	xorpd %xmm1, %xmm6
	mulpd %xmm2, %xmm7
	mulpd %xmm3, %xmm6
	addpd %xmm6, %xmm7

	movddup -12 * SIZE(BB), %xmm2
	movddup -11 * SIZE(BB), %xmm3

	movapd %xmm7, %xmm4
	pshufd $0x4e, %xmm7, %xmm6
	xorpd %xmm1, %xmm6
	mulpd %xmm2, %xmm4
	mulpd %xmm3, %xmm6
	subpd %xmm4, %xmm5
	subpd %xmm6, %xmm5

	movddup -16 * SIZE(BB), %xmm2
	movddup -15 * SIZE(BB), %xmm3

	pshufd $0x4e, %xmm5, %xmm4
	xorpd %xmm1, %xmm4
	mulpd %xmm2, %xmm5
	mulpd %xmm3, %xmm4
	addpd %xmm4, %xmm5
#endif

#ifdef LN
	subl $2 * SIZE, CO1
#endif

	movlpd %xmm5, 0 * SIZE(CO1)
	movhpd %xmm5, 1 * SIZE(CO1)
	movlpd %xmm7, 0 * SIZE(CO1, LDC)
	movhpd %xmm7, 1 * SIZE(CO1, LDC)

#if defined(LN) || defined(LT)
	movapd %xmm5, -16 * SIZE(BB)
	movapd %xmm7, -14 * SIZE(BB)
#else
	movapd %xmm5, -16 * SIZE(AA)
	movapd %xmm7, -14 * SIZE(AA)
#endif

#ifndef LN
	addl $2 * SIZE, CO1
#endif

#if defined(LT) || defined(RN)
	movl K, %eax
	subl KK, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, AA
	leal (BB, %eax, 2), BB
#endif

#ifdef LN
	subl $1, KK
#endif

#ifdef LT
	addl $1, KK
#endif

#ifdef RT
	movl K, %eax
	sall $ZBASE_SHIFT, %eax
	addl %eax, AORIG
#endif

	decl %ebx			# i --
	jg .L10
	ALIGN_4

.L99:
#ifdef LN
	movl K, %eax
	sall $1 + ZBASE_SHIFT, %eax
	addl %eax, B
#endif

#if defined(LT) || defined(RN)
	movl BB, B
#endif

#ifdef RN
	addl $2, KK
#endif

#ifdef RT
	subl $2, KK
#endif

	decl J			# j --
	jg .L01
	ALIGN_4

.L999:
	popl %ebx
	popl %esi
	popl %edi
	popl %ebp

	addl $ARGS, %esp
	ret

	EPILOGUE