
trsm_kernel_RT_1x4.S 20 kB

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin. */
/* All rights reserved. */
/* */
/* Redistribution and use in source and binary forms, with or */
/* without modification, are permitted provided that the following */
/* conditions are met: */
/* */
/* 1. Redistributions of source code must retain the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer. */
/* */
/* 2. Redistributions in binary form must reproduce the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer in the documentation and/or other materials */
/* provided with the distribution. */
/* */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
/* POSSIBILITY OF SUCH DAMAGE. */
/* */
/* The views and conclusions contained in the software and */
/* documentation are those of the authors and should not be */
/* interpreted as representing official policies, either expressed */
/* or implied, of The University of Texas at Austin. */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#define STACK 16
#define ARGS 32

#define J 0 + STACK(%esp)
#define I 4 + STACK(%esp)
#define KK 8 + STACK(%esp)
#define KKK 12 + STACK(%esp)
#define AORIG 16 + STACK(%esp)

#define M 4 + STACK + ARGS(%esp)
#define N 8 + STACK + ARGS(%esp)
#define K 12 + STACK + ARGS(%esp)
#define ALPHA 16 + STACK + ARGS(%esp)
#ifdef DOUBLE
#define STACK_A 24 + STACK + ARGS(%esp)
#define STACK_B 28 + STACK + ARGS(%esp)
#define C 32 + STACK + ARGS(%esp)
#define STACK_LDC 36 + STACK + ARGS(%esp)
#define OFFSET 40 + STACK + ARGS(%esp)
#else
#define STACK_A 20 + STACK + ARGS(%esp)
#define STACK_B 24 + STACK + ARGS(%esp)
#define C 28 + STACK + ARGS(%esp)
#define STACK_LDC 32 + STACK + ARGS(%esp)
#define OFFSET 36 + STACK + ARGS(%esp)
#endif

#define A %edx
#define B %ecx
#define B_ORIG %ebx
#define LDC %ebp

#define PREFETCHSIZE (5 + 8 * 10)
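/* A and B walk the packed panels of the two input matrices, B_ORIG keeps */
/* the start of the current B panel, and LDC is the leading dimension of  */
/* C scaled to bytes. The STACK(%esp) slots above (J, I, KK, KKK, AORIG)  */
/* are scratch space for loop counters and the KK offset used by the      */
/* LN/LT/RN/RT solve variants.                                            */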
PROLOGUE

subl $ARGS, %esp # Generate Stack Frame

pushl %ebp
pushl %edi
pushl %esi
pushl %ebx

PROFCODE

movl STACK_B, B_ORIG
movl STACK_LDC, LDC
leal (, LDC, SIZE), LDC
#ifdef LN
movl M, %eax
leal (, %eax, SIZE), %eax
addl %eax, C
imull K, %eax
addl %eax, STACK_A
#endif
#ifdef RT
movl N, %eax
leal (, %eax, SIZE), %eax
imull K, %eax
addl %eax, B_ORIG
movl N, %eax
imull LDC, %eax
addl %eax, C
#endif
#ifdef RN
movl OFFSET, %eax
negl %eax
movl %eax, KK
#endif
#ifdef RT
movl N, %eax
subl OFFSET, %eax
movl %eax, KK
#endif
subl $-16 * SIZE, B_ORIG
subl $-16 * SIZE, STACK_A
movl M, %eax
testl %eax, %eax
jle .L999
movl N, %eax
testl %eax, %eax
jle .L999
movl K, %eax
testl %eax, %eax
jle .L999
movl N, %eax
andl $1, %eax
je .L20
ALIGN_3
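# .L31: tail case for a single remaining column of B/C (N odd).
# Each k step multiplies one element of A by one element of B; the k loop
# is unrolled by 8 into four x87 accumulators (.L35), with a k % 8
# remainder loop at .L37 and the back-substitution and store at .L39.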
.L31:
#if defined(LT) || defined(RN)
movl STACK_A, A
#else
movl STACK_A, %eax
movl %eax, AORIG
#endif
#ifdef RT
movl K, %eax
sall $0 + BASE_SHIFT, %eax
subl %eax, B_ORIG
#endif
#ifdef RT
subl LDC, C
#endif
movl C, %edi
#ifndef RT
addl LDC, C
#endif
#ifdef LN
movl OFFSET, %eax
addl M, %eax
movl %eax, KK
#endif
#ifdef LT
movl OFFSET, %eax
movl %eax, KK
#endif
movl B_ORIG, B
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
sarl $5, %eax
jle .L33
ALIGN_4
.L32:
movl -16 * SIZE(B), %esi
movl -8 * SIZE(B), %esi
movl 0 * SIZE(B), %esi
movl 8 * SIZE(B), %esi
subl $-32 * SIZE, B
decl %eax
jne .L32
ALIGN_3
.L33:
movl M, %esi
movl %esi, I
ALIGN_3
.L34:
#ifdef LN
movl K, %eax
sall $BASE_SHIFT, %eax
subl %eax, AORIG
#endif
#if defined(LN) || defined(RT)
movl KK, %eax
leal (, %eax, SIZE), %eax
movl AORIG, A
leal (A , %eax, 1), A
leal (B_ORIG, %eax, 1), B
#else
movl B_ORIG, B
#endif
fldz
fldz
fldz
fldz
prefetchw 1 * SIZE(%edi)
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
sarl $3, %eax
je .L36
ALIGN_3
.L35:
FLD -16 * SIZE(A)
FMUL -16 * SIZE(B)
faddp %st, %st(1)
FLD -15 * SIZE(A)
FMUL -15 * SIZE(B)
faddp %st, %st(2)
FLD -14 * SIZE(A)
FMUL -14 * SIZE(B)
faddp %st, %st(3)
FLD -13 * SIZE(A)
FMUL -13 * SIZE(B)
faddp %st, %st(4)
FLD -12 * SIZE(A)
FMUL -12 * SIZE(B)
faddp %st, %st(1)
FLD -11 * SIZE(A)
FMUL -11 * SIZE(B)
faddp %st, %st(2)
FLD -10 * SIZE(A)
FMUL -10 * SIZE(B)
faddp %st, %st(3)
FLD -9 * SIZE(A)
FMUL -9 * SIZE(B)
faddp %st, %st(4)
addl $8 * SIZE, A
addl $8 * SIZE, B
decl %eax
jne .L35
ALIGN_4
.L36:
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
and $7, %eax
je .L39
ALIGN_4
.L37:
FLD -16 * SIZE(A)
FMUL -16 * SIZE(B)
faddp %st, %st(1)
addl $1 * SIZE,A
addl $1 * SIZE,B
decl %eax
jne .L37
ALIGN_4
.L39:
faddp %st, %st(2)
faddp %st, %st(2)
faddp %st, %st(1)
#if defined(LN) || defined(RT)
movl KK, %eax
subl $1, %eax
movl AORIG, A
leal (A, %eax, SIZE), A
leal (B_ORIG, %eax, SIZE), B
#endif
#if defined(LN) || defined(LT)
FLD 0 * SIZE - 16 * SIZE(B)
fsubp %st, %st(1)
#else
FLD 0 * SIZE - 16 * SIZE(A)
fsubp %st, %st(1)
#endif
#if defined(LN) || defined(LT)
FLD 0 * SIZE - 16 * SIZE(A)
fmulp %st, %st(1)
#endif
#if defined(RN) || defined(RT)
FMUL 0 * SIZE - 16 * SIZE(B)
#endif
#ifdef LN
subl $1 * SIZE, %edi
#endif
#if defined(LN) || defined(LT)
FSTU 0 * SIZE - 16 * SIZE(B)
#else
FSTU 0 * SIZE - 16 * SIZE(A)
#endif
FST 0 * SIZE(%edi)
#ifndef LN
addl $1 * SIZE, %edi
#endif
#if defined(LT) || defined(RN)
movl K, %eax
subl KK, %eax
leal (A, %eax, SIZE), A
leal (B, %eax, SIZE), B
#endif
#ifdef LN
subl $1, KK
#endif
#ifdef LT
addl $1, KK
#endif
#ifdef RT
movl K, %eax
sall $BASE_SHIFT, %eax
addl %eax, AORIG
#endif
decl I
jne .L34
#ifdef LN
movl K, %eax
leal ( , %eax, SIZE), %eax
leal (B_ORIG, %eax, 1), B_ORIG
#endif
#if defined(LT) || defined(RN)
movl B, B_ORIG
#endif
#ifdef RN
addl $1, KK
#endif
#ifdef RT
subl $1, KK
#endif
ALIGN_4
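# .L20: tail case for two remaining columns of B/C (N & 2).
# Each k step multiplies one element of A by two elements of B, unrolled by
# 8 (.L25), with a k % 8 remainder at .L27 and the back-substitution and
# store at .L29.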
.L20:
movl N, %eax
andl $2, %eax
je .L30
#if defined(LT) || defined(RN)
movl STACK_A, A
#else
movl STACK_A, %eax
movl %eax, AORIG
#endif
#ifdef RT
movl K, %eax
sall $1 + BASE_SHIFT, %eax
subl %eax, B_ORIG
#endif
leal (, LDC, 2), %eax
#ifdef RT
subl %eax, C
#endif
movl C, %edi
#ifndef RT
addl %eax, C
#endif
#ifdef LN
movl OFFSET, %eax
addl M, %eax
movl %eax, KK
#endif
#ifdef LT
movl OFFSET, %eax
movl %eax, KK
#endif
movl B_ORIG, B
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
sarl $4, %eax
jle .L23
ALIGN_4
.L22:
movl -16 * SIZE(B), %esi
movl -8 * SIZE(B), %esi
movl 0 * SIZE(B), %esi
movl 8 * SIZE(B), %esi
subl $-32 * SIZE, B
decl %eax
jne .L22
ALIGN_3
.L23:
movl M, %esi
movl %esi, I
ALIGN_3
.L24:
#ifdef LN
movl K, %eax
sall $BASE_SHIFT, %eax
subl %eax, AORIG
#endif
#if defined(LN) || defined(RT)
movl KK, %eax
leal (, %eax, SIZE), %eax
movl AORIG, A
leal (A , %eax, 1), A
leal (B_ORIG, %eax, 2), B
#else
movl B_ORIG, B
#endif
fldz
fldz
fldz
fldz
FLD -16 * SIZE(A)
FLD -16 * SIZE(B)
prefetchw 1 * SIZE(%edi)
prefetchw 1 * SIZE(%edi, LDC)
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
sarl $3, %eax
je .L26
ALIGN_3
.L25:
fmul %st(1), %st
faddp %st, %st(2)
FMUL -15 * SIZE(B)
faddp %st, %st(2)
FLD -15 * SIZE(A)
FLD -14 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
FMUL -13 * SIZE(B)
faddp %st, %st(4)
FLD -14 * SIZE(A)
FLD -12 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(2)
FMUL -11 * SIZE(B)
faddp %st, %st(2)
FLD -13 * SIZE(A)
FLD -10 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
FMUL -9 * SIZE(B)
faddp %st, %st(4)
FLD -12 * SIZE(A)
FLD -8 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(2)
FMUL -7 * SIZE(B)
faddp %st, %st(2)
FLD -11 * SIZE(A)
FLD -6 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
FMUL -5 * SIZE(B)
faddp %st, %st(4)
FLD -10 * SIZE(A)
FLD -4 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(2)
FMUL -3 * SIZE(B)
faddp %st, %st(2)
FLD -9 * SIZE(A)
FLD -2 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
FMUL -1 * SIZE(B)
faddp %st, %st(4)
FLD -8 * SIZE(A)
FLD 0 * SIZE(B)
addl $ 8 * SIZE, A
subl $-16 * SIZE, B
decl %eax
jne .L25
ALIGN_4
.L26:
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
and $7, %eax
je .L29
ALIGN_4
.L27:
fmul %st(1), %st
faddp %st, %st(2)
FMUL -15 * SIZE(B)
faddp %st, %st(2)
FLD -15 * SIZE(A)
FLD -14 * SIZE(B)
addl $1 * SIZE,A
addl $2 * SIZE,B
decl %eax
jne .L27
ALIGN_4
.L29:
ffreep %st(0)
ffreep %st(0)
faddp %st, %st(2)
faddp %st, %st(2)
#if defined(LN) || defined(RT)
movl KK, %eax
#ifdef LN
subl $1, %eax
#else
subl $2, %eax
#endif
leal (, %eax, SIZE), %eax
movl AORIG, A
leal (A, %eax, 1), A
leal (B_ORIG, %eax, 2), B
#endif
#if defined(LN) || defined(LT)
FLD 0 * SIZE - 16 * SIZE(B)
fsubp %st, %st(1)
FLD 1 * SIZE - 16 * SIZE(B)
fsubp %st, %st(2)
#else
FLD 0 * SIZE - 16 * SIZE(A)
fsubp %st, %st(1)
FLD 1 * SIZE - 16 * SIZE(A)
fsubp %st, %st(2)
#endif
#ifdef LN
FLD 0 * SIZE - 16 * SIZE(A)
fmul %st, %st(1)
fmulp %st, %st(2)
#endif
#ifdef LT
FLD 0 * SIZE - 16 * SIZE(A)
fmul %st, %st(1)
fmulp %st, %st(2)
#endif
#ifdef RN
FMUL 0 * SIZE - 16 * SIZE(B)
FLD 1 * SIZE - 16 * SIZE(B)
fmul %st(1), %st
fsubrp %st, %st(2)
FLD 3 * SIZE - 16 * SIZE(B)
fmulp %st, %st(2)
#endif
#ifdef RT
FLD 3 * SIZE - 16 * SIZE(B)
fmulp %st, %st(2)
FLD 2 * SIZE - 16 * SIZE(B)
fmul %st(2), %st
fsubrp %st, %st(1)
FLD 0 * SIZE - 16 * SIZE(B)
fmulp %st, %st(1)
#endif
#ifdef LN
subl $1 * SIZE, %edi
#endif
#if defined(LN) || defined(LT)
FSTU 0 * SIZE - 16 * SIZE(B)
fxch %st(1)
FSTU 1 * SIZE - 16 * SIZE(B)
#else
FSTU 0 * SIZE - 16 * SIZE(A)
fxch %st(1)
FSTU 1 * SIZE - 16 * SIZE(A)
#endif
FST 0 * SIZE(%edi, LDC)
FST 0 * SIZE(%edi)
#ifndef LN
addl $1 * SIZE, %edi
#endif
#if defined(LT) || defined(RN)
movl K, %eax
subl KK, %eax
leal (,%eax, SIZE), %eax
leal (A, %eax, 1), A
leal (B, %eax, 2), B
#endif
#ifdef LN
subl $1, KK
#endif
#ifdef LT
addl $1, KK
#endif
#ifdef RT
movl K, %eax
sall $BASE_SHIFT, %eax
addl %eax, AORIG
#endif
decl I
jne .L24
#ifdef LN
movl K, %eax
leal ( , %eax, SIZE), %eax
leal (B_ORIG, %eax, 2), B_ORIG
#endif
#if defined(LT) || defined(RN)
movl B, B_ORIG
#endif
#ifdef RN
addl $2, KK
#endif
#ifdef RT
subl $2, KK
#endif
ALIGN_4
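# .L30: main path, processing B/C in panels of four columns (J = N >> 2).
# The .L12 loop reads ahead through the B panel, apparently to pull it into
# cache before the compute loop; the I loop (.L14) then walks the M rows of
# A one at a time, i.e. 1x4 register blocking.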
.L30:
movl N, %eax
sarl $2, %eax
movl %eax, J
je .L999
ALIGN_3
.L11:
#if defined(LT) || defined(RN)
movl STACK_A, A
#else
movl STACK_A, %eax
movl %eax, AORIG
#endif
#ifdef RT
movl K, %eax
sall $2 + BASE_SHIFT, %eax
subl %eax, B_ORIG
#endif
leal (, LDC, 4), %eax
#ifdef RT
subl %eax, C
#endif
movl C, %edi
#ifndef RT
addl %eax, C
#endif
#ifdef LN
movl OFFSET, %eax
addl M, %eax
movl %eax, KK
#endif
#ifdef LT
movl OFFSET, %eax
movl %eax, KK
#endif
movl B_ORIG, B
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
sarl $4, %eax
jle .L13
ALIGN_4
.L12:
movl -16 * SIZE(B), %esi
movl -8 * SIZE(B), %esi
movl 0 * SIZE(B), %esi
movl 8 * SIZE(B), %esi
movl 16 * SIZE(B), %esi
movl 24 * SIZE(B), %esi
movl 32 * SIZE(B), %esi
movl 40 * SIZE(B), %esi
subl $-64 * SIZE, B
decl %eax
jne .L12
ALIGN_3
.L13:
movl M, %esi
movl %esi, I
ALIGN_3
.L14:
#ifdef LN
movl K, %eax
sall $BASE_SHIFT, %eax
subl %eax, AORIG
#endif
#if defined(LN) || defined(RT)
movl KK, %eax
leal (, %eax, SIZE), %eax
movl AORIG, A
leal (A , %eax, 1), A
leal (B_ORIG, %eax, 4), B
#else
movl B_ORIG, B
#endif
leal (%edi, LDC, 2), %eax
fldz
fldz
fldz
fldz
FLD -8 * SIZE(A)
FLD -16 * SIZE(A)
FLD -16 * SIZE(B)
movl $32 * SIZE, %esi
prefetchw 1 * SIZE(%edi)
prefetchw 1 * SIZE(%edi, LDC)
prefetchw 1 * SIZE(%eax)
prefetchw 1 * SIZE(%eax, LDC)
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
sarl $3, %eax
je .L16
ALIGN_3
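# .L15: innermost loop, unrolled 8x over k. Each step multiplies one
# element of A by four consecutive elements of B and accumulates into four
# x87 stack slots; the PADDING macro from common.h and the prefetch of A
# appear to be there for instruction alignment/scheduling on this target.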
.L15:
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD -15 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD -14 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL -13 * SIZE(B)
faddp %st, %st(5)
FLD -15 * SIZE(A)
FLD -12 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD -11 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD -10 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL -9 * SIZE(B)
faddp %st, %st(5)
FLD -14 * SIZE(A)
FLD -8 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD -7 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD -6 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL -5 * SIZE(B)
faddp %st, %st(5)
FLD -13 * SIZE(A)
FLD -4 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD -3 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD -2 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL -1 * SIZE(B)
faddp %st, %st(5)
FLD -12 * SIZE(A)
FLD 0 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD 1 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD 2 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL 3 * SIZE(B)
faddp %st, %st(5)
FLD -11 * SIZE(A)
FLD 4 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD 5 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD 6 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL 7 * SIZE(B)
faddp %st, %st(5)
FLD -10 * SIZE(A)
FLD 8 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD 9 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD 10 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL 11 * SIZE(B)
faddp %st, %st(5)
FLD -9 * SIZE(A)
FLD 12 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(3)
PADDING
FLD 13 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
PADDING
FLD 14 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
PADDING
FMUL 15 * SIZE(B)
faddp %st, %st(5)
FLD 0 * SIZE(A)
PADDING prefetch PREFETCHSIZE * SIZE(A)
addl $8 * SIZE, A
fxch %st(1)
addl $32 * SIZE, B
FLD -16 * SIZE(B)
decl %eax
jne .L15
ALIGN_4
.L16:
#if defined(LT) || defined(RN)
movl KK, %eax
#else
movl K, %eax
subl KK, %eax
#endif
and $7, %eax
je .L19
ALIGN_4
.L17:
fmul %st(1), %st
faddp %st, %st(3)
FLD -15 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(4)
FLD -14 * SIZE(B)
fmul %st(1), %st
faddp %st, %st(5)
FMUL -13 * SIZE(B)
faddp %st, %st(5)
FLD -15 * SIZE(A)
FLD -12 * SIZE(B)
addl $1 * SIZE,A
addl $4 * SIZE,B
decl %eax
jne .L17
ALIGN_4
.L19:
ffreep %st(0)
ffreep %st(0)
ffreep %st(0)
#if defined(LN) || defined(RT)
movl KK, %eax
#ifdef LN
subl $1, %eax
#else
subl $4, %eax
#endif
leal (, %eax, SIZE), %eax
movl AORIG, A
leal (A, %eax, 1), A
leal (B_ORIG, %eax, 4), B
#endif
#if defined(LN) || defined(LT)
FLD 0 * SIZE - 16 * SIZE(B)
fsubp %st, %st(1)
FLD 1 * SIZE - 16 * SIZE(B)
fsubp %st, %st(2)
FLD 2 * SIZE - 16 * SIZE(B)
fsubp %st, %st(3)
FLD 3 * SIZE - 16 * SIZE(B)
fsubp %st, %st(4)
#else
FLD 0 * SIZE - 16 * SIZE(A)
fsubp %st, %st(1)
FLD 1 * SIZE - 16 * SIZE(A)
fsubp %st, %st(2)
FLD 2 * SIZE - 16 * SIZE(A)
fsubp %st, %st(3)
FLD 3 * SIZE - 16 * SIZE(A)
fsubp %st, %st(4)
#endif
#ifdef LN
FLD 0 * SIZE - 16 * SIZE(A)
fmul %st, %st(1)
fmul %st, %st(2)
fmul %st, %st(3)
fmulp %st, %st(4)
#endif
#ifdef LT
FLD 0 * SIZE - 16 * SIZE(A)
fmul %st, %st(1)
fmul %st, %st(2)
fmul %st, %st(3)
fmulp %st, %st(4)
#endif
#ifdef RN
FMUL 0 * SIZE - 16 * SIZE(B)
FLD 1 * SIZE - 16 * SIZE(B)
fmul %st(1), %st
fsubrp %st, %st(2)
FLD 2 * SIZE - 16 * SIZE(B)
fmul %st(1), %st
fsubrp %st, %st(3)
FLD 3 * SIZE - 16 * SIZE(B)
fmul %st(1), %st
fsubrp %st, %st(4)
FLD 5 * SIZE - 16 * SIZE(B)
fmulp %st, %st(2)
FLD 6 * SIZE - 16 * SIZE(B)
fmul %st(2), %st
fsubrp %st, %st(3)
FLD 7 * SIZE - 16 * SIZE(B)
fmul %st(2), %st
fsubrp %st, %st(4)
FLD 10 * SIZE - 16 * SIZE(B)
fmulp %st, %st(3)
FLD 11 * SIZE - 16 * SIZE(B)
fmul %st(3), %st
fsubrp %st, %st(4)
FLD 15 * SIZE - 16 * SIZE(B)
fmulp %st, %st(4)
#endif
#ifdef RT
FLD 15 * SIZE - 16 * SIZE(B)
fmulp %st, %st(4)
FLD 14 * SIZE - 16 * SIZE(B)
fmul %st(4), %st
fsubrp %st, %st(3)
FLD 13 * SIZE - 16 * SIZE(B)
fmul %st(4), %st
fsubrp %st, %st(2)
FLD 12 * SIZE - 16 * SIZE(B)
fmul %st(4), %st
fsubrp %st, %st(1)
FLD 10 * SIZE - 16 * SIZE(B)
fmulp %st, %st(3)
FLD 9 * SIZE - 16 * SIZE(B)
fmul %st(3), %st
fsubrp %st, %st(2)
FLD 8 * SIZE - 16 * SIZE(B)
fmul %st(3), %st
fsubrp %st, %st(1)
FLD 5 * SIZE - 16 * SIZE(B)
fmulp %st, %st(2)
FLD 4 * SIZE - 16 * SIZE(B)
fmul %st(2), %st
fsubrp %st, %st(1)
FLD 0 * SIZE - 16 * SIZE(B)
fmulp %st, %st(1)
#endif
#ifdef LN
subl $1 * SIZE, %edi
#endif
#if defined(LN) || defined(LT)
FSTU 0 * SIZE - 16 * SIZE(B)
fxch %st(1)
FSTU 1 * SIZE - 16 * SIZE(B)
fxch %st(2)
FSTU 2 * SIZE - 16 * SIZE(B)
fxch %st(3)
FSTU 3 * SIZE - 16 * SIZE(B)
#else
FSTU 0 * SIZE - 16 * SIZE(A)
fxch %st(1)
FSTU 1 * SIZE - 16 * SIZE(A)
fxch %st(2)
FSTU 2 * SIZE - 16 * SIZE(A)
fxch %st(3)
FSTU 3 * SIZE - 16 * SIZE(A)
#endif
leal (%edi, LDC, 2), %eax
FST 0 * SIZE(%eax, LDC)
FST 0 * SIZE(%edi)
FST 0 * SIZE(%edi, LDC)
FST 0 * SIZE(%eax)
#ifndef LN
addl $1 * SIZE, %edi
#endif
#if defined(LT) || defined(RN)
movl K, %eax
subl KK, %eax
leal (,%eax, SIZE), %eax
leal (A, %eax, 1), A
leal (B, %eax, 4), B
#endif
#ifdef LN
subl $1, KK
#endif
#ifdef LT
addl $1, KK
#endif
#ifdef RT
movl K, %eax
sall $BASE_SHIFT, %eax
addl %eax, AORIG
#endif
decl I
jne .L14
#ifdef LN
movl K, %eax
leal ( , %eax, SIZE), %eax
leal (B_ORIG, %eax, 4), B_ORIG
#endif
#if defined(LT) || defined(RN)
movl B, B_ORIG
#endif
#ifdef RN
addl $4, KK
#endif
#ifdef RT
subl $4, KK
#endif
decl J
jne .L11
ALIGN_4
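# .L999: common exit - restore callee-saved registers, release the local
# stack frame and return.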
.L999:
popl %ebx
popl %esi
popl %edi
popl %ebp
addl $ARGS, %esp
ret
EPILOGUE