
zscal_sse2.S

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT        */
/*    AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,        */
/*    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF       */
/*    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE       */
/*    DISCLAIMED.  IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT      */
/*    AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,     */
/*    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES       */
/*    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE      */
/*    GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR           */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF     */
/*    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT      */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT     */
/*    OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE            */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
#include "common.h"

#define STACK 16
#define ARGS 0

#define STACK_M 4 + STACK + ARGS(%esp)
#define STACK_ALPHA_R 16 + STACK + ARGS(%esp)
#define STACK_ALPHA_I 24 + STACK + ARGS(%esp)
#define STACK_X 32 + STACK + ARGS(%esp)
#define STACK_INCX 36 + STACK + ARGS(%esp)

#define M %ebx
#define X %ecx
#define INCX %edx
#define I %esi
#define XX %edi
#define FLAG %ebp

#include "l1param.h"

#if defined(NEHALEM) || defined(PENRYN) || defined(DUNNINGTON) || defined(SANDYBRIDGE)
#define USE_PSHUFD
#else
#define USE_PSHUFD_HALF
#endif

#define xmm8 xmm0
#define xmm9 xmm1
#define xmm10 xmm2
#define xmm11 xmm3
#define xmm12 xmm4
#define xmm13 xmm5
#define xmm14 xmm6
#define xmm15 xmm7
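
/*
 * Overview (descriptive comment, not part of the original source):
 * this is the SSE2 zscal kernel for 32-bit x86.  It scales a vector x of
 * M double-complex elements by alpha = (alpha_r, alpha_i), i.e.
 * x[i] := alpha * x[i].  A rough C sketch, for orientation only (the names
 * below are illustrative, not the interface the build actually uses):
 *
 *   for (i = 0; i < m; i++) {
 *     double re = x[2 * i], im = x[2 * i + 1];
 *     x[2 * i]     = alpha_r * re - alpha_i * im;
 *     x[2 * i + 1] = alpha_r * im + alpha_i * re;
 *   }
 *
 * The code selects between an alpha == 0 fast path (plain stores of zero),
 * a unit-stride 16-byte-aligned path, and strided/unaligned paths.
 */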
	PROLOGUE
	PROFCODE

	pushl	%edi
	pushl	%esi
	pushl	%ebx
	pushl	%ebp

	movl	STACK_M, M
	movl	STACK_X, X
	movl	STACK_INCX, INCX

	movsd	STACK_ALPHA_R, %xmm0
	movsd	STACK_ALPHA_I, %xmm1

	sall	$ZBASE_SHIFT, INCX
	xor	FLAG, FLAG

	testl	M, M
	jle	.L999

	xorps	%xmm7, %xmm7
	comisd	%xmm0, %xmm7
	jne	.L100
	comisd	%xmm1, %xmm7
	jne	.L100

/* Alpha == ZERO */
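/*
 * Descriptive note: when alpha is exactly zero the result is x := 0, so this
 * path only stores zeros.  For INCX == 1: if X is 8- but not 16-byte aligned,
 * the first double is cleared separately and FLAG is set so the last odd
 * double is cleared at .L19; the bulk is then written with aligned 16-byte
 * stores, 8 complex elements per iteration.  Other strides walk the vector
 * element by element (.L20 aligned, .L30 unaligned).
 */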
	cmpl	$2 * SIZE, INCX
	jne	.L20

/* INCX == 1 */
	testl	$SIZE, X
	je	.L05

	movsd	%xmm7, 0 * SIZE(X)
	addl	$SIZE, X
	movl	$1, FLAG
	decl	M
	jle	.L19
	ALIGN_3

.L05:
	movl	M, I
	sarl	$3, I
	jle	.L12
	ALIGN_4

.L11:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	movaps	%xmm7, 0 * SIZE(X)
	movaps	%xmm7, 2 * SIZE(X)
	movaps	%xmm7, 4 * SIZE(X)
	movaps	%xmm7, 6 * SIZE(X)
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif
	movaps	%xmm7, 8 * SIZE(X)
	movaps	%xmm7, 10 * SIZE(X)
	movaps	%xmm7, 12 * SIZE(X)
	movaps	%xmm7, 14 * SIZE(X)

	addl	$16 * SIZE, X
	decl	I
	jg	.L11
	ALIGN_4

.L12:
	testl	$4, M
	je	.L13

	movaps	%xmm7, 0 * SIZE(X)
	movaps	%xmm7, 2 * SIZE(X)
	movaps	%xmm7, 4 * SIZE(X)
	movaps	%xmm7, 6 * SIZE(X)
	addl	$8 * SIZE, X
	ALIGN_3

.L13:
	testl	$2, M
	je	.L14

	movaps	%xmm7, 0 * SIZE(X)
	movaps	%xmm7, 2 * SIZE(X)
	addl	$4 * SIZE, X
	ALIGN_3

.L14:
	testl	$1, M
	je	.L19

	movaps	%xmm7, 0 * SIZE(X)
	addl	$2 * SIZE, X
	ALIGN_3

.L19:
	testl	$1, FLAG
	je	.L999

	movsd	%xmm7, 0 * SIZE(X)
	jmp	.L999
	ALIGN_4

/* incx != 1 */
.L20:
	testl	$SIZE, X
	jne	.L30

/* Aligned Mode */
	movl	M, I
	sarl	$2, I
	jle	.L22
	ALIGN_4

.L21:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	movaps	%xmm7, (X)
	addl	INCX, X
	movaps	%xmm7, (X)
	addl	INCX, X
	movaps	%xmm7, (X)
	addl	INCX, X
	movaps	%xmm7, (X)
	addl	INCX, X

	decl	I
	jg	.L21
	ALIGN_4

.L22:
	testl	$3, M
	je	.L999

	testl	$2, M
	je	.L23

	movaps	%xmm7, (X)
	addl	INCX, X
	movaps	%xmm7, (X)
	addl	INCX, X
	ALIGN_3

.L23:
	testl	$1, M
	je	.L999

	movaps	%xmm7, (X)
	jmp	.L999
	ALIGN_4

/* Unaligned Mode */
.L30:
	movl	M, I
	sarl	$2, I
	jle	.L32
	ALIGN_4

.L31:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	addl	INCX, X
	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	addl	INCX, X
	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	addl	INCX, X
	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	addl	INCX, X

	decl	I
	jg	.L31
	ALIGN_4

.L32:
	testl	$3, M
	je	.L999

	testl	$2, M
	je	.L33

	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	addl	INCX, X
	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	addl	INCX, X
	ALIGN_3

.L33:
	testl	$1, M
	je	.L999

	movlps	%xmm7, 0 * SIZE(X)
	movlps	%xmm7, 1 * SIZE(X)
	jmp	.L999
	ALIGN_4

/* Alpha != ZERO */
.L100:
	testl	$SIZE, X
	jne	.L200

#ifdef HAVE_SSE3
	movddup	%xmm0, %xmm6
#else
	pshufd	$0x44, %xmm0, %xmm6
#endif
	xorps	%xmm7, %xmm7
	subsd	%xmm1, %xmm7
	movlhps	%xmm1, %xmm7
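/*
 * Descriptive note on the setup above (low double listed first):
 *   xmm6 = (  alpha_r, alpha_r )
 *   xmm7 = ( -alpha_i, alpha_i )
 * For each element x = (re, im) the loops below compute
 *   pshufd $0x4e  ->  (im, re)
 *   mulpd  xmm6   ->  (re*alpha_r,  im*alpha_r)
 *   mulpd  xmm7   ->  (-im*alpha_i, re*alpha_i)
 *   addpd         ->  (re*alpha_r - im*alpha_i, im*alpha_r + re*alpha_i)
 * which is the complex product alpha * x.
 */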
	cmpl	$2 * SIZE, INCX
	jne	.L120

	subl	$-16 * SIZE, X

	movl	M, I
	sarl	$3, I
	jle	.L115

	movaps	-16 * SIZE(X), %xmm0
	movaps	-14 * SIZE(X), %xmm1
	movaps	-12 * SIZE(X), %xmm2
	movaps	-10 * SIZE(X), %xmm3

	decl	I
	jle	.L112
	ALIGN_4

.L111:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm0, %xmm5
#else
	movsd	-15 * SIZE(X), %xmm5
	movhps	-16 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -16 * SIZE(X)
	movaps	-8 * SIZE(X), %xmm0

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm1, %xmm5
#else
	movsd	-13 * SIZE(X), %xmm5
	movhps	-14 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -14 * SIZE(X)
	movaps	-6 * SIZE(X), %xmm1

#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm2, %xmm5
#else
	movsd	-11 * SIZE(X), %xmm5
	movhps	-12 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -12 * SIZE(X)
	movaps	-4 * SIZE(X), %xmm2

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm3, %xmm5
#else
	movsd	-9 * SIZE(X), %xmm5
	movhps	-10 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -10 * SIZE(X)
	movaps	-2 * SIZE(X), %xmm3

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif
#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm0, %xmm5
#else
	movsd	-7 * SIZE(X), %xmm5
	movhps	-8 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -8 * SIZE(X)
	movaps	0 * SIZE(X), %xmm0

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm1, %xmm5
#else
	movsd	-5 * SIZE(X), %xmm5
	movhps	-6 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -6 * SIZE(X)
	movaps	2 * SIZE(X), %xmm1

#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm2, %xmm5
#else
	movsd	-3 * SIZE(X), %xmm5
	movhps	-4 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -4 * SIZE(X)
	movaps	4 * SIZE(X), %xmm2

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm3, %xmm5
#else
	movsd	-1 * SIZE(X), %xmm5
	movhps	-2 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -2 * SIZE(X)
	movaps	6 * SIZE(X), %xmm3

	subl	$-16 * SIZE, X
	decl	I
	jg	.L111
	ALIGN_4

.L112:
#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm0, %xmm5
#else
	movsd	-15 * SIZE(X), %xmm5
	movhps	-16 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -16 * SIZE(X)
	movaps	-8 * SIZE(X), %xmm0

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm1, %xmm5
#else
	movsd	-13 * SIZE(X), %xmm5
	movhps	-14 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -14 * SIZE(X)
	movaps	-6 * SIZE(X), %xmm1

#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm2, %xmm5
#else
	movsd	-11 * SIZE(X), %xmm5
	movhps	-12 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -12 * SIZE(X)
	movaps	-4 * SIZE(X), %xmm2

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm3, %xmm5
#else
	movsd	-9 * SIZE(X), %xmm5
	movhps	-10 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -10 * SIZE(X)
	movaps	-2 * SIZE(X), %xmm3

#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm0, %xmm5
#else
	movsd	-7 * SIZE(X), %xmm5
	movhps	-8 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -8 * SIZE(X)

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm1, %xmm5
#else
	movsd	-5 * SIZE(X), %xmm5
	movhps	-6 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -6 * SIZE(X)

#if defined(USE_PSHUFD) || defined(USE_PSHUFD_HALF)
	pshufd	$0x4e, %xmm2, %xmm5
#else
	movsd	-3 * SIZE(X), %xmm5
	movhps	-4 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -4 * SIZE(X)

#ifdef USE_PSHUFD
	pshufd	$0x4e, %xmm3, %xmm5
#else
	movsd	-1 * SIZE(X), %xmm5
	movhps	-2 * SIZE(X), %xmm5
#endif
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -2 * SIZE(X)

	subl	$-16 * SIZE, X
	ALIGN_3

.L115:
	testl	$7, M
	je	.L999

	testl	$4, M
	je	.L116

	movaps	-16 * SIZE(X), %xmm0
	movaps	-14 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -16 * SIZE(X)

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -14 * SIZE(X)

	movaps	-12 * SIZE(X), %xmm2
	movaps	-10 * SIZE(X), %xmm3

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -12 * SIZE(X)

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -10 * SIZE(X)

	addl	$8 * SIZE, X
	ALIGN_3

.L116:
	testl	$2, M
	je	.L117

	movaps	-16 * SIZE(X), %xmm0
	movaps	-14 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -16 * SIZE(X)

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -14 * SIZE(X)

	addl	$4 * SIZE, X
	ALIGN_3

.L117:
	testl	$1, M
	je	.L999

	movaps	-16 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -16 * SIZE(X)
	jmp	.L999
	ALIGN_3
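/*
 * Descriptive note: X is 16-byte aligned but INCX != 1.  The same complex
 * multiply is applied element by element; X is used as the read pointer and
 * XX as the lagging write pointer, 8 elements per unrolled iteration.
 */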
.L120:
	movl	X, XX

	movl	M, I
	sarl	$3, I
	jle	.L125

	movaps	(X), %xmm0
	addl	INCX, X
	movaps	(X), %xmm1
	addl	INCX, X
	movaps	(X), %xmm2
	addl	INCX, X
	movaps	(X), %xmm3
	addl	INCX, X

	decl	I
	jle	.L122
	ALIGN_4

.L121:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	addl	INCX, XX
	movaps	(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, (XX)
	addl	INCX, XX
	movaps	(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, (XX)
	addl	INCX, XX
	movaps	(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, (XX)
	addl	INCX, XX
	movaps	(X), %xmm3
	addl	INCX, X

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	addl	INCX, XX
	movaps	(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, (XX)
	addl	INCX, XX
	movaps	(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, (XX)
	addl	INCX, XX
	movaps	(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, (XX)
	addl	INCX, XX
	movaps	(X), %xmm3
	addl	INCX, X

	decl	I
	jg	.L121
	ALIGN_4

.L122:
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	addl	INCX, XX
	movaps	(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, (XX)
	addl	INCX, XX
	movaps	(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, (XX)
	addl	INCX, XX
	movaps	(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, (XX)
	addl	INCX, XX
	movaps	(X), %xmm3
	addl	INCX, X

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, (XX)
	addl	INCX, XX
	ALIGN_3

.L125:
	testl	$7, M
	je	.L999

	testl	$4, M
	je	.L126

	movaps	(X), %xmm0
	addl	INCX, X
	movaps	(X), %xmm1
	addl	INCX, X
	movaps	(X), %xmm2
	addl	INCX, X
	movaps	(X), %xmm3
	addl	INCX, X

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movaps	%xmm2, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movaps	%xmm3, (XX)
	addl	INCX, XX
	ALIGN_3

.L126:
	testl	$2, M
	je	.L127

	movaps	(X), %xmm0
	addl	INCX, X
	movaps	(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movaps	%xmm1, (XX)
	addl	INCX, XX
	ALIGN_3

.L127:
	testl	$1, M
	je	.L999

	movaps	(X), %xmm0

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movaps	%xmm0, (XX)
	jmp	.L999
	ALIGN_3
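/*
 * Descriptive note: from here X is only 8-byte aligned (8 bytes past a
 * 16-byte boundary).  For unit stride with ALIGNED_ACCESS on cores that
 * prefer aligned 16-byte loads, the first real part is handled with scalar
 * math, the loop then works on aligned pairs that straddle two consecutive
 * elements (recombined with SHUFPD_1), and the last imaginary part is
 * finished at .L208.  Otherwise (including non-unit stride at .L220) each
 * element is simply loaded and stored as two separate doubles.
 */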
.L200:
	cmpl	$2 * SIZE, INCX
	jne	.L220

#if defined(ALIGNED_ACCESS) && !defined(NEHALEM) && !defined(SANDYBRIDGE)
#ifdef HAVE_SSE3
	movddup	%xmm0, %xmm6
#else
	pshufd	$0x44, %xmm0, %xmm6
#endif
	pxor	%xmm7, %xmm7
	subsd	%xmm1, %xmm7
	movlhps	%xmm1, %xmm7
	shufpd	$1, %xmm7, %xmm7

	movhps	0 * SIZE(X), %xmm0
	movaps	1 * SIZE(X), %xmm1

	subl	$-16 * SIZE, X

	unpckhpd %xmm0, %xmm0
	mulsd	%xmm6, %xmm0
	movaps	%xmm1, %xmm5
	mulsd	%xmm7, %xmm5
	subsd	%xmm5, %xmm0
	movlps	%xmm0, -16 * SIZE(X)

	decl	M
	movl	M, I
	sarl	$3, I
	jle	.L205

	movaps	-13 * SIZE(X), %xmm2
	movaps	-11 * SIZE(X), %xmm3

	decl	I
	jle	.L202
	ALIGN_4

.L201:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -15 * SIZE(X)
	movaps	-9 * SIZE(X), %xmm0

	movaps	%xmm2, %xmm5
	SHUFPD_1 %xmm3, %xmm1
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm1
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -13 * SIZE(X)
	movaps	-7 * SIZE(X), %xmm1

	movaps	%xmm3, %xmm5
	SHUFPD_1 %xmm0, %xmm2
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm2
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -11 * SIZE(X)
	movaps	-5 * SIZE(X), %xmm2

	movaps	%xmm0, %xmm5
	SHUFPD_1 %xmm1, %xmm3
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm3
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -9 * SIZE(X)
	movaps	-3 * SIZE(X), %xmm3

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif
	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -7 * SIZE(X)
	movaps	-1 * SIZE(X), %xmm0

	movaps	%xmm2, %xmm5
	SHUFPD_1 %xmm3, %xmm1
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm1
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -5 * SIZE(X)
	movaps	1 * SIZE(X), %xmm1

	movaps	%xmm3, %xmm5
	SHUFPD_1 %xmm0, %xmm2
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm2
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -3 * SIZE(X)
	movaps	3 * SIZE(X), %xmm2

	movaps	%xmm0, %xmm5
	SHUFPD_1 %xmm1, %xmm3
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm3
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -1 * SIZE(X)
	movaps	5 * SIZE(X), %xmm3

	subl	$-16 * SIZE, X
	decl	I
	jg	.L201
	ALIGN_4

.L202:
	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -15 * SIZE(X)
	movaps	-9 * SIZE(X), %xmm0

	movaps	%xmm2, %xmm5
	SHUFPD_1 %xmm3, %xmm1
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm1
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -13 * SIZE(X)
	movaps	-7 * SIZE(X), %xmm1

	movaps	%xmm3, %xmm5
	SHUFPD_1 %xmm0, %xmm2
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm2
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -11 * SIZE(X)
	movaps	-5 * SIZE(X), %xmm2

	movaps	%xmm0, %xmm5
	SHUFPD_1 %xmm1, %xmm3
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm3
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -9 * SIZE(X)
	movaps	-3 * SIZE(X), %xmm3

	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -7 * SIZE(X)
	movaps	-1 * SIZE(X), %xmm0

	movaps	%xmm2, %xmm5
	SHUFPD_1 %xmm3, %xmm1
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm1
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -5 * SIZE(X)
	movaps	1 * SIZE(X), %xmm1

	movaps	%xmm3, %xmm5
	SHUFPD_1 %xmm0, %xmm2
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm2
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -3 * SIZE(X)

	movaps	%xmm0, %xmm5
	SHUFPD_1 %xmm1, %xmm3
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm3
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -1 * SIZE(X)

	subl	$-16 * SIZE, X
	ALIGN_3

.L205:
	testl	$4, M
	je	.L206

	movaps	-13 * SIZE(X), %xmm2

	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -15 * SIZE(X)

	movaps	-11 * SIZE(X), %xmm3

	movaps	%xmm2, %xmm5
	SHUFPD_1 %xmm3, %xmm1
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm1
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -13 * SIZE(X)

	movaps	-9 * SIZE(X), %xmm0

	movaps	%xmm3, %xmm5
	SHUFPD_1 %xmm0, %xmm2
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm2
	addpd	%xmm5, %xmm2
	movaps	%xmm2, -11 * SIZE(X)

	movaps	-7 * SIZE(X), %xmm1

	movaps	%xmm0, %xmm5
	SHUFPD_1 %xmm1, %xmm3
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm3
	addpd	%xmm5, %xmm3
	movaps	%xmm3, -9 * SIZE(X)

	addl	$8 * SIZE, X
	ALIGN_3

.L206:
	testl	$2, M
	je	.L207

	movaps	-13 * SIZE(X), %xmm2

	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -15 * SIZE(X)

	movaps	-11 * SIZE(X), %xmm3

	movaps	%xmm2, %xmm5
	SHUFPD_1 %xmm3, %xmm1
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm1
	addpd	%xmm5, %xmm1
	movaps	%xmm1, -13 * SIZE(X)

	movaps	%xmm2, %xmm0
	movaps	%xmm3, %xmm1

	addl	$4 * SIZE, X
	ALIGN_3

.L207:
	testl	$1, M
	je	.L208

	movaps	-13 * SIZE(X), %xmm2

	movaps	%xmm1, %xmm5
	SHUFPD_1 %xmm2, %xmm0
	mulpd	%xmm6, %xmm5
	mulpd	%xmm7, %xmm0
	addpd	%xmm5, %xmm0
	movaps	%xmm0, -15 * SIZE(X)

	movaps	%xmm1, %xmm0
	movaps	%xmm2, %xmm1

	addl	$2 * SIZE, X
	ALIGN_3

.L208:
	unpckhpd %xmm0, %xmm0
	mulsd	%xmm6, %xmm1
	mulsd	%xmm7, %xmm0
	addsd	%xmm1, %xmm0
	movlps	%xmm0, -15 * SIZE(X)
	jmp	.L999
	ALIGN_3

#else

#ifdef HAVE_SSE3
	movddup	%xmm0, %xmm6
#else
	pshufd	$0x44, %xmm0, %xmm6
#endif
	pxor	%xmm7, %xmm7
	subsd	%xmm1, %xmm7
	movlhps	%xmm1, %xmm7

	subl	$-16 * SIZE, X

	movl	M, I
	sarl	$3, I
	jle	.L205

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0
	movsd	-14 * SIZE(X), %xmm1
	movhps	-13 * SIZE(X), %xmm1
	movsd	-12 * SIZE(X), %xmm2
	movhps	-11 * SIZE(X), %xmm2
	movsd	-10 * SIZE(X), %xmm3
	movhps	-9 * SIZE(X), %xmm3

	decl	I
	jle	.L202
	ALIGN_4

.L201:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -16 * SIZE(X)
	movhps	%xmm0, -15 * SIZE(X)
	movsd	-8 * SIZE(X), %xmm0
	movhps	-7 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, -14 * SIZE(X)
	movhps	%xmm1, -13 * SIZE(X)
	movsd	-6 * SIZE(X), %xmm1
	movhps	-5 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, -12 * SIZE(X)
	movhps	%xmm2, -11 * SIZE(X)
	movsd	-4 * SIZE(X), %xmm2
	movhps	-3 * SIZE(X), %xmm2

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, -10 * SIZE(X)
	movhps	%xmm3, -9 * SIZE(X)
	movsd	-2 * SIZE(X), %xmm3
	movhps	-1 * SIZE(X), %xmm3

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -8 * SIZE(X)
	movhps	%xmm0, -7 * SIZE(X)
	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, -6 * SIZE(X)
	movhps	%xmm1, -5 * SIZE(X)
	movsd	2 * SIZE(X), %xmm1
	movhps	3 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, -4 * SIZE(X)
	movhps	%xmm2, -3 * SIZE(X)
	movsd	4 * SIZE(X), %xmm2
	movhps	5 * SIZE(X), %xmm2

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, -2 * SIZE(X)
	movhps	%xmm3, -1 * SIZE(X)
	movsd	6 * SIZE(X), %xmm3
	movhps	7 * SIZE(X), %xmm3

	subl	$-16 * SIZE, X
	decl	I
	jg	.L201
	ALIGN_4

.L202:
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -16 * SIZE(X)
	movhps	%xmm0, -15 * SIZE(X)
	movsd	-8 * SIZE(X), %xmm0
	movhps	-7 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, -14 * SIZE(X)
	movhps	%xmm1, -13 * SIZE(X)
	movsd	-6 * SIZE(X), %xmm1
	movhps	-5 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, -12 * SIZE(X)
	movhps	%xmm2, -11 * SIZE(X)
	movsd	-4 * SIZE(X), %xmm2
	movhps	-3 * SIZE(X), %xmm2

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, -10 * SIZE(X)
	movhps	%xmm3, -9 * SIZE(X)
	movsd	-2 * SIZE(X), %xmm3
	movhps	-1 * SIZE(X), %xmm3

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -8 * SIZE(X)
	movhps	%xmm0, -7 * SIZE(X)

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, -6 * SIZE(X)
	movhps	%xmm1, -5 * SIZE(X)

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, -4 * SIZE(X)
	movhps	%xmm2, -3 * SIZE(X)

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, -2 * SIZE(X)
	movhps	%xmm3, -1 * SIZE(X)

	subl	$-16 * SIZE, X
	ALIGN_3

.L205:
	testl	$7, M
	je	.L999

	testl	$4, M
	je	.L206

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0
	movsd	-14 * SIZE(X), %xmm1
	movhps	-13 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -16 * SIZE(X)
	movhps	%xmm0, -15 * SIZE(X)

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, -14 * SIZE(X)
	movhps	%xmm1, -13 * SIZE(X)

	movsd	-12 * SIZE(X), %xmm2
	movhps	-11 * SIZE(X), %xmm2
	movsd	-10 * SIZE(X), %xmm3
	movhps	-9 * SIZE(X), %xmm3

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, -12 * SIZE(X)
	movhps	%xmm2, -11 * SIZE(X)

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, -10 * SIZE(X)
	movhps	%xmm3, -9 * SIZE(X)

	addl	$8 * SIZE, X
	ALIGN_3

.L206:
	testl	$2, M
	je	.L207

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -16 * SIZE(X)
	movhps	%xmm0, -15 * SIZE(X)

	movsd	-14 * SIZE(X), %xmm1
	movhps	-13 * SIZE(X), %xmm1

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, -14 * SIZE(X)
	movhps	%xmm1, -13 * SIZE(X)

	addl	$4 * SIZE, X
	ALIGN_3

.L207:
	testl	$1, M
	je	.L999

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, -16 * SIZE(X)
	movhps	%xmm0, -15 * SIZE(X)
	jmp	.L999
	ALIGN_3
#endif
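/*
 * Descriptive note: remaining case, X not 16-byte aligned and INCX != 1.
 * Each element is loaded and stored as two separate doubles
 * (movsd/movhps for loads, movlps/movhps for stores), again with X reading
 * ahead and XX writing back, 8 elements per unrolled iteration.
 */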
.L220:
#ifdef HAVE_SSE3
	movddup	%xmm0, %xmm6
#else
	pshufd	$0x44, %xmm0, %xmm6
#endif
	pxor	%xmm7, %xmm7
	subsd	%xmm1, %xmm7
	movlhps	%xmm1, %xmm7

	movl	X, XX

	movl	M, I
	sarl	$3, I
	jle	.L225

	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0
	addl	INCX, X
	movsd	0 * SIZE(X), %xmm1
	movhps	1 * SIZE(X), %xmm1
	addl	INCX, X
	movsd	0 * SIZE(X), %xmm2
	movhps	1 * SIZE(X), %xmm2
	addl	INCX, X
	movsd	0 * SIZE(X), %xmm3
	movhps	1 * SIZE(X), %xmm3
	addl	INCX, X

	decl	I
	jle	.L222
	ALIGN_4

.L221:
#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 0) - PREOFFSET(X)
#endif
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, 0 * SIZE(XX)
	movhps	%xmm1, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm1
	movhps	1 * SIZE(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, 0 * SIZE(XX)
	movhps	%xmm2, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm2
	movhps	1 * SIZE(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, 0 * SIZE(XX)
	movhps	%xmm3, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm3
	movhps	1 * SIZE(X), %xmm3
	addl	INCX, X

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE + 64) - PREOFFSET(X)
#endif
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, 0 * SIZE(XX)
	movhps	%xmm1, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm1
	movhps	1 * SIZE(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, 0 * SIZE(XX)
	movhps	%xmm2, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm2
	movhps	1 * SIZE(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, 0 * SIZE(XX)
	movhps	%xmm3, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm3
	movhps	1 * SIZE(X), %xmm3
	addl	INCX, X

	decl	I
	jg	.L221
	ALIGN_4

.L222:
	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, 0 * SIZE(XX)
	movhps	%xmm1, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm1
	movhps	1 * SIZE(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, 0 * SIZE(XX)
	movhps	%xmm2, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm2
	movhps	1 * SIZE(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, 0 * SIZE(XX)
	movhps	%xmm3, 1 * SIZE(XX)
	addl	INCX, XX
	movsd	0 * SIZE(X), %xmm3
	movhps	1 * SIZE(X), %xmm3
	addl	INCX, X

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, 0 * SIZE(XX)
	movhps	%xmm1, 1 * SIZE(XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, 0 * SIZE(XX)
	movhps	%xmm2, 1 * SIZE(XX)
	addl	INCX, XX

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, 0 * SIZE(XX)
	movhps	%xmm3, 1 * SIZE(XX)
	addl	INCX, XX
	ALIGN_3

.L225:
	testl	$7, M
	je	.L999

	testl	$4, M
	je	.L226

	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	addl	INCX, XX

	movsd	0 * SIZE(X), %xmm1
	movhps	1 * SIZE(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, 0 * SIZE(XX)
	movhps	%xmm1, 1 * SIZE(XX)
	addl	INCX, XX

	movsd	0 * SIZE(X), %xmm2
	movhps	1 * SIZE(X), %xmm2
	addl	INCX, X

	pshufd	$0x4e, %xmm2, %xmm5
	mulpd	%xmm6, %xmm2
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm2
	movlps	%xmm2, 0 * SIZE(XX)
	movhps	%xmm2, 1 * SIZE(XX)
	addl	INCX, XX

	movsd	0 * SIZE(X), %xmm3
	movhps	1 * SIZE(X), %xmm3
	addl	INCX, X

	pshufd	$0x4e, %xmm3, %xmm5
	mulpd	%xmm6, %xmm3
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm3
	movlps	%xmm3, 0 * SIZE(XX)
	movhps	%xmm3, 1 * SIZE(XX)
	addl	INCX, XX
	ALIGN_3

.L226:
	testl	$2, M
	je	.L227

	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0
	addl	INCX, X

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	addl	INCX, XX

	movsd	0 * SIZE(X), %xmm1
	movhps	1 * SIZE(X), %xmm1
	addl	INCX, X

	pshufd	$0x4e, %xmm1, %xmm5
	mulpd	%xmm6, %xmm1
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm1
	movlps	%xmm1, 0 * SIZE(XX)
	movhps	%xmm1, 1 * SIZE(XX)
	addl	INCX, XX
	ALIGN_3

.L227:
	testl	$1, M
	je	.L999

	movsd	0 * SIZE(X), %xmm0
	movhps	1 * SIZE(X), %xmm0

	pshufd	$0x4e, %xmm0, %xmm5
	mulpd	%xmm6, %xmm0
	mulpd	%xmm7, %xmm5
	addpd	%xmm5, %xmm0
	movlps	%xmm0, 0 * SIZE(XX)
	movhps	%xmm0, 1 * SIZE(XX)
	ALIGN_3

.L999:
	xorl	%eax, %eax

	popl	%ebp
	popl	%ebx
	popl	%esi
	popl	%edi
	ret

	EPILOGUE