;
; jccolext.asm - colorspace conversion (AVX2)
;
; Copyright (C) 2015, Intel Corporation.
; Copyright (C) 2016, D. R. Commander.
;
; Based on the x86 SIMD extension for IJG JPEG library
; Copyright (C) 1999-2006, MIYASAKA Masaru.
; For conditions of distribution and use, see copyright notice in jsimdext.inc
;
; This file should be assembled with NASM (Netwide Assembler) and
; can *not* be assembled with Microsoft's MASM or any compatible
; assembler (including Borland's Turbo Assembler).
; NASM is available from http://nasm.sourceforge.net/ or
; http://sourceforge.net/project/showfiles.php?group_id=6208

%include "jcolsamp.inc"
; --------------------------------------------------------------------------
;
; Convert some rows of samples to the output colorspace.
;
; GLOBAL(void)
; jsimd_rgb_ycc_convert_avx2(JDIMENSION img_width, JSAMPARRAY input_buf,
;                            JSAMPIMAGE output_buf, JDIMENSION output_row,
;                            int num_rows);
;

%define img_width(b)   (b) + 8          ; JDIMENSION img_width
%define input_buf(b)   (b) + 12         ; JSAMPARRAY input_buf
%define output_buf(b)  (b) + 16         ; JSAMPIMAGE output_buf
%define output_row(b)  (b) + 20         ; JDIMENSION output_row
%define num_rows(b)    (b) + 24         ; int num_rows

%define original_ebp   ebp + 0
%define wk(i)          ebp - (WK_NUM - (i)) * SIZEOF_YMMWORD
                                        ; ymmword wk[WK_NUM]
%define WK_NUM         8
%define gotptr         wk(0) - SIZEOF_POINTER  ; void *gotptr
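
; The prologue below builds a 256-bit-aligned stack frame so that the wk[]
; scratch slots can be accessed with aligned vmovdqa.  In rough C terms
; (illustrative only, not part of the build):
;
;   eax = esp;                     /* original ebp; [eax+8] = first argument */
;   esp = (esp - 4) & ~(SIZEOF_YMMWORD - 1);   /* 32-byte alignment */
;   *esp = eax;  ebp = esp;        /* aligned ebp; old esp recoverable */
;   esp = wk(0);                   /* reserve wk[WK_NUM] below ebp */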

    align       32
    GLOBAL_FUNCTION(jsimd_rgb_ycc_convert_avx2)

EXTN(jsimd_rgb_ycc_convert_avx2):
    push        ebp
    mov         eax, esp                ; eax = original ebp
    sub         esp, byte 4
    and         esp, byte (-SIZEOF_YMMWORD)  ; align to 256 bits
    mov         [esp], eax
    mov         ebp, esp                ; ebp = aligned ebp
    lea         esp, [wk(0)]
    pushpic     eax                     ; make room for GOT address
    push        ebx
;   push        ecx                     ; need not be preserved
;   push        edx                     ; need not be preserved
    push        esi
    push        edi

    get_GOT     ebx                     ; get GOT address
    movpic      POINTER [gotptr], ebx   ; save GOT address
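
    ; In position-independent builds, the SIMD constants (PW_*, PD_*) are
    ; addressed relative to the GOT: get_GOT/movpic/GOTOFF expand to the
    ; usual call/pop thunk and GOT-relative operands, and collapse to plain
    ; absolute addressing otherwise (see jsimdext.inc).  The pointer saved
    ; here is reloaded into eax at the top of each row; in non-PIC builds
    ; these pushpic/movpic operations compile away and eax simply keeps the
    ; row count.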

    mov         ecx, JDIMENSION [img_width(eax)]
    test        ecx, ecx
    jz          near .return

    push        ecx

    mov         esi, JSAMPIMAGE [output_buf(eax)]
    mov         ecx, JDIMENSION [output_row(eax)]
    mov         edi, JSAMPARRAY [esi+0*SIZEOF_JSAMPARRAY]
    mov         ebx, JSAMPARRAY [esi+1*SIZEOF_JSAMPARRAY]
    mov         edx, JSAMPARRAY [esi+2*SIZEOF_JSAMPARRAY]
    lea         edi, [edi+ecx*SIZEOF_JSAMPROW]
    lea         ebx, [ebx+ecx*SIZEOF_JSAMPROW]
    lea         edx, [edx+ecx*SIZEOF_JSAMPROW]
    pop         ecx

    mov         esi, JSAMPARRAY [input_buf(eax)]
    mov         eax, INT [num_rows(eax)]
    test        eax, eax
    jle         near .return
    alignx      16, 7
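
    ; Per-row pointer setup, in rough C terms (illustrative only):
    ;
    ;   inptr   = input_buf[row];                 /* esi: packed RGB row */
    ;   outptr0 = output_buf[0][output_row+row];  /* edi: Y */
    ;   outptr1 = output_buf[1][output_row+row];  /* ebx: Cb */
    ;   outptr2 = output_buf[2][output_row+row];  /* edx: Cr */
    ;
    ; edi/ebx/edx currently point at the JSAMPROW entries themselves; the
    ; actual row pointers are fetched from them at the top of .rowloop, and
    ; the entries are advanced by SIZEOF_JSAMPROW after each row.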
.rowloop:
    pushpic     eax
    push        edx
    push        ebx
    push        edi
    push        esi
    push        ecx                     ; col

    mov         esi, JSAMPROW [esi]     ; inptr
    mov         edi, JSAMPROW [edi]     ; outptr0
    mov         ebx, JSAMPROW [ebx]     ; outptr1
    mov         edx, JSAMPROW [edx]     ; outptr2
    movpic      eax, POINTER [gotptr]   ; load GOT address (eax)

    cmp         ecx, byte SIZEOF_YMMWORD
    jae         near .columnloop
    alignx      16, 7
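
    ; Tail handling: when fewer than SIZEOF_YMMWORD (32) pixels remain in
    ; the row, the .column_ld* chain below gathers the ragged end from
    ; progressively larger power-of-two chunks, each time subtracting the
    ; chunk size from ecx first and loading from the end of the remaining
    ; data, so no load ever touches memory past the last sample in the row.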

%if RGB_PIXELSIZE == 3  ; ---------------

.column_ld1:
    push        eax
    push        edx
    lea         ecx, [ecx+ecx*2]        ; imul ecx, RGB_PIXELSIZE
    test        cl, SIZEOF_BYTE
    jz          short .column_ld2
    sub         ecx, byte SIZEOF_BYTE
    movzx       eax, byte [esi+ecx]
.column_ld2:
    test        cl, SIZEOF_WORD
    jz          short .column_ld4
    sub         ecx, byte SIZEOF_WORD
    movzx       edx, word [esi+ecx]
    shl         eax, WORD_BIT
    or          eax, edx
.column_ld4:
    vmovd       xmmA, eax
    pop         edx
    pop         eax
    test        cl, SIZEOF_DWORD
    jz          short .column_ld8
    sub         ecx, byte SIZEOF_DWORD
    vmovd       xmmF, XMM_DWORD [esi+ecx]
    vpslldq     xmmA, xmmA, SIZEOF_DWORD
    vpor        xmmA, xmmA, xmmF
.column_ld8:
    test        cl, SIZEOF_MMWORD
    jz          short .column_ld16
    sub         ecx, byte SIZEOF_MMWORD
    vmovq       xmmB, XMM_MMWORD [esi+ecx]
    vpslldq     xmmA, xmmA, SIZEOF_MMWORD
    vpor        xmmA, xmmA, xmmB
.column_ld16:
    test        cl, SIZEOF_XMMWORD
    jz          short .column_ld32
    sub         ecx, byte SIZEOF_XMMWORD
    vmovdqu     xmmB, XMMWORD [esi+ecx]
    vperm2i128  ymmA, ymmA, ymmA, 1
    vpor        ymmA, ymmB
.column_ld32:
    test        cl, SIZEOF_YMMWORD
    jz          short .column_ld64
    sub         ecx, byte SIZEOF_YMMWORD
    vmovdqa     ymmF, ymmA
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
.column_ld64:
    test        cl, 2*SIZEOF_YMMWORD
    mov         ecx, SIZEOF_YMMWORD
    jz          short .rgb_ycc_cnv
    vmovdqa     ymmB, ymmA
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    jmp         short .rgb_ycc_cnv
    alignx      16, 7

.columnloop:
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    vmovdqu     ymmB, YMMWORD [esi+2*SIZEOF_YMMWORD]

.rgb_ycc_cnv:
    ; ymmA=(00 10 20 01 11 21 02 12 22 03 13 23 04 14 24 05
    ;       15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A)
    ; ymmF=(2A 0B 1B 2B 0C 1C 2C 0D 1D 2D 0E 1E 2E 0F 1F 2F
    ;       0G 1G 2G 0H 1H 2H 0I 1I 2I 0J 1J 2J 0K 1K 2K 0L)
    ; ymmB=(1L 2L 0M 1M 2M 0N 1N 2N 0O 1O 2O 0P 1P 2P 0Q 1Q
    ;       2Q 0R 1R 2R 0S 1S 2S 0T 1T 2T 0U 1U 2U 0V 1V 2V)

    vmovdqu     ymmC, ymmA
    vinserti128 ymmA, ymmF, xmmA, 0     ; ymmA=(00 10 20 01 11 21 02 12 22 03 13 23 04 14 24 05
                                        ;       0G 1G 2G 0H 1H 2H 0I 1I 2I 0J 1J 2J 0K 1K 2K 0L)
    vinserti128 ymmC, ymmC, xmmB, 0     ; ymmC=(1L 2L 0M 1M 2M 0N 1N 2N 0O 1O 2O 0P 1P 2P 0Q 1Q
                                        ;       15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A)
    vinserti128 ymmB, ymmB, xmmF, 0     ; ymmB=(2A 0B 1B 2B 0C 1C 2C 0D 1D 2D 0E 1E 2E 0F 1F 2F
                                        ;       2Q 0R 1R 2R 0S 1S 2S 0T 1T 2T 0U 1U 2U 0V 1V 2V)
    vperm2i128  ymmF, ymmC, ymmC, 1     ; ymmF=(15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A
                                        ;       1L 2L 0M 1M 2M 0N 1N 2N 0O 1O 2O 0P 1P 2P 0Q 1Q)

    vmovdqa     ymmG, ymmA
    vpslldq     ymmA, ymmA, 8           ; ymmA=(-- -- -- -- -- -- -- -- 00 10 20 01 11 21 02 12
                                        ;       22 03 13 23 04 14 24 05 0G 1G 2G 0H 1H 2H 0I 1I)
    vpsrldq     ymmG, ymmG, 8           ; ymmG=(22 03 13 23 04 14 24 05 0G 1G 2G 0H 1H 2H 0I 1I
                                        ;       2I 0J 1J 2J 0K 1K 2K 0L -- -- -- -- -- -- -- --)

    vpunpckhbw  ymmA, ymmA, ymmF        ; ymmA=(00 08 10 18 20 28 01 09 11 19 21 29 02 0A 12 1A
                                        ;       0G 0O 1G 1O 2G 2O 0H 0P 1H 1P 2H 2P 0I 0Q 1I 1Q)
    vpslldq     ymmF, ymmF, 8           ; ymmF=(-- -- -- -- -- -- -- -- 15 25 06 16 26 07 17 27
                                        ;       08 18 28 09 19 29 0A 1A 1L 2L 0M 1M 2M 0N 1N 2N)

    vpunpcklbw  ymmG, ymmG, ymmB        ; ymmG=(22 2A 03 0B 13 1B 23 2B 04 0C 14 1C 24 2C 05 0D
                                        ;       2I 2Q 0J 0R 1J 1R 2J 2R 0K 0S 1K 1S 2K 2S 0L 0T)
    vpunpckhbw  ymmF, ymmF, ymmB        ; ymmF=(15 1D 25 2D 06 0E 16 1E 26 2E 07 0F 17 1F 27 2F
                                        ;       1L 1T 2L 2T 0M 0U 1M 1U 2M 2U 0N 0V 1N 1V 2N 2V)

    vmovdqa     ymmD, ymmA
    vpslldq     ymmA, ymmA, 8           ; ymmA=(-- -- -- -- -- -- -- -- 00 08 10 18 20 28 01 09
                                        ;       11 19 21 29 02 0A 12 1A 0G 0O 1G 1O 2G 2O 0H 0P)
    vpsrldq     ymmD, ymmD, 8           ; ymmD=(11 19 21 29 02 0A 12 1A 0G 0O 1G 1O 2G 2O 0H 0P
                                        ;       1H 1P 2H 2P 0I 0Q 1I 1Q -- -- -- -- -- -- -- --)

    vpunpckhbw  ymmA, ymmA, ymmG        ; ymmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 01 05 09 0D
                                        ;       0G 0K 0O 0S 1G 1K 1O 1S 2G 2K 2O 2S 0H 0L 0P 0T)
    vpslldq     ymmG, ymmG, 8           ; ymmG=(-- -- -- -- -- -- -- -- 22 2A 03 0B 13 1B 23 2B
                                        ;       04 0C 14 1C 24 2C 05 0D 2I 2Q 0J 0R 1J 1R 2J 2R)

    vpunpcklbw  ymmD, ymmD, ymmF        ; ymmD=(11 15 19 1D 21 25 29 2D 02 06 0A 0E 12 16 1A 1E
                                        ;       1H 1L 1P 1T 2H 2L 2P 2T 0I 0M 0Q 0U 1I 1M 1Q 1U)
    vpunpckhbw  ymmG, ymmG, ymmF        ; ymmG=(22 26 2A 2E 03 07 0B 0F 13 17 1B 1F 23 27 2B 2F
                                        ;       2I 2M 2Q 2U 0J 0N 0R 0V 1J 1N 1R 1V 2J 2N 2R 2V)

    vmovdqa     ymmE, ymmA
    vpslldq     ymmA, ymmA, 8           ; ymmA=(-- -- -- -- -- -- -- -- 00 04 08 0C 10 14 18 1C
                                        ;       20 24 28 2C 01 05 09 0D 0G 0K 0O 0S 1G 1K 1O 1S)
    vpsrldq     ymmE, ymmE, 8           ; ymmE=(20 24 28 2C 01 05 09 0D 0G 0K 0O 0S 1G 1K 1O 1S
                                        ;       2G 2K 2O 2S 0H 0L 0P 0T -- -- -- -- -- -- -- --)

    vpunpckhbw  ymmA, ymmA, ymmD        ; ymmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E
                                        ;       0G 0I 0K 0M 0O 0Q 0S 0U 1G 1I 1K 1M 1O 1Q 1S 1U)
    vpslldq     ymmD, ymmD, 8           ; ymmD=(-- -- -- -- -- -- -- -- 11 15 19 1D 21 25 29 2D
                                        ;       02 06 0A 0E 12 16 1A 1E 1H 1L 1P 1T 2H 2L 2P 2T)

    vpunpcklbw  ymmE, ymmE, ymmG        ; ymmE=(20 22 24 26 28 2A 2C 2E 01 03 05 07 09 0B 0D 0F
                                        ;       2G 2I 2K 2M 2O 2Q 2S 2U 0H 0J 0L 0N 0P 0R 0T 0V)
    vpunpckhbw  ymmD, ymmD, ymmG        ; ymmD=(11 13 15 17 19 1B 1D 1F 21 23 25 27 29 2B 2D 2F
                                        ;       1H 1J 1L 1N 1P 1R 1T 1V 2H 2J 2L 2N 2P 2R 2T 2V)

    vpxor       ymmH, ymmH, ymmH

    vmovdqa     ymmC, ymmA
    vpunpcklbw  ymmA, ymmA, ymmH        ; ymmA=(00 02 04 06 08 0A 0C 0E 0G 0I 0K 0M 0O 0Q 0S 0U)
    vpunpckhbw  ymmC, ymmC, ymmH        ; ymmC=(10 12 14 16 18 1A 1C 1E 1G 1I 1K 1M 1O 1Q 1S 1U)

    vmovdqa     ymmB, ymmE
    vpunpcklbw  ymmE, ymmE, ymmH        ; ymmE=(20 22 24 26 28 2A 2C 2E 2G 2I 2K 2M 2O 2Q 2S 2U)
    vpunpckhbw  ymmB, ymmB, ymmH        ; ymmB=(01 03 05 07 09 0B 0D 0F 0H 0J 0L 0N 0P 0R 0T 0V)

    vmovdqa     ymmF, ymmD
    vpunpcklbw  ymmD, ymmD, ymmH        ; ymmD=(11 13 15 17 19 1B 1D 1F 1H 1J 1L 1N 1P 1R 1T 1V)
    vpunpckhbw  ymmF, ymmF, ymmH        ; ymmF=(21 23 25 27 29 2B 2D 2F 2H 2J 2L 2N 2P 2R 2T 2V)
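
    ; The byte-level transpose is now complete for this block: each of the
    ; three components has its even- and odd-numbered samples in separate
    ; registers, zero-extended to words.  The ymmA-ymmH names are aliases
    ; for ymm0-ymm7 (set up in jcolsamp.inc) chosen so that the summary
    ; after %endif holds for any supported RGB component order.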

%else  ; RGB_PIXELSIZE == 4 ; -----------

.column_ld1:
    test        cl, SIZEOF_XMMWORD/16
    jz          short .column_ld2
    sub         ecx, byte SIZEOF_XMMWORD/16
    vmovd       xmmA, XMM_DWORD [esi+ecx*RGB_PIXELSIZE]
.column_ld2:
    test        cl, SIZEOF_XMMWORD/8
    jz          short .column_ld4
    sub         ecx, byte SIZEOF_XMMWORD/8
    vmovq       xmmF, XMM_MMWORD [esi+ecx*RGB_PIXELSIZE]
    vpslldq     xmmA, xmmA, SIZEOF_MMWORD
    vpor        xmmA, xmmA, xmmF
.column_ld4:
    test        cl, SIZEOF_XMMWORD/4
    jz          short .column_ld8
    sub         ecx, byte SIZEOF_XMMWORD/4
    vmovdqa     xmmF, xmmA
    vperm2i128  ymmF, ymmF, ymmF, 1
    vmovdqu     xmmA, XMMWORD [esi+ecx*RGB_PIXELSIZE]
    vpor        ymmA, ymmA, ymmF
.column_ld8:
    test        cl, SIZEOF_XMMWORD/2
    jz          short .column_ld16
    sub         ecx, byte SIZEOF_XMMWORD/2
    vmovdqa     ymmF, ymmA
    vmovdqu     ymmA, YMMWORD [esi+ecx*RGB_PIXELSIZE]
.column_ld16:
    test        cl, SIZEOF_XMMWORD
    mov         ecx, SIZEOF_YMMWORD
    jz          short .rgb_ycc_cnv
    vmovdqa     ymmE, ymmA
    vmovdqa     ymmH, ymmF
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    jmp         short .rgb_ycc_cnv
    alignx      16, 7

.columnloop:
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    vmovdqu     ymmE, YMMWORD [esi+2*SIZEOF_YMMWORD]
    vmovdqu     ymmH, YMMWORD [esi+3*SIZEOF_YMMWORD]

.rgb_ycc_cnv:
    ; ymmA=(00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33
    ;       04 14 24 34 05 15 25 35 06 16 26 36 07 17 27 37)
    ; ymmF=(08 18 28 38 09 19 29 39 0A 1A 2A 3A 0B 1B 2B 3B
    ;       0C 1C 2C 3C 0D 1D 2D 3D 0E 1E 2E 3E 0F 1F 2F 3F)
    ; ymmE=(0G 1G 2G 3G 0H 1H 2H 3H 0I 1I 2I 3I 0J 1J 2J 3J
    ;       0K 1K 2K 3K 0L 1L 2L 3L 0M 1M 2M 3M 0N 1N 2N 3N)
    ; ymmH=(0O 1O 2O 3O 0P 1P 2P 3P 0Q 1Q 2Q 3Q 0R 1R 2R 3R
    ;       0S 1S 2S 3S 0T 1T 2T 3T 0U 1U 2U 3U 0V 1V 2V 3V)

    vmovdqa     ymmB, ymmA
    vinserti128 ymmA, ymmA, xmmE, 1     ; ymmA=(00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33
                                        ;       0G 1G 2G 3G 0H 1H 2H 3H 0I 1I 2I 3I 0J 1J 2J 3J)
    vperm2i128  ymmE, ymmB, ymmE, 0x31  ; ymmE=(04 14 24 34 05 15 25 35 06 16 26 36 07 17 27 37
                                        ;       0K 1K 2K 3K 0L 1L 2L 3L 0M 1M 2M 3M 0N 1N 2N 3N)

    vmovdqa     ymmB, ymmF
    vinserti128 ymmF, ymmF, xmmH, 1     ; ymmF=(08 18 28 38 09 19 29 39 0A 1A 2A 3A 0B 1B 2B 3B
                                        ;       0O 1O 2O 3O 0P 1P 2P 3P 0Q 1Q 2Q 3Q 0R 1R 2R 3R)
    vperm2i128  ymmH, ymmB, ymmH, 0x31  ; ymmH=(0C 1C 2C 3C 0D 1D 2D 3D 0E 1E 2E 3E 0F 1F 2F 3F
                                        ;       0S 1S 2S 3S 0T 1T 2T 3T 0U 1U 2U 3U 0V 1V 2V 3V)

    vmovdqa     ymmD, ymmA
    vpunpcklbw  ymmA, ymmA, ymmE        ; ymmA=(00 04 10 14 20 24 30 34 01 05 11 15 21 25 31 35
                                        ;       0G 0K 1G 1K 2G 2K 3G 3K 0H 0L 1H 1L 2H 2L 3H 3L)
    vpunpckhbw  ymmD, ymmD, ymmE        ; ymmD=(02 06 12 16 22 26 32 36 03 07 13 17 23 27 33 37
                                        ;       0I 0M 1I 1M 2I 2M 3I 3M 0J 0N 1J 1N 2J 2N 3J 3N)

    vmovdqa     ymmC, ymmF
    vpunpcklbw  ymmF, ymmF, ymmH        ; ymmF=(08 0C 18 1C 28 2C 38 3C 09 0D 19 1D 29 2D 39 3D
                                        ;       0O 0S 1O 1S 2O 2S 3O 3S 0P 0T 1P 1T 2P 2T 3P 3T)
    vpunpckhbw  ymmC, ymmC, ymmH        ; ymmC=(0A 0E 1A 1E 2A 2E 3A 3E 0B 0F 1B 1F 2B 2F 3B 3F
                                        ;       0Q 0U 1Q 1U 2Q 2U 3Q 3U 0R 0V 1R 1V 2R 2V 3R 3V)

    vmovdqa     ymmB, ymmA
    vpunpcklwd  ymmA, ymmA, ymmF        ; ymmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 30 34 38 3C
                                        ;       0G 0K 0O 0S 1G 1K 1O 1S 2G 2K 2O 2S 3G 3K 3O 3S)
    vpunpckhwd  ymmB, ymmB, ymmF        ; ymmB=(01 05 09 0D 11 15 19 1D 21 25 29 2D 31 35 39 3D
                                        ;       0H 0L 0P 0T 1H 1L 1P 1T 2H 2L 2P 2T 3H 3L 3P 3T)

    vmovdqa     ymmG, ymmD
    vpunpcklwd  ymmD, ymmD, ymmC        ; ymmD=(02 06 0A 0E 12 16 1A 1E 22 26 2A 2E 32 36 3A 3E
                                        ;       0I 0M 0Q 0U 1I 1M 1Q 1U 2I 2M 2Q 2U 3I 3M 3Q 3U)
    vpunpckhwd  ymmG, ymmG, ymmC        ; ymmG=(03 07 0B 0F 13 17 1B 1F 23 27 2B 2F 33 37 3B 3F
                                        ;       0J 0N 0R 0V 1J 1N 1R 1V 2J 2N 2R 2V 3J 3N 3R 3V)

    vmovdqa     ymmE, ymmA
    vpunpcklbw  ymmA, ymmA, ymmD        ; ymmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E
                                        ;       0G 0I 0K 0M 0O 0Q 0S 0U 1G 1I 1K 1M 1O 1Q 1S 1U)
    vpunpckhbw  ymmE, ymmE, ymmD        ; ymmE=(20 22 24 26 28 2A 2C 2E 30 32 34 36 38 3A 3C 3E
                                        ;       2G 2I 2K 2M 2O 2Q 2S 2U 3G 3I 3K 3M 3O 3Q 3S 3U)

    vmovdqa     ymmH, ymmB
    vpunpcklbw  ymmB, ymmB, ymmG        ; ymmB=(01 03 05 07 09 0B 0D 0F 11 13 15 17 19 1B 1D 1F
                                        ;       0H 0J 0L 0N 0P 0R 0T 0V 1H 1J 1L 1N 1P 1R 1T 1V)
    vpunpckhbw  ymmH, ymmH, ymmG        ; ymmH=(21 23 25 27 29 2B 2D 2F 31 33 35 37 39 3B 3D 3F
                                        ;       2H 2J 2L 2N 2P 2R 2T 2V 3H 3J 3L 3N 3P 3R 3T 3V)

    vpxor       ymmF, ymmF, ymmF

    vmovdqa     ymmC, ymmA
    vpunpcklbw  ymmA, ymmA, ymmF        ; ymmA=(00 02 04 06 08 0A 0C 0E 0G 0I 0K 0M 0O 0Q 0S 0U)
    vpunpckhbw  ymmC, ymmC, ymmF        ; ymmC=(10 12 14 16 18 1A 1C 1E 1G 1I 1K 1M 1O 1Q 1S 1U)

    vmovdqa     ymmD, ymmB
    vpunpcklbw  ymmB, ymmB, ymmF        ; ymmB=(01 03 05 07 09 0B 0D 0F 0H 0J 0L 0N 0P 0R 0T 0V)
    vpunpckhbw  ymmD, ymmD, ymmF        ; ymmD=(11 13 15 17 19 1B 1D 1F 1H 1J 1L 1N 1P 1R 1T 1V)

    vmovdqa     ymmG, ymmE
    vpunpcklbw  ymmE, ymmE, ymmF        ; ymmE=(20 22 24 26 28 2A 2C 2E 2G 2I 2K 2M 2O 2Q 2S 2U)
    vpunpckhbw  ymmG, ymmG, ymmF        ; ymmG=(30 32 34 36 38 3A 3C 3E 3G 3I 3K 3M 3O 3Q 3S 3U)

    vpunpcklbw  ymmF, ymmF, ymmH
    vpunpckhbw  ymmH, ymmH, ymmH
    vpsrlw      ymmF, ymmF, BYTE_BIT    ; ymmF=(21 23 25 27 29 2B 2D 2F 2H 2J 2L 2N 2P 2R 2T 2V)
    vpsrlw      ymmH, ymmH, BYTE_BIT    ; ymmH=(31 33 35 37 39 3B 3D 3F 3H 3J 3L 3N 3P 3R 3T 3V)
%endif  ; RGB_PIXELSIZE ; ---------------

    ; ymm0=R(02468ACEGIKMOQSU)=RE, ymm2=G(02468ACEGIKMOQSU)=GE, ymm4=B(02468ACEGIKMOQSU)=BE
    ; ymm1=R(13579BDFHJLNPRTV)=RO, ymm3=G(13579BDFHJLNPRTV)=GO, ymm5=B(13579BDFHJLNPRTV)=BO

    ; (Original)
    ; Y  =  0.29900 * R + 0.58700 * G + 0.11400 * B
    ; Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + CENTERJSAMPLE
    ; Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + CENTERJSAMPLE
    ;
    ; (This implementation)
    ; Y  =  0.29900 * R + 0.33700 * G + 0.11400 * B + 0.25000 * G
    ; Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + CENTERJSAMPLE
    ; Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + CENTERJSAMPLE
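
    ; Splitting 0.58700*G into 0.33700*G + 0.25000*G lets every product be
    ; formed with vpmaddwd on (R,G) and (B,G) word pairs: PW_F0299_F0337
    ; covers the R and G terms, PW_F0114_F0250 the B and G terms.  The
    ; 0.50000 factors need no multiplier at all: with SCALEBITS = 16,
    ; x*FIX(0.5) == x << 15, which is what the vpunpck{l,h}wd-with-zero
    ; plus vpsrld-by-1 sequences below compute.  One Y sample, as scalar C
    ; (illustrative only):
    ;
    ;   y = (FIX(0.29900) * r + FIX(0.33700) * g +   /* one vpmaddwd */
    ;        FIX(0.11400) * b + FIX(0.25000) * g +   /* one vpmaddwd */
    ;        ONE_HALF) >> SCALEBITS;                 /* round, descale */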

    vmovdqa     YMMWORD [wk(0)], ymm0   ; wk(0)=RE
    vmovdqa     YMMWORD [wk(1)], ymm1   ; wk(1)=RO
    vmovdqa     YMMWORD [wk(2)], ymm4   ; wk(2)=BE
    vmovdqa     YMMWORD [wk(3)], ymm5   ; wk(3)=BO

    vmovdqa     ymm6, ymm1
    vpunpcklwd  ymm1, ymm1, ymm3
    vpunpckhwd  ymm6, ymm6, ymm3
    vmovdqa     ymm7, ymm1
    vmovdqa     ymm4, ymm6
    vpmaddwd    ymm1, ymm1, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm1=ROL*FIX(0.299)+GOL*FIX(0.337)
    vpmaddwd    ymm6, ymm6, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm6=ROH*FIX(0.299)+GOH*FIX(0.337)
    vpmaddwd    ymm7, ymm7, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm7=ROL*-FIX(0.168)+GOL*-FIX(0.331)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm4=ROH*-FIX(0.168)+GOH*-FIX(0.331)

    vmovdqa     YMMWORD [wk(4)], ymm1   ; wk(4)=ROL*FIX(0.299)+GOL*FIX(0.337)
    vmovdqa     YMMWORD [wk(5)], ymm6   ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)

    vpxor       ymm1, ymm1, ymm1
    vpxor       ymm6, ymm6, ymm6
    vpunpcklwd  ymm1, ymm1, ymm5        ; ymm1=BOL
    vpunpckhwd  ymm6, ymm6, ymm5        ; ymm6=BOH
    vpsrld      ymm1, ymm1, 1           ; ymm1=BOL*FIX(0.500)
    vpsrld      ymm6, ymm6, 1           ; ymm6=BOH*FIX(0.500)

    vmovdqa     ymm5, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm5=[PD_ONEHALFM1_CJ]

    vpaddd      ymm7, ymm7, ymm1
    vpaddd      ymm4, ymm4, ymm6
    vpaddd      ymm7, ymm7, ymm5
    vpaddd      ymm4, ymm4, ymm5
    vpsrld      ymm7, ymm7, SCALEBITS   ; ymm7=CbOL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=CbOH
    vpackssdw   ymm7, ymm7, ymm4        ; ymm7=CbO
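
    ; PD_ONEHALFM1_CJ combines the descaling rounding constant
    ; ((1 << (SCALEBITS - 1)) - 1) with the CENTERJSAMPLE offset pre-scaled
    ; by SCALEBITS, so a single vpaddd applies both the chroma bias and the
    ; rounding, mirroring the rounding constants the C implementation uses
    ; for Cb and Cr.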

    vmovdqa     ymm1, YMMWORD [wk(2)]   ; ymm1=BE

    vmovdqa     ymm6, ymm0
    vpunpcklwd  ymm0, ymm0, ymm2
    vpunpckhwd  ymm6, ymm6, ymm2
    vmovdqa     ymm5, ymm0
    vmovdqa     ymm4, ymm6
    vpmaddwd    ymm0, ymm0, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm0=REL*FIX(0.299)+GEL*FIX(0.337)
    vpmaddwd    ymm6, ymm6, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm6=REH*FIX(0.299)+GEH*FIX(0.337)
    vpmaddwd    ymm5, ymm5, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm5=REL*-FIX(0.168)+GEL*-FIX(0.331)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm4=REH*-FIX(0.168)+GEH*-FIX(0.331)

    vmovdqa     YMMWORD [wk(6)], ymm0   ; wk(6)=REL*FIX(0.299)+GEL*FIX(0.337)
    vmovdqa     YMMWORD [wk(7)], ymm6   ; wk(7)=REH*FIX(0.299)+GEH*FIX(0.337)

    vpxor       ymm0, ymm0, ymm0
    vpxor       ymm6, ymm6, ymm6
    vpunpcklwd  ymm0, ymm0, ymm1        ; ymm0=BEL
    vpunpckhwd  ymm6, ymm6, ymm1        ; ymm6=BEH
    vpsrld      ymm0, ymm0, 1           ; ymm0=BEL*FIX(0.500)
    vpsrld      ymm6, ymm6, 1           ; ymm6=BEH*FIX(0.500)

    vmovdqa     ymm1, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm1=[PD_ONEHALFM1_CJ]

    vpaddd      ymm5, ymm5, ymm0
    vpaddd      ymm4, ymm4, ymm6
    vpaddd      ymm5, ymm5, ymm1
    vpaddd      ymm4, ymm4, ymm1
    vpsrld      ymm5, ymm5, SCALEBITS   ; ymm5=CbEL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=CbEH
    vpackssdw   ymm5, ymm5, ymm4        ; ymm5=CbE

    vpsllw      ymm7, ymm7, BYTE_BIT
    vpor        ymm5, ymm5, ymm7        ; ymm5=Cb
    vmovdqu     YMMWORD [ebx], ymm5     ; Save Cb
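
    ; CbE holds the even-numbered samples in the low byte of each word and
    ; CbO the odd-numbered samples; shifting CbO left by BYTE_BIT and
    ; OR-ing merges them, so the 32 bytes just stored are Cb0 Cb1 Cb2 ...
    ; in natural pixel order.  Y and Cr are interleaved the same way below.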

    vmovdqa     ymm0, YMMWORD [wk(3)]   ; ymm0=BO
    vmovdqa     ymm6, YMMWORD [wk(2)]   ; ymm6=BE
    vmovdqa     ymm1, YMMWORD [wk(1)]   ; ymm1=RO

    vmovdqa     ymm4, ymm0
    vpunpcklwd  ymm0, ymm0, ymm3
    vpunpckhwd  ymm4, ymm4, ymm3
    vmovdqa     ymm7, ymm0
    vmovdqa     ymm5, ymm4
    vpmaddwd    ymm0, ymm0, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm0=BOL*FIX(0.114)+GOL*FIX(0.250)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm4=BOH*FIX(0.114)+GOH*FIX(0.250)
    vpmaddwd    ymm7, ymm7, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm7=BOL*-FIX(0.081)+GOL*-FIX(0.418)
    vpmaddwd    ymm5, ymm5, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm5=BOH*-FIX(0.081)+GOH*-FIX(0.418)

    vmovdqa     ymm3, [GOTOFF(eax,PD_ONEHALF)]  ; ymm3=[PD_ONEHALF]

    vpaddd      ymm0, ymm0, YMMWORD [wk(4)]
    vpaddd      ymm4, ymm4, YMMWORD [wk(5)]
    vpaddd      ymm0, ymm0, ymm3
    vpaddd      ymm4, ymm4, ymm3
    vpsrld      ymm0, ymm0, SCALEBITS   ; ymm0=YOL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=YOH
    vpackssdw   ymm0, ymm0, ymm4        ; ymm0=YO

    vpxor       ymm3, ymm3, ymm3
    vpxor       ymm4, ymm4, ymm4
    vpunpcklwd  ymm3, ymm3, ymm1        ; ymm3=ROL
    vpunpckhwd  ymm4, ymm4, ymm1        ; ymm4=ROH
    vpsrld      ymm3, ymm3, 1           ; ymm3=ROL*FIX(0.500)
    vpsrld      ymm4, ymm4, 1           ; ymm4=ROH*FIX(0.500)

    vmovdqa     ymm1, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm1=[PD_ONEHALFM1_CJ]

    vpaddd      ymm7, ymm7, ymm3
    vpaddd      ymm5, ymm5, ymm4
    vpaddd      ymm7, ymm7, ymm1
    vpaddd      ymm5, ymm5, ymm1
    vpsrld      ymm7, ymm7, SCALEBITS   ; ymm7=CrOL
    vpsrld      ymm5, ymm5, SCALEBITS   ; ymm5=CrOH
    vpackssdw   ymm7, ymm7, ymm5        ; ymm7=CrO

    vmovdqa     ymm3, YMMWORD [wk(0)]   ; ymm3=RE

    vmovdqa     ymm4, ymm6
    vpunpcklwd  ymm6, ymm6, ymm2
    vpunpckhwd  ymm4, ymm4, ymm2
    vmovdqa     ymm1, ymm6
    vmovdqa     ymm5, ymm4
    vpmaddwd    ymm6, ymm6, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm6=BEL*FIX(0.114)+GEL*FIX(0.250)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm4=BEH*FIX(0.114)+GEH*FIX(0.250)
    vpmaddwd    ymm1, ymm1, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm1=BEL*-FIX(0.081)+GEL*-FIX(0.418)
    vpmaddwd    ymm5, ymm5, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm5=BEH*-FIX(0.081)+GEH*-FIX(0.418)

    vmovdqa     ymm2, [GOTOFF(eax,PD_ONEHALF)]  ; ymm2=[PD_ONEHALF]

    vpaddd      ymm6, ymm6, YMMWORD [wk(6)]
    vpaddd      ymm4, ymm4, YMMWORD [wk(7)]
    vpaddd      ymm6, ymm6, ymm2
    vpaddd      ymm4, ymm4, ymm2
    vpsrld      ymm6, ymm6, SCALEBITS   ; ymm6=YEL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=YEH
    vpackssdw   ymm6, ymm6, ymm4        ; ymm6=YE

    vpsllw      ymm0, ymm0, BYTE_BIT
    vpor        ymm6, ymm6, ymm0        ; ymm6=Y
    vmovdqu     YMMWORD [edi], ymm6     ; Save Y
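
    ; Note that Y uses plain PD_ONEHALF (the rounding constant alone); the
    ; CENTERJSAMPLE bias in PD_ONEHALFM1_CJ applies only to Cb and Cr.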

    vpxor       ymm2, ymm2, ymm2
    vpxor       ymm4, ymm4, ymm4
    vpunpcklwd  ymm2, ymm2, ymm3        ; ymm2=REL
    vpunpckhwd  ymm4, ymm4, ymm3        ; ymm4=REH
    vpsrld      ymm2, ymm2, 1           ; ymm2=REL*FIX(0.500)
    vpsrld      ymm4, ymm4, 1           ; ymm4=REH*FIX(0.500)

    vmovdqa     ymm0, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm0=[PD_ONEHALFM1_CJ]

    vpaddd      ymm1, ymm1, ymm2
    vpaddd      ymm5, ymm5, ymm4
    vpaddd      ymm1, ymm1, ymm0
    vpaddd      ymm5, ymm5, ymm0
    vpsrld      ymm1, ymm1, SCALEBITS   ; ymm1=CrEL
    vpsrld      ymm5, ymm5, SCALEBITS   ; ymm5=CrEH
    vpackssdw   ymm1, ymm1, ymm5        ; ymm1=CrE

    vpsllw      ymm7, ymm7, BYTE_BIT
    vpor        ymm1, ymm1, ymm7        ; ymm1=Cr
    vmovdqu     YMMWORD [edx], ymm1     ; Save Cr

    sub         ecx, byte SIZEOF_YMMWORD
    add         esi, RGB_PIXELSIZE*SIZEOF_YMMWORD  ; inptr
    add         edi, byte SIZEOF_YMMWORD           ; outptr0
    add         ebx, byte SIZEOF_YMMWORD           ; outptr1
    add         edx, byte SIZEOF_YMMWORD           ; outptr2
    cmp         ecx, byte SIZEOF_YMMWORD
    jae         near .columnloop
    test        ecx, ecx
    jnz         near .column_ld1
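
    ; Loop accounting: .columnloop consumes SIZEOF_YMMWORD (32) pixels per
    ; pass.  A remainder of 1-31 pixels takes one last trip through the
    ; .column_ld1 tail, which gathers the leftover samples into a final
    ; 32-pixel block and sets ecx = SIZEOF_YMMWORD, so the sub above leaves
    ; ecx == 0 and the loop exits after converting it.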

    pop         ecx                     ; col
    pop         esi
    pop         edi
    pop         ebx
    pop         edx
    poppic      eax

    add         esi, byte SIZEOF_JSAMPROW  ; input_buf
    add         edi, byte SIZEOF_JSAMPROW
    add         ebx, byte SIZEOF_JSAMPROW
    add         edx, byte SIZEOF_JSAMPROW
    dec         eax                        ; num_rows
    jg          near .rowloop

.return:
    vzeroupper
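    ; vzeroupper clears the upper YMM lanes before returning, avoiding
    ; AVX-to-SSE transition penalties in case the caller runs legacy SSE
    ; code.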
    pop         edi
    pop         esi
;   pop         edx                     ; need not be preserved
;   pop         ecx                     ; need not be preserved
    pop         ebx
    mov         esp, ebp                ; esp <- aligned ebp
    pop         esp                     ; esp <- original ebp
    pop         ebp
    ret

; For some reason, the OS X linker does not honor the request to align the
; segment unless we do this.
    align       32