Wed, 31 Dec 2014 06:09:35 +0100
Cloned the upstream tor-browser origin at tor-browser-31.3.0esr-4.5-1-build1
(revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f) for hacking purposes.
;
; jcclrss2.asm - colorspace conversion (SSE2)
;
; x86 SIMD extension for IJG JPEG library
; Copyright (C) 1999-2006, MIYASAKA Masaru.
; For conditions of distribution and use, see copyright notice in jsimdext.inc
;
; This file should be assembled with NASM (Netwide Assembler); it can *not*
; be assembled with Microsoft's MASM or any compatible assembler (including
; Borland's Turbo Assembler).
; NASM is available from http://nasm.sourceforge.net/ or
; http://sourceforge.net/project/showfiles.php?group_id=6208
;
; [TAB8]

%include "jcolsamp.inc"

; --------------------------------------------------------------------------
;
; Convert some rows of samples to the output colorspace.
;
; GLOBAL(void)
; jsimd_rgb_ycc_convert_sse2 (JDIMENSION img_width,
;                             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
;                             JDIMENSION output_row, int num_rows);
;

%define img_width(b)    (b)+8           ; JDIMENSION img_width
%define input_buf(b)    (b)+12          ; JSAMPARRAY input_buf
%define output_buf(b)   (b)+16          ; JSAMPIMAGE output_buf
%define output_row(b)   (b)+20          ; JDIMENSION output_row
%define num_rows(b)     (b)+24          ; int num_rows
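
; Here (b) is the value captured into eax at function entry, i.e. esp
; immediately after the prologue's "push ebp": [b+0] holds the saved ebp,
; [b+4] the return address, and the 32-bit cdecl arguments start at [b+8]
; in 4-byte slots, which is why img_width sits at (b)+8 and the remaining
; parameters follow at successive 4-byte offsets.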

%define original_ebp    ebp+0
%define wk(i)           ebp-(WK_NUM-(i))*SIZEOF_XMMWORD ; xmmword wk[WK_NUM]
%define WK_NUM          8
%define gotptr          wk(0)-SIZEOF_POINTER    ; void * gotptr
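
; wk() addresses WK_NUM (8) 16-byte scratch slots laid out just below the
; aligned frame pointer built in the prologue; wk(0) is the lowest slot
; and wk(7) the highest.  gotptr reserves one additional pointer directly
; below wk(0) to cache the GOT address for position-independent builds.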

        align   16

        global  EXTN(jsimd_rgb_ycc_convert_sse2)

EXTN(jsimd_rgb_ycc_convert_sse2):
        push    ebp
        mov     eax,esp                         ; eax = original ebp
        sub     esp, byte 4
        and     esp, byte (-SIZEOF_XMMWORD)     ; align to 128 bits
        mov     [esp],eax
        mov     ebp,esp                         ; ebp = aligned ebp
        lea     esp, [wk(0)]
        pushpic eax                             ; make room for GOT address
        push    ebx
;       push    ecx                             ; need not be preserved
;       push    edx                             ; need not be preserved
        push    esi
        push    edi

        get_GOT ebx                             ; get GOT address
        movpic  POINTER [gotptr], ebx           ; save GOT address
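
; The prologue above builds a 16-byte-aligned frame so movdqa can be used
; on the wk() slots: the pre-alignment esp (ebp already pushed) is kept in
; eax, esp is rounded down to a 16-byte boundary, the old value is stored
; at [esp] so the epilogue can undo everything with "pop esp", the aligned
; esp becomes the new ebp, and esp is then dropped to wk(0) to reserve the
; scratch area.  The get_GOT/pushpic/movpic macros come from jsimdext.inc;
; in position-independent builds they fetch and cache the GOT address used
; by the GOTOFF() constant references below, and in non-PIC builds they
; reduce to little or nothing.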

        mov     ecx, JDIMENSION [img_width(eax)]
        test    ecx,ecx
        jz      near .return

        push    ecx

        mov     esi, JSAMPIMAGE [output_buf(eax)]
        mov     ecx, JDIMENSION [output_row(eax)]
        mov     edi, JSAMPARRAY [esi+0*SIZEOF_JSAMPARRAY]
        mov     ebx, JSAMPARRAY [esi+1*SIZEOF_JSAMPARRAY]
        mov     edx, JSAMPARRAY [esi+2*SIZEOF_JSAMPARRAY]
        lea     edi, [edi+ecx*SIZEOF_JSAMPROW]
        lea     ebx, [ebx+ecx*SIZEOF_JSAMPROW]
        lea     edx, [edx+ecx*SIZEOF_JSAMPROW]

        pop     ecx

        mov     esi, JSAMPARRAY [input_buf(eax)]
        mov     eax, INT [num_rows(eax)]
        test    eax,eax
        jle     near .return
        alignx  16,7
.rowloop:
        pushpic eax
        push    edx
        push    ebx
        push    edi
        push    esi
        push    ecx                     ; col

        mov     esi, JSAMPROW [esi]     ; inptr
        mov     edi, JSAMPROW [edi]     ; outptr0
        mov     ebx, JSAMPROW [ebx]     ; outptr1
        mov     edx, JSAMPROW [edx]     ; outptr2
        movpic  eax, POINTER [gotptr]   ; load GOT address (eax)

        cmp     ecx, byte SIZEOF_XMMWORD
        jae     near .columnloop
        alignx  16,7

%if RGB_PIXELSIZE == 3 ; ---------------
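
; The .column_ld* steps below load a final partial block of fewer than
; SIZEOF_XMMWORD (16) pixels without reading past the end of the row:
; ecx is first scaled to a byte count (3 bytes per pixel) and then split
; by its bits, gathering single bytes, words, dwords and qwords from the
; tail of the row into xmmA, and finally fetching up to two remaining
; full 16-byte chunks from the front with unaligned loads before falling
; through to .rgb_ycc_cnv.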

.column_ld1:
        push    eax
        push    edx
        lea     ecx,[ecx+ecx*2]         ; imul ecx,RGB_PIXELSIZE
        test    cl, SIZEOF_BYTE
        jz      short .column_ld2
        sub     ecx, byte SIZEOF_BYTE
        movzx   eax, BYTE [esi+ecx]
.column_ld2:
        test    cl, SIZEOF_WORD
        jz      short .column_ld4
        sub     ecx, byte SIZEOF_WORD
        movzx   edx, WORD [esi+ecx]
        shl     eax, WORD_BIT
        or      eax,edx
.column_ld4:
        movd    xmmA,eax
        pop     edx
        pop     eax
        test    cl, SIZEOF_DWORD
        jz      short .column_ld8
        sub     ecx, byte SIZEOF_DWORD
        movd    xmmF, XMM_DWORD [esi+ecx]
        pslldq  xmmA, SIZEOF_DWORD
        por     xmmA,xmmF
.column_ld8:
        test    cl, SIZEOF_MMWORD
        jz      short .column_ld16
        sub     ecx, byte SIZEOF_MMWORD
        movq    xmmB, XMM_MMWORD [esi+ecx]
        pslldq  xmmA, SIZEOF_MMWORD
        por     xmmA,xmmB
.column_ld16:
        test    cl, SIZEOF_XMMWORD
        jz      short .column_ld32
        movdqa  xmmF,xmmA
        movdqu  xmmA, XMMWORD [esi+0*SIZEOF_XMMWORD]
        mov     ecx, SIZEOF_XMMWORD
        jmp     short .rgb_ycc_cnv
.column_ld32:
        test    cl, 2*SIZEOF_XMMWORD
        mov     ecx, SIZEOF_XMMWORD
        jz      short .rgb_ycc_cnv
        movdqa  xmmB,xmmA
        movdqu  xmmA, XMMWORD [esi+0*SIZEOF_XMMWORD]
        movdqu  xmmF, XMMWORD [esi+1*SIZEOF_XMMWORD]
        jmp     short .rgb_ycc_cnv
        alignx  16,7

.columnloop:
        movdqu  xmmA, XMMWORD [esi+0*SIZEOF_XMMWORD]
        movdqu  xmmF, XMMWORD [esi+1*SIZEOF_XMMWORD]
        movdqu  xmmB, XMMWORD [esi+2*SIZEOF_XMMWORD]

.rgb_ycc_cnv:
        ; xmmA=(00 10 20 01 11 21 02 12 22 03 13 23 04 14 24 05)
        ; xmmF=(15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A)
        ; xmmB=(2A 0B 1B 2B 0C 1C 2C 0D 1D 2D 0E 1E 2E 0F 1F 2F)
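;
; The shift/unpack sequence below is a byte transpose: starting from the
; packed R,G,B stream shown above, each round splits the registers into
; halves and interleaves them, so that after three rounds the even-
; numbered and odd-numbered pixels of each color component end up in
; separate registers, ready to be widened to words for pmaddwd.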

        movdqa  xmmG,xmmA
        pslldq  xmmA,8  ; xmmA=(-- -- -- -- -- -- -- -- 00 10 20 01 11 21 02 12)
        psrldq  xmmG,8  ; xmmG=(22 03 13 23 04 14 24 05 -- -- -- -- -- -- -- --)

        punpckhbw xmmA,xmmF     ; xmmA=(00 08 10 18 20 28 01 09 11 19 21 29 02 0A 12 1A)
        pslldq  xmmF,8          ; xmmF=(-- -- -- -- -- -- -- -- 15 25 06 16 26 07 17 27)

        punpcklbw xmmG,xmmB     ; xmmG=(22 2A 03 0B 13 1B 23 2B 04 0C 14 1C 24 2C 05 0D)
        punpckhbw xmmF,xmmB     ; xmmF=(15 1D 25 2D 06 0E 16 1E 26 2E 07 0F 17 1F 27 2F)

        movdqa  xmmD,xmmA
        pslldq  xmmA,8  ; xmmA=(-- -- -- -- -- -- -- -- 00 08 10 18 20 28 01 09)
        psrldq  xmmD,8  ; xmmD=(11 19 21 29 02 0A 12 1A -- -- -- -- -- -- -- --)

        punpckhbw xmmA,xmmG     ; xmmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 01 05 09 0D)
        pslldq  xmmG,8          ; xmmG=(-- -- -- -- -- -- -- -- 22 2A 03 0B 13 1B 23 2B)

        punpcklbw xmmD,xmmF     ; xmmD=(11 15 19 1D 21 25 29 2D 02 06 0A 0E 12 16 1A 1E)
        punpckhbw xmmG,xmmF     ; xmmG=(22 26 2A 2E 03 07 0B 0F 13 17 1B 1F 23 27 2B 2F)

        movdqa  xmmE,xmmA
        pslldq  xmmA,8  ; xmmA=(-- -- -- -- -- -- -- -- 00 04 08 0C 10 14 18 1C)
        psrldq  xmmE,8  ; xmmE=(20 24 28 2C 01 05 09 0D -- -- -- -- -- -- -- --)

        punpckhbw xmmA,xmmD     ; xmmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E)
        pslldq  xmmD,8          ; xmmD=(-- -- -- -- -- -- -- -- 11 15 19 1D 21 25 29 2D)

        punpcklbw xmmE,xmmG     ; xmmE=(20 22 24 26 28 2A 2C 2E 01 03 05 07 09 0B 0D 0F)
        punpckhbw xmmD,xmmG     ; xmmD=(11 13 15 17 19 1B 1D 1F 21 23 25 27 29 2B 2D 2F)

        pxor    xmmH,xmmH

        movdqa  xmmC,xmmA
        punpcklbw xmmA,xmmH     ; xmmA=(00 02 04 06 08 0A 0C 0E)
        punpckhbw xmmC,xmmH     ; xmmC=(10 12 14 16 18 1A 1C 1E)

        movdqa  xmmB,xmmE
        punpcklbw xmmE,xmmH     ; xmmE=(20 22 24 26 28 2A 2C 2E)
        punpckhbw xmmB,xmmH     ; xmmB=(01 03 05 07 09 0B 0D 0F)

        movdqa  xmmF,xmmD
        punpcklbw xmmD,xmmH     ; xmmD=(11 13 15 17 19 1B 1D 1F)
        punpckhbw xmmF,xmmH     ; xmmF=(21 23 25 27 29 2B 2D 2F)

%else ; RGB_PIXELSIZE == 4 ; -----------
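
; The RGB_PIXELSIZE == 4 tail handling is simpler because every pixel is
; one dword: the remaining pixel count in ecx is split by its bits, and
; 1, 2 or 4 trailing pixels are gathered with movd/movq/movdqu from the
; end of the row, followed by up to two full 16-byte chunks from the
; front, again avoiding any load beyond the end of the buffer.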

.column_ld1:
        test    cl, SIZEOF_XMMWORD/16
        jz      short .column_ld2
        sub     ecx, byte SIZEOF_XMMWORD/16
        movd    xmmA, XMM_DWORD [esi+ecx*RGB_PIXELSIZE]
.column_ld2:
        test    cl, SIZEOF_XMMWORD/8
        jz      short .column_ld4
        sub     ecx, byte SIZEOF_XMMWORD/8
        movq    xmmE, XMM_MMWORD [esi+ecx*RGB_PIXELSIZE]
        pslldq  xmmA, SIZEOF_MMWORD
        por     xmmA,xmmE
.column_ld4:
        test    cl, SIZEOF_XMMWORD/4
        jz      short .column_ld8
        sub     ecx, byte SIZEOF_XMMWORD/4
        movdqa  xmmE,xmmA
        movdqu  xmmA, XMMWORD [esi+ecx*RGB_PIXELSIZE]
.column_ld8:
        test    cl, SIZEOF_XMMWORD/2
        mov     ecx, SIZEOF_XMMWORD
        jz      short .rgb_ycc_cnv
        movdqa  xmmF,xmmA
        movdqa  xmmH,xmmE
        movdqu  xmmA, XMMWORD [esi+0*SIZEOF_XMMWORD]
        movdqu  xmmE, XMMWORD [esi+1*SIZEOF_XMMWORD]
        jmp     short .rgb_ycc_cnv
        alignx  16,7

.columnloop:
        movdqu  xmmA, XMMWORD [esi+0*SIZEOF_XMMWORD]
        movdqu  xmmE, XMMWORD [esi+1*SIZEOF_XMMWORD]
        movdqu  xmmF, XMMWORD [esi+2*SIZEOF_XMMWORD]
        movdqu  xmmH, XMMWORD [esi+3*SIZEOF_XMMWORD]

.rgb_ycc_cnv:
        ; xmmA=(00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33)
        ; xmmE=(04 14 24 34 05 15 25 35 06 16 26 36 07 17 27 37)
        ; xmmF=(08 18 28 38 09 19 29 39 0A 1A 2A 3A 0B 1B 2B 3B)
        ; xmmH=(0C 1C 2C 3C 0D 1D 2D 3D 0E 1E 2E 3E 0F 1F 2F 3F)
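;
; From the packed R,G,B,X quadruplets shown above, the unpack sequence
; below performs a byte/word transpose that separates each component into
; even-pixel and odd-pixel registers; the fourth (filler) byte of every
; pixel is carried through the shuffle but plays no part in the Y/Cb/Cr
; arithmetic that follows.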

        movdqa  xmmD,xmmA
        punpcklbw xmmA,xmmE     ; xmmA=(00 04 10 14 20 24 30 34 01 05 11 15 21 25 31 35)
        punpckhbw xmmD,xmmE     ; xmmD=(02 06 12 16 22 26 32 36 03 07 13 17 23 27 33 37)

        movdqa  xmmC,xmmF
        punpcklbw xmmF,xmmH     ; xmmF=(08 0C 18 1C 28 2C 38 3C 09 0D 19 1D 29 2D 39 3D)
        punpckhbw xmmC,xmmH     ; xmmC=(0A 0E 1A 1E 2A 2E 3A 3E 0B 0F 1B 1F 2B 2F 3B 3F)

        movdqa  xmmB,xmmA
        punpcklwd xmmA,xmmF     ; xmmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 30 34 38 3C)
        punpckhwd xmmB,xmmF     ; xmmB=(01 05 09 0D 11 15 19 1D 21 25 29 2D 31 35 39 3D)

        movdqa  xmmG,xmmD
        punpcklwd xmmD,xmmC     ; xmmD=(02 06 0A 0E 12 16 1A 1E 22 26 2A 2E 32 36 3A 3E)
        punpckhwd xmmG,xmmC     ; xmmG=(03 07 0B 0F 13 17 1B 1F 23 27 2B 2F 33 37 3B 3F)

        movdqa  xmmE,xmmA
        punpcklbw xmmA,xmmD     ; xmmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E)
        punpckhbw xmmE,xmmD     ; xmmE=(20 22 24 26 28 2A 2C 2E 30 32 34 36 38 3A 3C 3E)

        movdqa  xmmH,xmmB
        punpcklbw xmmB,xmmG     ; xmmB=(01 03 05 07 09 0B 0D 0F 11 13 15 17 19 1B 1D 1F)
        punpckhbw xmmH,xmmG     ; xmmH=(21 23 25 27 29 2B 2D 2F 31 33 35 37 39 3B 3D 3F)

        pxor    xmmF,xmmF

        movdqa  xmmC,xmmA
        punpcklbw xmmA,xmmF     ; xmmA=(00 02 04 06 08 0A 0C 0E)
        punpckhbw xmmC,xmmF     ; xmmC=(10 12 14 16 18 1A 1C 1E)

        movdqa  xmmD,xmmB
        punpcklbw xmmB,xmmF     ; xmmB=(01 03 05 07 09 0B 0D 0F)
        punpckhbw xmmD,xmmF     ; xmmD=(11 13 15 17 19 1B 1D 1F)

        movdqa  xmmG,xmmE
        punpcklbw xmmE,xmmF     ; xmmE=(20 22 24 26 28 2A 2C 2E)
        punpckhbw xmmG,xmmF     ; xmmG=(30 32 34 36 38 3A 3C 3E)

        punpcklbw xmmF,xmmH
        punpckhbw xmmH,xmmH
        psrlw   xmmF,BYTE_BIT   ; xmmF=(21 23 25 27 29 2B 2D 2F)
        psrlw   xmmH,BYTE_BIT   ; xmmH=(31 33 35 37 39 3B 3D 3F)

%endif ; RGB_PIXELSIZE ; ---------------

        ; xmm0=R(02468ACE)=RE, xmm2=G(02468ACE)=GE, xmm4=B(02468ACE)=BE
        ; xmm1=R(13579BDF)=RO, xmm3=G(13579BDF)=GO, xmm5=B(13579BDF)=BO

        ; (Original)
        ; Y  =  0.29900 * R + 0.58700 * G + 0.11400 * B
        ; Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + CENTERJSAMPLE
        ; Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + CENTERJSAMPLE
        ;
        ; (This implementation)
        ; Y  =  0.29900 * R + 0.33700 * G + 0.11400 * B + 0.25000 * G
        ; Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + CENTERJSAMPLE
        ; Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + CENTERJSAMPLE
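;
; A rough worked sketch of the fixed-point scheme, assuming the usual
; libjpeg conventions for the constants referenced below (SCALEBITS = 16,
; FIX(x) = x * 2^16 rounded, PD_ONEHALF = 1<<15 for rounding, and
; PD_ONEHALFM1_CJ = (1<<15)-1 + (CENTERJSAMPLE<<16), i.e. rounding plus
; the +CENTERJSAMPLE offset):
;
;   0.587*G is split into 0.337*G + 0.250*G so that the R/G and B/G terms
;   can each be produced by a single pmaddwd against one packed constant
;   pair (PW_F0299_F0337 and PW_F0114_F0250), and the 0.500 factors are
;   never multiplied at all: the sample is placed in the high word of a
;   dword (i.e. scaled by 2^16) and shifted right by 1, which equals a
;   multiplication by FIX(0.500).
;
;   For a gray pixel with R = G = B = 128 these conventions give
;   FIX(0.299)+FIX(0.337)+FIX(0.114)+FIX(0.250) = 65536, so
;     Y  = (65536*128 + PD_ONEHALF) >> SCALEBITS = 128 exactly, and
;     Cb = (-32768*128 + 128*FIX(0.500) + PD_ONEHALFM1_CJ) >> SCALEBITS
;        = 128 = CENTERJSAMPLE, as expected for a neutral gray.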

        movdqa  XMMWORD [wk(0)], xmm0   ; wk(0)=RE
        movdqa  XMMWORD [wk(1)], xmm1   ; wk(1)=RO
        movdqa  XMMWORD [wk(2)], xmm4   ; wk(2)=BE
        movdqa  XMMWORD [wk(3)], xmm5   ; wk(3)=BO

        movdqa  xmm6,xmm1
        punpcklwd xmm1,xmm3
        punpckhwd xmm6,xmm3
        movdqa  xmm7,xmm1
        movdqa  xmm4,xmm6
        pmaddwd xmm1,[GOTOFF(eax,PW_F0299_F0337)]       ; xmm1=ROL*FIX(0.299)+GOL*FIX(0.337)
        pmaddwd xmm6,[GOTOFF(eax,PW_F0299_F0337)]       ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)
        pmaddwd xmm7,[GOTOFF(eax,PW_MF016_MF033)]       ; xmm7=ROL*-FIX(0.168)+GOL*-FIX(0.331)
        pmaddwd xmm4,[GOTOFF(eax,PW_MF016_MF033)]       ; xmm4=ROH*-FIX(0.168)+GOH*-FIX(0.331)

        movdqa  XMMWORD [wk(4)], xmm1   ; wk(4)=ROL*FIX(0.299)+GOL*FIX(0.337)
        movdqa  XMMWORD [wk(5)], xmm6   ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)

        pxor    xmm1,xmm1
        pxor    xmm6,xmm6
        punpcklwd xmm1,xmm5             ; xmm1=BOL
        punpckhwd xmm6,xmm5             ; xmm6=BOH
        psrld   xmm1,1                  ; xmm1=BOL*FIX(0.500)
        psrld   xmm6,1                  ; xmm6=BOH*FIX(0.500)

        movdqa  xmm5,[GOTOFF(eax,PD_ONEHALFM1_CJ)]      ; xmm5=[PD_ONEHALFM1_CJ]

        paddd   xmm7,xmm1
        paddd   xmm4,xmm6
        paddd   xmm7,xmm5
        paddd   xmm4,xmm5
        psrld   xmm7,SCALEBITS          ; xmm7=CbOL
        psrld   xmm4,SCALEBITS          ; xmm4=CbOH
        packssdw xmm7,xmm4              ; xmm7=CbO

        movdqa  xmm1, XMMWORD [wk(2)]   ; xmm1=BE

        movdqa  xmm6,xmm0
        punpcklwd xmm0,xmm2
        punpckhwd xmm6,xmm2
        movdqa  xmm5,xmm0
        movdqa  xmm4,xmm6
        pmaddwd xmm0,[GOTOFF(eax,PW_F0299_F0337)]       ; xmm0=REL*FIX(0.299)+GEL*FIX(0.337)
        pmaddwd xmm6,[GOTOFF(eax,PW_F0299_F0337)]       ; xmm6=REH*FIX(0.299)+GEH*FIX(0.337)
        pmaddwd xmm5,[GOTOFF(eax,PW_MF016_MF033)]       ; xmm5=REL*-FIX(0.168)+GEL*-FIX(0.331)
        pmaddwd xmm4,[GOTOFF(eax,PW_MF016_MF033)]       ; xmm4=REH*-FIX(0.168)+GEH*-FIX(0.331)

        movdqa  XMMWORD [wk(6)], xmm0   ; wk(6)=REL*FIX(0.299)+GEL*FIX(0.337)
        movdqa  XMMWORD [wk(7)], xmm6   ; wk(7)=REH*FIX(0.299)+GEH*FIX(0.337)

        pxor    xmm0,xmm0
        pxor    xmm6,xmm6
        punpcklwd xmm0,xmm1             ; xmm0=BEL
        punpckhwd xmm6,xmm1             ; xmm6=BEH
        psrld   xmm0,1                  ; xmm0=BEL*FIX(0.500)
        psrld   xmm6,1                  ; xmm6=BEH*FIX(0.500)

        movdqa  xmm1,[GOTOFF(eax,PD_ONEHALFM1_CJ)]      ; xmm1=[PD_ONEHALFM1_CJ]

        paddd   xmm5,xmm0
        paddd   xmm4,xmm6
        paddd   xmm5,xmm1
        paddd   xmm4,xmm1
        psrld   xmm5,SCALEBITS          ; xmm5=CbEL
        psrld   xmm4,SCALEBITS          ; xmm4=CbEH
        packssdw xmm5,xmm4              ; xmm5=CbE

        psllw   xmm7,BYTE_BIT
        por     xmm5,xmm7               ; xmm5=Cb
        movdqa  XMMWORD [ebx], xmm5     ; Save Cb
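
; CbE holds the even-numbered samples and CbO the odd-numbered ones, each
; still as 16-bit words; shifting CbO left by BYTE_BIT and OR-ing it over
; CbE re-interleaves them into natural byte order before the 16 Cb bytes
; are stored.  The same even/odd merge is repeated for Y and Cr below.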

        movdqa  xmm0, XMMWORD [wk(3)]   ; xmm0=BO
        movdqa  xmm6, XMMWORD [wk(2)]   ; xmm6=BE
        movdqa  xmm1, XMMWORD [wk(1)]   ; xmm1=RO

        movdqa  xmm4,xmm0
        punpcklwd xmm0,xmm3
        punpckhwd xmm4,xmm3
        movdqa  xmm7,xmm0
        movdqa  xmm5,xmm4
        pmaddwd xmm0,[GOTOFF(eax,PW_F0114_F0250)]       ; xmm0=BOL*FIX(0.114)+GOL*FIX(0.250)
        pmaddwd xmm4,[GOTOFF(eax,PW_F0114_F0250)]       ; xmm4=BOH*FIX(0.114)+GOH*FIX(0.250)
        pmaddwd xmm7,[GOTOFF(eax,PW_MF008_MF041)]       ; xmm7=BOL*-FIX(0.081)+GOL*-FIX(0.418)
        pmaddwd xmm5,[GOTOFF(eax,PW_MF008_MF041)]       ; xmm5=BOH*-FIX(0.081)+GOH*-FIX(0.418)

        movdqa  xmm3,[GOTOFF(eax,PD_ONEHALF)]           ; xmm3=[PD_ONEHALF]

        paddd   xmm0, XMMWORD [wk(4)]
        paddd   xmm4, XMMWORD [wk(5)]
        paddd   xmm0,xmm3
        paddd   xmm4,xmm3
        psrld   xmm0,SCALEBITS          ; xmm0=YOL
        psrld   xmm4,SCALEBITS          ; xmm4=YOH
        packssdw xmm0,xmm4              ; xmm0=YO

        pxor    xmm3,xmm3
        pxor    xmm4,xmm4
        punpcklwd xmm3,xmm1             ; xmm3=ROL
        punpckhwd xmm4,xmm1             ; xmm4=ROH
        psrld   xmm3,1                  ; xmm3=ROL*FIX(0.500)
        psrld   xmm4,1                  ; xmm4=ROH*FIX(0.500)

        movdqa  xmm1,[GOTOFF(eax,PD_ONEHALFM1_CJ)]      ; xmm1=[PD_ONEHALFM1_CJ]

        paddd   xmm7,xmm3
        paddd   xmm5,xmm4
        paddd   xmm7,xmm1
        paddd   xmm5,xmm1
        psrld   xmm7,SCALEBITS          ; xmm7=CrOL
        psrld   xmm5,SCALEBITS          ; xmm5=CrOH
        packssdw xmm7,xmm5              ; xmm7=CrO

        movdqa  xmm3, XMMWORD [wk(0)]   ; xmm3=RE

        movdqa  xmm4,xmm6
        punpcklwd xmm6,xmm2
        punpckhwd xmm4,xmm2
        movdqa  xmm1,xmm6
        movdqa  xmm5,xmm4
        pmaddwd xmm6,[GOTOFF(eax,PW_F0114_F0250)]       ; xmm6=BEL*FIX(0.114)+GEL*FIX(0.250)
        pmaddwd xmm4,[GOTOFF(eax,PW_F0114_F0250)]       ; xmm4=BEH*FIX(0.114)+GEH*FIX(0.250)
        pmaddwd xmm1,[GOTOFF(eax,PW_MF008_MF041)]       ; xmm1=BEL*-FIX(0.081)+GEL*-FIX(0.418)
        pmaddwd xmm5,[GOTOFF(eax,PW_MF008_MF041)]       ; xmm5=BEH*-FIX(0.081)+GEH*-FIX(0.418)

        movdqa  xmm2,[GOTOFF(eax,PD_ONEHALF)]           ; xmm2=[PD_ONEHALF]

        paddd   xmm6, XMMWORD [wk(6)]
        paddd   xmm4, XMMWORD [wk(7)]
        paddd   xmm6,xmm2
        paddd   xmm4,xmm2
        psrld   xmm6,SCALEBITS          ; xmm6=YEL
        psrld   xmm4,SCALEBITS          ; xmm4=YEH
        packssdw xmm6,xmm4              ; xmm6=YE

        psllw   xmm0,BYTE_BIT
        por     xmm6,xmm0               ; xmm6=Y
        movdqa  XMMWORD [edi], xmm6     ; Save Y

        pxor    xmm2,xmm2
        pxor    xmm4,xmm4
        punpcklwd xmm2,xmm3             ; xmm2=REL
        punpckhwd xmm4,xmm3             ; xmm4=REH
        psrld   xmm2,1                  ; xmm2=REL*FIX(0.500)
        psrld   xmm4,1                  ; xmm4=REH*FIX(0.500)

        movdqa  xmm0,[GOTOFF(eax,PD_ONEHALFM1_CJ)]      ; xmm0=[PD_ONEHALFM1_CJ]

        paddd   xmm1,xmm2
        paddd   xmm5,xmm4
        paddd   xmm1,xmm0
        paddd   xmm5,xmm0
        psrld   xmm1,SCALEBITS          ; xmm1=CrEL
        psrld   xmm5,SCALEBITS          ; xmm5=CrEH
        packssdw xmm1,xmm5              ; xmm1=CrE

        psllw   xmm7,BYTE_BIT
        por     xmm1,xmm7               ; xmm1=Cr
        movdqa  XMMWORD [edx], xmm1     ; Save Cr

        sub     ecx, byte SIZEOF_XMMWORD
        add     esi, byte RGB_PIXELSIZE*SIZEOF_XMMWORD  ; inptr
        add     edi, byte SIZEOF_XMMWORD                ; outptr0
        add     ebx, byte SIZEOF_XMMWORD                ; outptr1
        add     edx, byte SIZEOF_XMMWORD                ; outptr2
        cmp     ecx, byte SIZEOF_XMMWORD
        jae     near .columnloop
        test    ecx,ecx
        jnz     near .column_ld1

        pop     ecx                     ; col
        pop     esi
        pop     edi
        pop     ebx
        pop     edx
        poppic  eax

        add     esi, byte SIZEOF_JSAMPROW       ; input_buf
        add     edi, byte SIZEOF_JSAMPROW
        add     ebx, byte SIZEOF_JSAMPROW
        add     edx, byte SIZEOF_JSAMPROW
        dec     eax                             ; num_rows
        jg      near .rowloop

.return:
        pop     edi
        pop     esi
;       pop     edx                     ; need not be preserved
;       pop     ecx                     ; need not be preserved
        pop     ebx
        mov     esp,ebp                 ; esp <- aligned ebp
        pop     esp                     ; esp <- original ebp
        pop     ebp
        ret

; For some reason, the OS X linker does not honor the request to align the
; segment unless we do this.
        align   16