Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purposes.
;
; jcgryss2-64.asm - grayscale colorspace conversion (64-bit SSE2)
;
; x86 SIMD extension for IJG JPEG library
; Copyright (C) 1999-2006, MIYASAKA Masaru.
; Copyright (C) 2011, D. R. Commander.
; For conditions of distribution and use, see copyright notice in jsimdext.inc
;
; This file should be assembled with NASM (Netwide Assembler) and can *not*
; be assembled with Microsoft's MASM or any compatible assembler (including
; Borland's Turbo Assembler).
; NASM is available from http://nasm.sourceforge.net/ or
; http://sourceforge.net/project/showfiles.php?group_id=6208
;
; [TAB8]

%include "jcolsamp.inc"

; --------------------------------------------------------------------------
;
; Convert some rows of samples to the output colorspace.
;
; GLOBAL(void)
; jsimd_rgb_gray_convert_sse2 (JDIMENSION img_width,
;                              JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
;                              JDIMENSION output_row, int num_rows);
;

; r10 = JDIMENSION img_width
; r11 = JSAMPARRAY input_buf
; r12 = JSAMPIMAGE output_buf
; r13 = JDIMENSION output_row
; r14 = int num_rows

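; A rough scalar sketch of what this routine computes (illustrative only;
; FIX(), ONE_HALF, SCALEBITS, GETJSAMPLE and the RGB_* offsets are the usual
; libjpeg fixed-point and sample-access macros, assumed here rather than
; quoted from the C sources):
;
;   for (row = 0; row < num_rows; row++) {
;     JSAMPROW inptr  = input_buf[row];
;     JSAMPROW outptr = output_buf[0][output_row + row];
;     for (col = 0; col < img_width; col++) {
;       int r = GETJSAMPLE(inptr[RGB_RED]);
;       int g = GETJSAMPLE(inptr[RGB_GREEN]);
;       int b = GETJSAMPLE(inptr[RGB_BLUE]);
;       /* Y = 0.299*R + 0.587*G + 0.114*B, rounded, in 16-bit fixed point */
;       outptr[col] = (JSAMPLE)
;         ((FIX(0.29900) * r + FIX(0.58700) * g + FIX(0.11400) * b + ONE_HALF)
;          >> SCALEBITS);
;       inptr += RGB_PIXELSIZE;
;     }
;   }
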
%define wk(i)   rbp-(WK_NUM-(i))*SIZEOF_XMMWORD ; xmmword wk[WK_NUM]
%define WK_NUM  2
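
; With WK_NUM == 2 and SIZEOF_XMMWORD == 16, wk(i) expands to rbp-(2-i)*16:
; wk(0) is the xmmword at rbp-32 and wk(1) the one at rbp-16. Both slots are
; 16-byte aligned because the prologue below aligns rbp to SIZEOF_XMMWORD
; before pointing rsp at wk(0).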

        align   16

        global  EXTN(jsimd_rgb_gray_convert_sse2)

EXTN(jsimd_rgb_gray_convert_sse2):
        push    rbp
        mov     rax,rsp                         ; rax = original rbp
        sub     rsp, byte 4
        and     rsp, byte (-SIZEOF_XMMWORD)     ; align to 128 bits
        mov     [rsp],rax
        mov     rbp,rsp                         ; rbp = aligned rbp
        lea     rsp, [wk(0)]
        collect_args
        push    rbx

        mov     rcx, r10
        test    rcx,rcx
        jz      near .return

        push    rcx

        mov     rsi, r12
        mov     rcx, r13
        mov     rdi, JSAMPARRAY [rsi+0*SIZEOF_JSAMPARRAY]
        lea     rdi, [rdi+rcx*SIZEOF_JSAMPROW]

        pop     rcx

        mov     rsi, r11
        mov     eax, r14d
        test    rax,rax
        jle     near .return
.rowloop:
        push    rdi
        push    rsi
        push    rcx                     ; col

        mov     rsi, JSAMPROW [rsi]     ; inptr
        mov     rdi, JSAMPROW [rdi]     ; outptr0

        cmp     rcx, byte SIZEOF_XMMWORD
        jae     near .columnloop

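; When fewer than SIZEOF_XMMWORD pixels remain, the .column_ldN ladder below
; gathers the leftover 1-15 pixels into the same registers that the full
; 16-pixel .columnloop path fills, so the conversion code at .rgb_gray_cnv
; never needs to special-case a short column; lanes beyond the valid pixels
; simply carry don't-care data.
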
%if RGB_PIXELSIZE == 3 ; ---------------

.column_ld1:
        push    rax
        push    rdx
        lea     rcx,[rcx+rcx*2]         ; imul ecx,RGB_PIXELSIZE
        test    cl, SIZEOF_BYTE
        jz      short .column_ld2
        sub     rcx, byte SIZEOF_BYTE
        movzx   rax, BYTE [rsi+rcx]
.column_ld2:
        test    cl, SIZEOF_WORD
        jz      short .column_ld4
        sub     rcx, byte SIZEOF_WORD
        movzx   rdx, WORD [rsi+rcx]
        shl     rax, WORD_BIT
        or      rax,rdx
.column_ld4:
        movd    xmmA,eax
        pop     rdx
        pop     rax
        test    cl, SIZEOF_DWORD
        jz      short .column_ld8
        sub     rcx, byte SIZEOF_DWORD
        movd    xmmF, XMM_DWORD [rsi+rcx]
        pslldq  xmmA, SIZEOF_DWORD
        por     xmmA,xmmF
.column_ld8:
        test    cl, SIZEOF_MMWORD
        jz      short .column_ld16
        sub     rcx, byte SIZEOF_MMWORD
        movq    xmmB, XMM_MMWORD [rsi+rcx]
        pslldq  xmmA, SIZEOF_MMWORD
        por     xmmA,xmmB
.column_ld16:
        test    cl, SIZEOF_XMMWORD
        jz      short .column_ld32
        movdqa  xmmF,xmmA
        movdqu  xmmA, XMMWORD [rsi+0*SIZEOF_XMMWORD]
        mov     rcx, SIZEOF_XMMWORD
        jmp     short .rgb_gray_cnv
.column_ld32:
        test    cl, 2*SIZEOF_XMMWORD
        mov     rcx, SIZEOF_XMMWORD
        jz      short .rgb_gray_cnv
        movdqa  xmmB,xmmA
        movdqu  xmmA, XMMWORD [rsi+0*SIZEOF_XMMWORD]
        movdqu  xmmF, XMMWORD [rsi+1*SIZEOF_XMMWORD]
        jmp     short .rgb_gray_cnv

.columnloop:
        movdqu  xmmA, XMMWORD [rsi+0*SIZEOF_XMMWORD]
        movdqu  xmmF, XMMWORD [rsi+1*SIZEOF_XMMWORD]
        movdqu  xmmB, XMMWORD [rsi+2*SIZEOF_XMMWORD]

.rgb_gray_cnv:
        ; xmmA=(00 10 20 01 11 21 02 12 22 03 13 23 04 14 24 05)
        ; xmmF=(15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A)
        ; xmmB=(2A 0B 1B 2B 0C 1C 2C 0D 1D 2D 0E 1E 2E 0F 1F 2F)
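        ; The pslldq/psrldq/punpck sequence below is in effect a byte
        ; transpose: the R/G/B bytes of up to 16 pixels, interleaved across
        ; xmmA/xmmF/xmmB as shown above, are regrouped so that even- and
        ; odd-indexed samples of each component end up in separate registers,
        ; then zero-extended to words (the final pxor/punpck steps of this
        ; branch) ready for the pmaddwd stage after the %endif.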

        movdqa    xmmG,xmmA
        pslldq    xmmA,8              ; xmmA=(-- -- -- -- -- -- -- -- 00 10 20 01 11 21 02 12)
        psrldq    xmmG,8              ; xmmG=(22 03 13 23 04 14 24 05 -- -- -- -- -- -- -- --)

        punpckhbw xmmA,xmmF           ; xmmA=(00 08 10 18 20 28 01 09 11 19 21 29 02 0A 12 1A)
        pslldq    xmmF,8              ; xmmF=(-- -- -- -- -- -- -- -- 15 25 06 16 26 07 17 27)

        punpcklbw xmmG,xmmB           ; xmmG=(22 2A 03 0B 13 1B 23 2B 04 0C 14 1C 24 2C 05 0D)
        punpckhbw xmmF,xmmB           ; xmmF=(15 1D 25 2D 06 0E 16 1E 26 2E 07 0F 17 1F 27 2F)

        movdqa    xmmD,xmmA
        pslldq    xmmA,8              ; xmmA=(-- -- -- -- -- -- -- -- 00 08 10 18 20 28 01 09)
        psrldq    xmmD,8              ; xmmD=(11 19 21 29 02 0A 12 1A -- -- -- -- -- -- -- --)

        punpckhbw xmmA,xmmG           ; xmmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 01 05 09 0D)
        pslldq    xmmG,8              ; xmmG=(-- -- -- -- -- -- -- -- 22 2A 03 0B 13 1B 23 2B)

        punpcklbw xmmD,xmmF           ; xmmD=(11 15 19 1D 21 25 29 2D 02 06 0A 0E 12 16 1A 1E)
        punpckhbw xmmG,xmmF           ; xmmG=(22 26 2A 2E 03 07 0B 0F 13 17 1B 1F 23 27 2B 2F)

        movdqa    xmmE,xmmA
        pslldq    xmmA,8              ; xmmA=(-- -- -- -- -- -- -- -- 00 04 08 0C 10 14 18 1C)
        psrldq    xmmE,8              ; xmmE=(20 24 28 2C 01 05 09 0D -- -- -- -- -- -- -- --)

        punpckhbw xmmA,xmmD           ; xmmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E)
        pslldq    xmmD,8              ; xmmD=(-- -- -- -- -- -- -- -- 11 15 19 1D 21 25 29 2D)

        punpcklbw xmmE,xmmG           ; xmmE=(20 22 24 26 28 2A 2C 2E 01 03 05 07 09 0B 0D 0F)
        punpckhbw xmmD,xmmG           ; xmmD=(11 13 15 17 19 1B 1D 1F 21 23 25 27 29 2B 2D 2F)

        pxor      xmmH,xmmH

        movdqa    xmmC,xmmA
        punpcklbw xmmA,xmmH           ; xmmA=(00 02 04 06 08 0A 0C 0E)
        punpckhbw xmmC,xmmH           ; xmmC=(10 12 14 16 18 1A 1C 1E)

        movdqa    xmmB,xmmE
        punpcklbw xmmE,xmmH           ; xmmE=(20 22 24 26 28 2A 2C 2E)
        punpckhbw xmmB,xmmH           ; xmmB=(01 03 05 07 09 0B 0D 0F)

        movdqa    xmmF,xmmD
        punpcklbw xmmD,xmmH           ; xmmD=(11 13 15 17 19 1B 1D 1F)
        punpckhbw xmmF,xmmH           ; xmmF=(21 23 25 27 29 2B 2D 2F)

%else ; RGB_PIXELSIZE == 4 ; -----------

.column_ld1:
        test    cl, SIZEOF_XMMWORD/16
        jz      short .column_ld2
        sub     rcx, byte SIZEOF_XMMWORD/16
        movd    xmmA, XMM_DWORD [rsi+rcx*RGB_PIXELSIZE]
.column_ld2:
        test    cl, SIZEOF_XMMWORD/8
        jz      short .column_ld4
        sub     rcx, byte SIZEOF_XMMWORD/8
        movq    xmmE, XMM_MMWORD [rsi+rcx*RGB_PIXELSIZE]
        pslldq  xmmA, SIZEOF_MMWORD
        por     xmmA,xmmE
.column_ld4:
        test    cl, SIZEOF_XMMWORD/4
        jz      short .column_ld8
        sub     rcx, byte SIZEOF_XMMWORD/4
        movdqa  xmmE,xmmA
        movdqu  xmmA, XMMWORD [rsi+rcx*RGB_PIXELSIZE]
.column_ld8:
        test    cl, SIZEOF_XMMWORD/2
        mov     rcx, SIZEOF_XMMWORD
        jz      short .rgb_gray_cnv
        movdqa  xmmF,xmmA
        movdqa  xmmH,xmmE
        movdqu  xmmA, XMMWORD [rsi+0*SIZEOF_XMMWORD]
        movdqu  xmmE, XMMWORD [rsi+1*SIZEOF_XMMWORD]
        jmp     short .rgb_gray_cnv

.columnloop:
        movdqu  xmmA, XMMWORD [rsi+0*SIZEOF_XMMWORD]
        movdqu  xmmE, XMMWORD [rsi+1*SIZEOF_XMMWORD]
        movdqu  xmmF, XMMWORD [rsi+2*SIZEOF_XMMWORD]
        movdqu  xmmH, XMMWORD [rsi+3*SIZEOF_XMMWORD]

.rgb_gray_cnv:
        ; xmmA=(00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33)
        ; xmmE=(04 14 24 34 05 15 25 35 06 16 26 36 07 17 27 37)
        ; xmmF=(08 18 28 38 09 19 29 39 0A 1A 2A 3A 0B 1B 2B 3B)
        ; xmmH=(0C 1C 2C 3C 0D 1D 2D 3D 0E 1E 2E 3E 0F 1F 2F 3F)
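        ; As in the 3-byte case, the punpcklbw/punpckhbw and punpcklwd/
        ; punpckhwd steps below transpose the data: even- and odd-indexed
        ; samples of each of the four components of up to 16 pixels end up
        ; in separate registers; only the R, G and B registers (see the
        ; summary after the %endif) are used by the conversion that follows.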

        movdqa    xmmD,xmmA
        punpcklbw xmmA,xmmE           ; xmmA=(00 04 10 14 20 24 30 34 01 05 11 15 21 25 31 35)
        punpckhbw xmmD,xmmE           ; xmmD=(02 06 12 16 22 26 32 36 03 07 13 17 23 27 33 37)

        movdqa    xmmC,xmmF
        punpcklbw xmmF,xmmH           ; xmmF=(08 0C 18 1C 28 2C 38 3C 09 0D 19 1D 29 2D 39 3D)
        punpckhbw xmmC,xmmH           ; xmmC=(0A 0E 1A 1E 2A 2E 3A 3E 0B 0F 1B 1F 2B 2F 3B 3F)

        movdqa    xmmB,xmmA
        punpcklwd xmmA,xmmF           ; xmmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 30 34 38 3C)
        punpckhwd xmmB,xmmF           ; xmmB=(01 05 09 0D 11 15 19 1D 21 25 29 2D 31 35 39 3D)

        movdqa    xmmG,xmmD
        punpcklwd xmmD,xmmC           ; xmmD=(02 06 0A 0E 12 16 1A 1E 22 26 2A 2E 32 36 3A 3E)
        punpckhwd xmmG,xmmC           ; xmmG=(03 07 0B 0F 13 17 1B 1F 23 27 2B 2F 33 37 3B 3F)

        movdqa    xmmE,xmmA
        punpcklbw xmmA,xmmD           ; xmmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E)
        punpckhbw xmmE,xmmD           ; xmmE=(20 22 24 26 28 2A 2C 2E 30 32 34 36 38 3A 3C 3E)

        movdqa    xmmH,xmmB
        punpcklbw xmmB,xmmG           ; xmmB=(01 03 05 07 09 0B 0D 0F 11 13 15 17 19 1B 1D 1F)
        punpckhbw xmmH,xmmG           ; xmmH=(21 23 25 27 29 2B 2D 2F 31 33 35 37 39 3B 3D 3F)

        pxor      xmmF,xmmF

        movdqa    xmmC,xmmA
        punpcklbw xmmA,xmmF           ; xmmA=(00 02 04 06 08 0A 0C 0E)
        punpckhbw xmmC,xmmF           ; xmmC=(10 12 14 16 18 1A 1C 1E)

        movdqa    xmmD,xmmB
        punpcklbw xmmB,xmmF           ; xmmB=(01 03 05 07 09 0B 0D 0F)
        punpckhbw xmmD,xmmF           ; xmmD=(11 13 15 17 19 1B 1D 1F)

        movdqa    xmmG,xmmE
        punpcklbw xmmE,xmmF           ; xmmE=(20 22 24 26 28 2A 2C 2E)
        punpckhbw xmmG,xmmF           ; xmmG=(30 32 34 36 38 3A 3C 3E)

        punpcklbw xmmF,xmmH
        punpckhbw xmmH,xmmH
        psrlw     xmmF,BYTE_BIT       ; xmmF=(21 23 25 27 29 2B 2D 2F)
        psrlw     xmmH,BYTE_BIT       ; xmmH=(31 33 35 37 39 3B 3D 3F)

%endif ; RGB_PIXELSIZE ; ---------------

        ; xmm0=R(02468ACE)=RE, xmm2=G(02468ACE)=GE, xmm4=B(02468ACE)=BE
        ; xmm1=R(13579BDF)=RO, xmm3=G(13579BDF)=GO, xmm5=B(13579BDF)=BO

        ; (Original)
        ; Y  =  0.29900 * R + 0.58700 * G + 0.11400 * B
        ;
        ; (This implementation)
        ; Y  =  0.29900 * R + 0.33700 * G + 0.11400 * B + 0.25000 * G
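        ;
        ; The PW_/PD_ constants are the usual libjpeg fixed-point values,
        ; FIX(x) = round(x * 2^SCALEBITS) with SCALEBITS = 16 (carried over
        ; from the scalar code): FIX(0.29900)=19595, FIX(0.33700)=22086,
        ; FIX(0.11400)=7471, FIX(0.25000)=16384, and PD_ONEHALF supplies the
        ; rounding term 1 << (SCALEBITS-1) = 32768 added before the psrld by
        ; SCALEBITS. The G weight is split as 0.587 = 0.337 + 0.250 because
        ; pmaddwd multiplies signed 16-bit words and FIX(0.58700) = 38470
        ; would not fit in a signed word, while each half does; conveniently
        ; 19595 + 22086 + 16384 + 7471 = 65536 = FIX(1.0), so a full-scale
        ; grey input still maps to a full-scale Y.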

        movdqa    xmm6,xmm1
        punpcklwd xmm1,xmm3
        punpckhwd xmm6,xmm3
        pmaddwd   xmm1,[rel PW_F0299_F0337]   ; xmm1=ROL*FIX(0.299)+GOL*FIX(0.337)
        pmaddwd   xmm6,[rel PW_F0299_F0337]   ; xmm6=ROH*FIX(0.299)+GOH*FIX(0.337)

        movdqa    xmm7, xmm6                  ; xmm7=ROH*FIX(0.299)+GOH*FIX(0.337)

        movdqa    xmm6,xmm0
        punpcklwd xmm0,xmm2
        punpckhwd xmm6,xmm2
        pmaddwd   xmm0,[rel PW_F0299_F0337]   ; xmm0=REL*FIX(0.299)+GEL*FIX(0.337)
        pmaddwd   xmm6,[rel PW_F0299_F0337]   ; xmm6=REH*FIX(0.299)+GEH*FIX(0.337)

        movdqa    XMMWORD [wk(0)], xmm0       ; wk(0)=REL*FIX(0.299)+GEL*FIX(0.337)
        movdqa    XMMWORD [wk(1)], xmm6       ; wk(1)=REH*FIX(0.299)+GEH*FIX(0.337)

        movdqa    xmm0, xmm5                  ; xmm0=BO
        movdqa    xmm6, xmm4                  ; xmm6=BE

        movdqa    xmm4,xmm0
        punpcklwd xmm0,xmm3
        punpckhwd xmm4,xmm3
        pmaddwd   xmm0,[rel PW_F0114_F0250]   ; xmm0=BOL*FIX(0.114)+GOL*FIX(0.250)
        pmaddwd   xmm4,[rel PW_F0114_F0250]   ; xmm4=BOH*FIX(0.114)+GOH*FIX(0.250)

        movdqa    xmm3,[rel PD_ONEHALF]       ; xmm3=[PD_ONEHALF]

        paddd     xmm0, xmm1
        paddd     xmm4, xmm7
        paddd     xmm0,xmm3
        paddd     xmm4,xmm3
        psrld     xmm0,SCALEBITS              ; xmm0=YOL
        psrld     xmm4,SCALEBITS              ; xmm4=YOH
        packssdw  xmm0,xmm4                   ; xmm0=YO

        movdqa    xmm4,xmm6
        punpcklwd xmm6,xmm2
        punpckhwd xmm4,xmm2
        pmaddwd   xmm6,[rel PW_F0114_F0250]   ; xmm6=BEL*FIX(0.114)+GEL*FIX(0.250)
        pmaddwd   xmm4,[rel PW_F0114_F0250]   ; xmm4=BEH*FIX(0.114)+GEH*FIX(0.250)

        movdqa    xmm2,[rel PD_ONEHALF]       ; xmm2=[PD_ONEHALF]

        paddd     xmm6, XMMWORD [wk(0)]
        paddd     xmm4, XMMWORD [wk(1)]
        paddd     xmm6,xmm2
        paddd     xmm4,xmm2
        psrld     xmm6,SCALEBITS              ; xmm6=YEL
        psrld     xmm4,SCALEBITS              ; xmm4=YEH
        packssdw  xmm6,xmm4                   ; xmm6=YE

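        ; xmm6 (YE) holds the even-column Y values in the low byte of each
        ; word and xmm0 (YO) the odd-column values; shifting YO left by
        ; BYTE_BIT and OR-ing merges them into 16 consecutive Y samples
        ; (Y0 Y1 ... Y15), which are stored with a single 16-byte write.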
        psllw     xmm0,BYTE_BIT
        por       xmm6,xmm0                   ; xmm6=Y
        movdqa    XMMWORD [rdi], xmm6         ; Save Y

        sub     rcx, byte SIZEOF_XMMWORD
        add     rsi, byte RGB_PIXELSIZE*SIZEOF_XMMWORD  ; inptr
        add     rdi, byte SIZEOF_XMMWORD                ; outptr0
        cmp     rcx, byte SIZEOF_XMMWORD
        jae     near .columnloop
        test    rcx,rcx
        jnz     near .column_ld1

        pop     rcx                     ; col
        pop     rsi
        pop     rdi

        add     rsi, byte SIZEOF_JSAMPROW       ; input_buf
        add     rdi, byte SIZEOF_JSAMPROW
        dec     rax                             ; num_rows
        jg      near .rowloop

.return:
        pop     rbx
        uncollect_args
        mov     rsp,rbp                 ; rsp <- aligned rbp
        pop     rsp                     ; rsp <- original rbp
        pop     rbp
        ret

; For some reason, the OS X linker does not honor the request to align the
; segment unless we do this.
        align   16