Searched refs:xmm4 (Results 1 - 25 of 154) sorted by relevance

/external/llvm/test/MC/X86/
x86_64-xop-encoding.s
32 // CHECK: vphaddwq (%rcx), %xmm4
34 vphaddwq (%rcx), %xmm4
43 // CHECK: vphaddwd %xmm3, %xmm4
45 vphaddwd %xmm3, %xmm4
64 // CHECK: vphaddudq 8(%rcx,%rax), %xmm4
66 vphaddudq 8(%rcx,%rax), %xmm4
80 // CHECK: vphaddubq (%rcx), %xmm4
82 vphaddubq (%rcx), %xmm4
96 // CHECK: vphadddq (%rdx), %xmm4
98 vphadddq (%rdx), %xmm4
[all...]
/external/libvpx/libvpx/vp8/common/x86/
iwalsh_sse2.asm
36 movdqa xmm4, xmm0
38 punpckhqdq xmm4, xmm3 ;c1 b1
40 movdqa xmm1, xmm4 ;c1 b1
41 paddw xmm4, xmm0 ;dl+cl a1+b1 aka op[4] op[0]
49 movdqa xmm3, xmm4 ; 13 12 11 10 03 02 01 00
50 punpcklwd xmm4, xmm0 ; 23 03 22 02 21 01 20 00
52 movdqa xmm1, xmm4 ; 23 03 22 02 21 01 20 00
53 punpcklwd xmm4, xmm3 ; 31 21 11 01 30 20 10 00
58 movdqa xmm3, xmm4 ;ip[4] ip[0]
62 paddw xmm4, xmm
[all...]
idctllm_sse2.asm
33 movd xmm4, [rax]
36 pinsrw xmm4, [rax+32], 4
39 pmullw xmm4, xmm5
51 pshuflw xmm4, xmm4, 00000000b
52 pshufhw xmm4, xmm4, 00000000b
55 paddw xmm4, [GLOBAL(fours)]
57 psraw xmm4, 3
71 paddw xmm0, xmm4
[all...]
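
The idctllm_sse2.asm hits dequantize a pair of DC values (pmullw), broadcast each one across its 64-bit half with pshuflw/pshufhw, then round and descale with paddw/psraw, i.e. (dc + 4) >> 3 per lane. A minimal C intrinsics sketch of that broadcast-round-shift step, assuming only what the quoted comments show (names are illustrative):

    #include <emmintrin.h>

    /* Broadcast the first word of each 64-bit half across that half, then
       compute (x + 4) >> 3 per 16-bit lane, mirroring the
       pshuflw/pshufhw + paddw [fours] + psraw 3 sequence. */
    static __m128i broadcast_round_shift3(__m128i dc)
    {
        __m128i x = _mm_shufflelo_epi16(dc, 0x00);  /* pshuflw x, x, 00000000b */
        x = _mm_shufflehi_epi16(x, 0x00);           /* pshufhw x, x, 00000000b */
        x = _mm_add_epi16(x, _mm_set1_epi16(4));    /* paddw x, [GLOBAL(fours)] */
        return _mm_srai_epi16(x, 3);                /* psraw x, 3 */
    }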
subpixel_sse2.asm
68 movdqa xmm4, xmm1
75 psrldq xmm4, 1 ; xx 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00 -1
78 punpcklbw xmm4, xmm0 ; xx06 xx05 xx04 xx03 xx02 xx01 xx00 xx-1
81 pmullw xmm4, XMMWORD PTR [rdx+16] ; x[-1] * H[-1]; Tap 2
104 paddsw xmm4, xmm7
105 paddsw xmm4, xmm5
107 paddsw xmm4, xmm3
108 paddsw xmm4, xmm6
110 paddsw xmm4, xmm1
111 paddsw xmm4, [GLOBA
[all...]
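
The subpixel_sse2.asm hits show one tap of a horizontal filter: psrldq shifts the source window by one byte, punpcklbw zero-extends the bytes to words, pmullw multiplies by the tap coefficient loaded from [rdx+16], and paddsw accumulates the taps with signed saturation. A sketch of a single tap under those assumptions (the zeroed xmm0 and the tap layout are inferred from the comments, not quoted here):

    #include <emmintrin.h>

    /* One filter tap: zero-extend 8 source bytes to 16 bits, scale by the
       tap coefficient, and accumulate with saturation
       (punpcklbw + pmullw + paddsw). */
    static __m128i accumulate_tap(__m128i acc, __m128i src_bytes, __m128i tap)
    {
        __m128i zero = _mm_setzero_si128();
        __m128i px = _mm_unpacklo_epi8(src_bytes, zero); /* punpcklbw px, zero */
        px = _mm_mullo_epi16(px, tap);                   /* pmullw px, tap */
        return _mm_adds_epi16(acc, px);                  /* paddsw acc, px */
    }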
/external/libvpx/libvpx/vp8/encoder/x86/
dct_sse2.asm
96 movdqa xmm4, xmm3
98 pmaddwd xmm4, XMMWORD PTR[GLOBAL(_2217_neg5352)];d1*2217 - c1*5352
101 paddd xmm4, XMMWORD PTR[GLOBAL(_7500)]
103 psrad xmm4, 12 ;(d1 * 2217 - c1 * 5352 + 7500)>>12
106 packssdw xmm3, xmm4 ;op[3] op[1]
139 pxor xmm4, xmm4 ;zero out for compare
142 pcmpeqw xmm2, xmm4
148 movdqa xmm4, xmm3
150 pmaddwd xmm4, XMMWOR
[all...]
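
The dct_sse2.asm hits are the constant-multiply stage of the forward 4x4 DCT: pmaddwd multiplies adjacent 16-bit pairs and sums them into 32-bit lanes, so with the coefficients interleaved as (c1, d1) pairs and the constant holding (-5352, 2217) pairs each dword becomes d1*2217 - c1*5352; adding 7500 and shifting right by 12 rounds the result, and packssdw narrows it back to 16 bits. A sketch of that step, assuming the interleaving implied by the quoted comments:

    #include <emmintrin.h>

    /* (d1 * 2217 - c1 * 5352 + 7500) >> 12 for four lanes at once.
       `cd` is assumed to hold interleaved (c1, d1) word pairs. */
    static __m128i dct_const_stage(__m128i cd)
    {
        __m128i k = _mm_setr_epi16(-5352, 2217, -5352, 2217,
                                   -5352, 2217, -5352, 2217);
        __m128i v = _mm_madd_epi16(cd, k);           /* pmaddwd: c1*-5352 + d1*2217 */
        v = _mm_add_epi32(v, _mm_set1_epi32(7500));  /* rounding constant */
        v = _mm_srai_epi32(v, 12);                   /* psrad 12 */
        return _mm_packs_epi32(v, v);                /* packssdw */
    }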
/external/libjpeg-turbo/simd/
jfdctint-sse2-64.asm
127 movdqa xmm4,xmm0 ; transpose coefficients(phase 1)
129 punpckhwd xmm4,xmm1 ; xmm4=(04 14 05 15 06 16 07 17)
167 movdqa xmm2,xmm4 ; transpose coefficients(phase 2)
168 punpckldq xmm4,xmm5 ; xmm4=(04 14 24 34 05 15 25 35)
193 movdqa xmm0,xmm4 ; transpose coefficients(phase 3)
194 punpcklqdq xmm4,xmm5 ; xmm4=(04 14 24 34 44 54 64 74)=data4
199 paddw xmm1,xmm4 ; xmm
[all...]
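
The "transpose coefficients" phases in the libjpeg-turbo DCT kernels build an 8x8 transpose of 16-bit coefficients entirely out of interleaves: phase 1 interleaves pairs of rows word by word, phase 2 repeats the idea at dword granularity (punpckldq), and phase 3 at qword granularity (punpcklqdq), until, e.g., data4 = (04 14 24 34 44 54 64 74). A sketch of phase 1 for two rows, matching the quoted comment:

    #include <emmintrin.h>

    /* Phase 1 of the word transpose: interleave two rows of 8 x int16.
       row0 = (00 01 02 03 04 05 06 07), row1 = (10 11 12 13 14 15 16 17). */
    static void transpose_phase1(__m128i row0, __m128i row1,
                                 __m128i *lo, __m128i *hi)
    {
        *lo = _mm_unpacklo_epi16(row0, row1); /* (00 10 01 11 02 12 03 13) */
        *hi = _mm_unpackhi_epi16(row0, row1); /* (04 14 05 15 06 16 07 17) */
    }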
jfdctint-sse2.asm
133 movdqa xmm4,xmm0 ; transpose coefficients(phase 1)
135 punpckhwd xmm4,xmm1 ; xmm4=(04 14 05 15 06 16 07 17)
173 movdqa xmm2,xmm4 ; transpose coefficients(phase 2)
174 punpckldq xmm4,xmm5 ; xmm4=(04 14 24 34 05 15 25 35)
199 movdqa xmm0,xmm4 ; transpose coefficients(phase 3)
200 punpcklqdq xmm4,xmm5 ; xmm4=(04 14 24 34 44 54 64 74)=data4
205 paddw xmm1,xmm4 ; xmm
[all...]
jidctfst-sse2-64.asm
145 pshufd xmm4,xmm7,0x55 ; xmm4=col5=(05 05 05 05 05 05 05 05)
166 movdqa xmm4,xmm0
170 paddw xmm4,xmm2 ; xmm4=tmp10
177 movdqa xmm6,xmm4
179 psubw xmm4,xmm5 ; xmm4=tmp3
184 movdqa XMMWORD [wk(1)], xmm4 ; wk(1)=tmp3
198 movdqa xmm4,xmm
[all...]
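
A recurring idiom in these jidctfst hits is the copy-before-destructive-op pattern: SSE2 instructions overwrite their destination, so movdqa duplicates tmp10 into another register before psubw turns the original into tmp3, and the spare value is parked in a stack slot (wk(1)) to free a register. A trivial sketch of keeping both values (names are illustrative):

    #include <emmintrin.h>

    /* Keep a value and its difference with another operand, as the
       movdqa copy + destructive psubw pair does before spilling to wk(1). */
    static void keep_and_subtract(__m128i a, __m128i b,
                                  __m128i *kept, __m128i *diff)
    {
        *kept = a;                   /* movdqa xmm6, xmm4 */
        *diff = _mm_sub_epi16(a, b); /* psubw  xmm4, xmm5 */
    }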
jidctfst-sse2.asm
151 pshufd xmm4,xmm7,0x55 ; xmm4=col5=(05 05 05 05 05 05 05 05)
173 movdqa xmm4,xmm0
177 paddw xmm4,xmm2 ; xmm4=tmp10
184 movdqa xmm6,xmm4
186 psubw xmm4,xmm5 ; xmm4=tmp3
191 movdqa XMMWORD [wk(1)], xmm4 ; wk(1)=tmp3
205 movdqa xmm4,xmm
[all...]
jfdctflt-sse-64.asm
93 movaps xmm4,xmm0 ; transpose coefficients(phase 1)
95 unpckhps xmm4,xmm1 ; xmm4=(22 32 23 33)
108 movaps XMMWORD [wk(0)], xmm4 ; wk(0)=(22 32 23 33)
111 movaps xmm4,xmm6 ; transpose coefficients(phase 1)
113 unpckhps xmm4,xmm7 ; xmm4=(02 12 03 13)
137 movaps xmm7,xmm4 ; transpose coefficients(phase 2)
138 unpcklps2 xmm4,xmm2 ; xmm4
[all...]
jidctint-sse2-64.asm
151 movdqa xmm4,xmm5 ; xmm5=in0=(00 01 02 03 04 05 06 07)
153 punpckhwd xmm4,xmm4 ; xmm4=(04 04 05 05 06 06 07 07)
159 pshufd xmm0,xmm4,0x00 ; xmm0=col4=(04 04 04 04 04 04 04 04)
160 pshufd xmm3,xmm4,0x55 ; xmm3=col5=(05 05 05 05 05 05 05 05)
161 pshufd xmm2,xmm4,0xAA ; xmm2=col6=(06 06 06 06 06 06 06 06)
162 pshufd xmm4,xmm4,0xFF ; xmm4
[all...]
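
The jidctint hits splat individual 16-bit coefficients across whole registers in two steps: punpckhwd of a register with itself duplicates each of its upper four words, and pshufd then replicates one 32-bit lane everywhere, yielding col4..col7 with a single input word in all eight lanes. A sketch of that word splat:

    #include <emmintrin.h>

    /* Replicate word 4 of `in` across a register, as punpckhwd x,x followed
       by pshufd 0x00 does for col4; 0x55, 0xAA and 0xFF give col5..col7. */
    static __m128i splat_word4(__m128i in)
    {
        __m128i d = _mm_unpackhi_epi16(in, in); /* (04 04 05 05 06 06 07 07) */
        return _mm_shuffle_epi32(d, 0x00);      /* (04 04 04 04 04 04 04 04) */
    }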
jidctint-sse2.asm
157 movdqa xmm4,xmm5 ; xmm5=in0=(00 01 02 03 04 05 06 07)
159 punpckhwd xmm4,xmm4 ; xmm4=(04 04 05 05 06 06 07 07)
165 pshufd xmm0,xmm4,0x00 ; xmm0=col4=(04 04 04 04 04 04 04 04)
166 pshufd xmm3,xmm4,0x55 ; xmm3=col5=(05 05 05 05 05 05 05 05)
167 pshufd xmm2,xmm4,0xAA ; xmm2=col6=(06 06 06 06 06 06 06 06)
168 pshufd xmm4,xmm4,0xFF ; xmm4
[all...]
jidctflt-sse2-64.asm
104 movq xmm4, XMM_MMWORD [MMBLOCK(4,0,rsi,SIZEOF_JCOEF)]
109 por xmm3,xmm4
176 movaps xmm4,xmm0
180 addps xmm4,xmm2 ; xmm4=tmp10
186 movaps xmm6,xmm4
188 subps xmm4,xmm5 ; xmm4=tmp3
193 movaps XMMWORD [wk(1)], xmm4 ; tmp3
222 movaps xmm4,xmm
[all...]
jfdctfst-sse2-64.asm
106 movdqa xmm4,xmm0 ; transpose coefficients(phase 1)
108 punpckhwd xmm4,xmm1 ; xmm4=(04 14 05 15 06 16 07 17)
146 movdqa xmm2,xmm4 ; transpose coefficients(phase 2)
147 punpckldq xmm4,xmm5 ; xmm4=(04 14 24 34 05 15 25 35)
172 movdqa xmm0,xmm4 ; transpose coefficients(phase 3)
173 punpcklqdq xmm4,xmm5 ; xmm4=(04 14 24 34 44 54 64 74)=data4
178 paddw xmm1,xmm4 ; xmm
[all...]
jfdctfst-sse2.asm
112 movdqa xmm4,xmm0 ; transpose coefficients(phase 1)
114 punpckhwd xmm4,xmm1 ; xmm4=(04 14 05 15 06 16 07 17)
152 movdqa xmm2,xmm4 ; transpose coefficients(phase 2)
153 punpckldq xmm4,xmm5 ; xmm4=(04 14 24 34 05 15 25 35)
178 movdqa xmm0,xmm4 ; transpose coefficients(phase 3)
179 punpcklqdq xmm4,xmm5 ; xmm4=(04 14 24 34 44 54 64 74)=data4
184 paddw xmm1,xmm4 ; xmm
[all...]
jfdctflt-sse.asm
100 movaps xmm4,xmm0 ; transpose coefficients(phase 1)
102 unpckhps xmm4,xmm1 ; xmm4=(22 32 23 33)
115 movaps XMMWORD [wk(0)], xmm4 ; wk(0)=(22 32 23 33)
118 movaps xmm4,xmm6 ; transpose coefficients(phase 1)
120 unpckhps xmm4,xmm7 ; xmm4=(02 12 03 13)
144 movaps xmm7,xmm4 ; transpose coefficients(phase 2)
145 unpcklps2 xmm4,xmm2 ; xmm4
[all...]
jidctred-sse2-64.asm
182 movdqa xmm4,xmm0
184 punpcklwd xmm4,xmm1
186 movdqa xmm0,xmm4
188 pmaddwd xmm4,[rel PW_F256_F089] ; xmm4=(tmp2L)
204 paddd xmm6,xmm4 ; xmm6=tmp2L
214 movdqa xmm4, XMMWORD [XMMBLOCK(0,0,rsi,SIZEOF_JCOEF)]
217 pmullw xmm4, XMMWORD [XMMBLOCK(0,0,rdx,SIZEOF_ISLOW_MULT_TYPE)]
223 punpcklwd xmm1,xmm4 ; xmm1=tmp0L
224 punpckhwd xmm2,xmm4 ; xmm
[all...]
jidctred-sse2.asm
189 movdqa xmm4,xmm0
191 punpcklwd xmm4,xmm1
193 movdqa xmm0,xmm4
195 pmaddwd xmm4,[GOTOFF(ebx,PW_F256_F089)] ; xmm4=(tmp2L)
211 paddd xmm6,xmm4 ; xmm6=tmp2L
221 movdqa xmm4, XMMWORD [XMMBLOCK(0,0,esi,SIZEOF_JCOEF)]
224 pmullw xmm4, XMMWORD [XMMBLOCK(0,0,edx,SIZEOF_ISLOW_MULT_TYPE)]
230 punpcklwd xmm1,xmm4 ; xmm1=tmp0L
231 punpckhwd xmm2,xmm4 ; xmm
[all...]
/external/boringssl/linux-x86_64/crypto/modes/
ghash-x86_64.S
677 pshufd $255,%xmm2,%xmm4
682 pcmpgtd %xmm4,%xmm5
703 movdqa %xmm3,%xmm4
705 pslldq $8,%xmm4
707 pxor %xmm4,%xmm0
709 movdqa %xmm0,%xmm4
719 pxor %xmm4,%xmm0
723 movdqa %xmm0,%xmm4
725 pxor %xmm4,%xmm1
726 pxor %xmm0,%xmm4
[all...]
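
In the GHASH carry-less multiply here, the Karatsuba middle term has to be folded back into the low and high 128-bit halves of the product; the pslldq $8 / pxor lines do the low half of that fold, and the matching psrldq $8 / pxor half belongs to the same standard pattern even though it does not appear among these hits. A minimal sketch of the fold only, with the surrounding pclmulqdq context assumed rather than shown:

    #include <emmintrin.h>

    /* Fold the 128-bit Karatsuba middle term into the low/high partial
       products (pslldq $8 / psrldq $8 + pxor). */
    static void fold_middle(__m128i mid, __m128i *lo, __m128i *hi)
    {
        *lo = _mm_xor_si128(*lo, _mm_slli_si128(mid, 8)); /* pslldq $8; pxor */
        *hi = _mm_xor_si128(*hi, _mm_srli_si128(mid, 8)); /* psrldq $8; pxor */
    }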
aesni-gcm-x86_64.S
9 vpxor %xmm4,%xmm4,%xmm4
17 vmovdqu %xmm4,16+8(%rsp)
69 vpxor %xmm4,%xmm8,%xmm8
71 vpxor %xmm5,%xmm1,%xmm4
108 vpxor %xmm1,%xmm4,%xmm4
130 vpxor %xmm2,%xmm4,%xmm4
[all...]
/external/boringssl/mac-x86_64/crypto/modes/
ghash-x86_64.S
676 pshufd $255,%xmm2,%xmm4
681 pcmpgtd %xmm4,%xmm5
702 movdqa %xmm3,%xmm4
704 pslldq $8,%xmm4
706 pxor %xmm4,%xmm0
708 movdqa %xmm0,%xmm4
718 pxor %xmm4,%xmm0
722 movdqa %xmm0,%xmm4
724 pxor %xmm4,%xmm1
725 pxor %xmm0,%xmm4
[all...]
aesni-gcm-x86_64.S
9 vpxor %xmm4,%xmm4,%xmm4
17 vmovdqu %xmm4,16+8(%rsp)
69 vpxor %xmm4,%xmm8,%xmm8
71 vpxor %xmm5,%xmm1,%xmm4
108 vpxor %xmm1,%xmm4,%xmm4
130 vpxor %xmm2,%xmm4,%xmm4
[all...]
/external/boringssl/win-x86_64/crypto/modes/
ghash-x86_64.asm
703 pshufd xmm4,xmm2,255
708 pcmpgtd xmm5,xmm4
729 movdqa xmm4,xmm3
731 pslldq xmm4,8
733 pxor xmm0,xmm4
735 movdqa xmm4,xmm0
745 pxor xmm0,xmm4
749 movdqa xmm4,xmm0
751 pxor xmm1,xmm4
752 pxor xmm4,xmm
[all...]
/external/boringssl/win-x86/crypto/aes/
vpaes-x86.asm
100 movdqa xmm4,[32+ebp]
104 pxor xmm4,xmm5
106 pxor xmm0,xmm4
110 movdqa xmm4,[ecx*1+ebx]
134 movdqa xmm4,xmm7
138 pxor xmm4,xmm5
146 movdqa xmm4,[96+ebp]
149 pxor xmm4,xmm5
152 pxor xmm0,xmm4
180 movdqa xmm4,[eb
[all...]
/external/boringssl/src/crypto/aes/asm/
vpaes-x86_64.pl
108 movdqa %xmm13, %xmm4 # 4 : sb1u
110 pshufb %xmm2, %xmm4 # 4 = sb1u
112 pxor %xmm5, %xmm4 # 4 = sb1u + k
114 pxor %xmm4, %xmm0 # 0 = A
117 movdqa (%r11,%r10), %xmm4 # .Lk_mc_backward[]
125 pshufb %xmm4, %xmm3 # 3 = D
144 movdqa %xmm10, %xmm4 # 4 : 1/j
146 pshufb %xmm0, %xmm4 # 4 = 1/j
148 pxor %xmm5, %xmm4 # 4 = jak = 1/j + a/k
152 pshufb %xmm4,
[all...]
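
vpaes keeps the AES S-box factored into small 16-entry tables and uses pshufb as a 16-way parallel table lookup: each index byte selects a byte of the table register, and the looked-up value is then xored with the round key ("sb1u + k" in the comments above). A minimal sketch of one such lookup; the table contents and names here are placeholders, not the actual vpaes constants:

    #include <tmmintrin.h>  /* SSSE3 _mm_shuffle_epi8 (pshufb) */

    /* Look up the low nibble of every byte in a 16-entry table and xor in a
       key, the way the movdqa/pshufb/pxor triple handles sb1u. */
    static __m128i nibble_lookup_xor(__m128i table, __m128i in, __m128i key)
    {
        __m128i lo = _mm_and_si128(in, _mm_set1_epi8(0x0f)); /* low nibbles */
        __m128i t  = _mm_shuffle_epi8(table, lo);            /* pshufb lookup */
        return _mm_xor_si128(t, key);                        /* pxor round key */
    }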

Completed in 291 milliseconds
