Lines matching refs: %ecx

105 	movl	LEN(%esp), %ecx
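The first match loads the length argument off the stack into %ecx; every size comparison and end-relative memory operand below is keyed to that count. From the operand patterns this looks like an i386 SSE2 unaligned memcpy/memmove, with %eax apparently carrying the source pointer and %edx the destination.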
116 cmp $16, %ecx
119 cmpl $32, %ecx
124 movdqu -16(%eax, %ecx), %xmm1
126 movdqu %xmm1, -16(%edx, %ecx)
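Lines 116-126 are the classic overlapping-copy trick for a 17..32-byte block: one 16-byte chunk comes from the front of the buffer (those loads use fixed offsets, so they do not match %ecx) and one from the back, addressed end-relative as -16(%eax, %ecx). A minimal C sketch of the idea using SSE2 intrinsics; the function name is hypothetical:

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Copy n bytes for 16 < n <= 32: a front chunk plus an end-relative
       back chunk.  For n < 32 the two stores overlap, which is harmless
       because both carry the correct bytes for the region they cover.  */
    static void copy_17_to_32(uint8_t *dst, const uint8_t *src, size_t n)
    {
        __m128i head = _mm_loadu_si128((const __m128i *)src);
        __m128i tail = _mm_loadu_si128((const __m128i *)(src + n - 16));
        _mm_storeu_si128((__m128i *)dst, head);
        _mm_storeu_si128((__m128i *)(dst + n - 16), tail);
    }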
130 cmpl $64, %ecx
136 movdqu -16(%eax, %ecx), %xmm2
137 movdqu -32(%eax, %ecx), %xmm3
140 movdqu %xmm2, -16(%edx, %ecx)
141 movdqu %xmm3, -32(%edx, %ecx)
145 cmpl $128, %ecx
153 movdqu -64(%eax, %ecx), %xmm4
154 movdqu -48(%eax, %ecx), %xmm5
155 movdqu -32(%eax, %ecx), %xmm6
156 movdqu -16(%eax, %ecx), %xmm7
161 movdqu %xmm4, -64(%edx, %ecx)
162 movdqu %xmm5, -48(%edx, %ecx)
163 movdqu %xmm6, -32(%edx, %ecx)
164 movdqu %xmm7, -16(%edx, %ecx)
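Lines 130-164 scale the same trick up: two front/back movdqu pairs cover any 33..64-byte block, and four pairs at offsets -64 through -16 cover 65..128 bytes. The matching front-half loads at fixed offsets 0, 16, 32 and 48 do not reference %ecx, which is why only the end-relative halves appear in this listing.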
196 leal (%edx, %ecx), %ebx
201 cmp $SHARED_CACHE_SIZE_HALF, %ecx
222 addl %edx, %ecx
223 subl %edi, %ecx
225 In %ecx now is how many bytes are left to copy.
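Line 201 compares the count against SHARED_CACHE_SIZE_HALF, the usual threshold for switching to non-temporal stores once a block is too large to be worth caching; lines 222-223 then recompute %ecx as end-of-destination minus the cursor in %edi, i.e. the bytes left to copy, as the comment at line 225 says (presumably after an alignment step). A sketch of the streaming policy that the comparison selects, assuming a 16-byte-aligned destination and a multiple-of-16 count; the loop shape and name are illustrative, not the library's:

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Large-block path: non-temporal stores bypass the cache so a huge
       copy does not evict the working set.  */
    static void stream_copy(uint8_t *dst, const uint8_t *src, size_t n)
    {
        for (size_t i = 0; i < n; i += 16) {
            __m128i v = _mm_loadu_si128((const __m128i *)(src + i));
            _mm_stream_si128((__m128i *)(dst + i), v);  /* movntdq */
        }
        _mm_sfence();  /* order the streaming stores before returning */
    }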
230 cmp $32, %ecx
232 cmp $16, %ecx
234 testl %ecx, %ecx
246 movzbl -1(%esi,%ecx), %eax
248 movb %al, -1(%edi,%ecx)
255 movdqu -32(%esi, %ecx), %xmm2
256 movdqu -16(%esi, %ecx), %xmm3
259 movdqu %xmm2, -32(%edi, %ecx)
260 movdqu %xmm3, -16(%edi, %ecx)
265 movdqu -16(%esi, %ecx), %xmm1
267 movdqu %xmm1, -16(%edi, %ecx)
272 movq -8(%esi, %ecx), %xmm1
274 movq %xmm1, -8(%edi, %ecx)
279 movl -4(%esi,%ecx), %ebx
281 movl %ebx, -4(%edi,%ecx)
285 movzwl -2(%esi,%ecx), %eax
287 movw %ax, -2(%edi,%ecx)
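Lines 230-287 finish a copy by picking the widest end-relative move that still fits the remainder in %ecx: movdqu pairs, then movq through an XMM register, then movl, movw and movb, all addressed as -width(%esi, %ecx). A hedged C rendering of the sub-16-byte cases; the assembly branches on exact sizes, while this collapses them into one ladder:

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Copy n bytes for 1 <= n <= 15 with possibly overlapping pairs of
       moves, the C analogue of the -8/-4/-2/-1(%esi, %ecx) operands.  */
    static void copy_1_to_15(uint8_t *dst, const uint8_t *src, size_t n)
    {
        if (n >= 8) {             /* movq pair */
            __m128i a = _mm_loadl_epi64((const __m128i *)src);
            __m128i b = _mm_loadl_epi64((const __m128i *)(src + n - 8));
            _mm_storel_epi64((__m128i *)dst, a);
            _mm_storel_epi64((__m128i *)(dst + n - 8), b);
        } else if (n >= 4) {      /* movl pair; memcpy keeps the
                                     unaligned accesses well defined */
            uint32_t a, b;
            memcpy(&a, src, 4);  memcpy(&b, src + n - 4, 4);
            memcpy(dst, &a, 4);  memcpy(dst + n - 4, &b, 4);
        } else if (n >= 2) {      /* movw pair */
            uint16_t a, b;
            memcpy(&a, src, 2);  memcpy(&b, src + n - 2, 2);
            memcpy(dst, &a, 2);  memcpy(dst + n - 2, &b, 2);
        } else {                  /* movb */
            dst[0] = src[0];
        }
    }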
297 testl %ecx, %ecx
303 movzbl -1(%eax,%ecx), %ebx
305 movb %bl, -1(%edx,%ecx)
310 movzwl -2(%eax,%ecx), %ebx
312 movw %bx, -2(%edx,%ecx)
318 movl -4(%eax,%ecx), %eax
320 movl %eax, -4(%edx,%ecx)
325 movq -8(%eax, %ecx), %xmm1
327 movq %xmm1, -8(%edx, %ecx)
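Lines 297-327 are the same byte/word/dword/qword ladder a second time, now indexed through %eax and %edx instead of %esi and %edi; it apparently serves an entry path that never moved the pointers into the string registers.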
334 /* Compute in %ecx how many bytes are left to copy after
336 movl %ebx, %ecx
337 subl %edx, %ecx
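Lines 336-337 recover the remaining count for this path: %ebx was set to dst+len back at line 196 (leal (%edx, %ecx), %ebx), so subtracting the current destination in %edx leaves exactly the bytes still to copy in %ecx.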
343 cmp $16, %ecx
346 cmpl $32, %ecx
351 movdqu -16(%eax, %ecx), %xmm1
353 movdqu %xmm1, -16(%edx, %ecx)
357 cmpl $64, %ecx
363 movdqu -16(%eax, %ecx), %xmm2
364 movdqu -32(%eax, %ecx), %xmm3
367 movdqu %xmm2, -16(%edx, %ecx)
368 movdqu %xmm3, -32(%edx, %ecx)
372 cmpl $128, %ecx
380 movdqu -64(%eax, %ecx), %xmm4
381 movdqu -48(%eax, %ecx), %xmm5
382 movdqu -32(%eax, %ecx), %xmm6
383 movdqu -16(%eax, %ecx), %xmm7
388 movdqu %xmm4, -64(%edx, %ecx)
389 movdqu %xmm5, -48(%edx, %ecx)
390 movdqu %xmm6, -32(%edx, %ecx)
391 movdqu %xmm7, -16(%edx, %ecx)
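Lines 343-391 repeat the 32/64/128-byte overlapping ladder, offset for offset, for this second entry point; only the surrounding control flow differs from lines 116-164.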
400 movdqu -16(%eax, %ecx), %xmm0
401 movdqu -32(%eax, %ecx), %xmm1
402 movdqu -48(%eax, %ecx), %xmm2
403 movdqu -64(%eax, %ecx), %xmm3
405 leal (%edx, %ecx), %edi
416 movdqu %xmm0, -16(%edx, %ecx)
417 movdqu %xmm1, -32(%edx, %ecx)
418 movdqu %xmm2, -48(%edx, %ecx)
419 movdqu %xmm3, -64(%edx, %ecx)
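Lines 400-419 differ from the forward ladder in one important way: all four tail chunks are loaded into %xmm0-%xmm3 before any store is issued, with leal (%edx, %ecx), %edi saving the destination's end pointer in between. Loading everything first is what keeps a 64-byte block copy correct when source and destination overlap, memmove-style. A sketch of the pattern; the function name is hypothetical:

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Copy the last 64 bytes of an n-byte block (n >= 64).  All loads
       complete before the first store, so an overlapping destination
       cannot clobber source bytes that are still needed.  */
    static void copy_last_64_batched(uint8_t *dst, const uint8_t *src, size_t n)
    {
        const __m128i a = _mm_loadu_si128((const __m128i *)(src + n - 16));
        const __m128i b = _mm_loadu_si128((const __m128i *)(src + n - 32));
        const __m128i c = _mm_loadu_si128((const __m128i *)(src + n - 48));
        const __m128i d = _mm_loadu_si128((const __m128i *)(src + n - 64));
        _mm_storeu_si128((__m128i *)(dst + n - 16), a);
        _mm_storeu_si128((__m128i *)(dst + n - 32), b);
        _mm_storeu_si128((__m128i *)(dst + n - 48), c);
        _mm_storeu_si128((__m128i *)(dst + n - 64), d);
    }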
432 cmp $SHARED_CACHE_SIZE_HALF, %ecx
463 testl %ecx, %ecx
469 movzbl -1(%eax,%ecx), %ebx
471 movb %bl, -1(%edx,%ecx)
476 movzwl -2(%eax,%ecx), %ebx
478 movw %bx, -2(%edx,%ecx)
484 movl -4(%eax,%ecx), %ebx
485 movl -8(%eax,%ecx), %esi
486 movl %ebx, -4(%edx,%ecx)
487 movl %esi, -8(%edx,%ecx)
488 subl $8, %ecx
494 movl -4(%eax,%ecx), %eax
496 movl %eax, -4(%edx,%ecx)
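Lines 463-496 are the scalar tail of this path: the usual movb/movw dispatch, then a loop that walks backward eight bytes per iteration with paired movl moves (subl $8, %ecx) and finishes with a single dword. A hedged C equivalent of the loop, assuming the count is a positive multiple of 4 on entry; the exact branch structure around the assembly loop is not visible in this listing:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Backward dword-pair loop: two 4-byte moves per step, then one
       final dword, mirroring lines 484-496.  */
    static void copy_dwords_backward(uint8_t *dst, const uint8_t *src, size_t n)
    {
        while (n >= 8) {
            uint32_t hi, lo;
            memcpy(&hi, src + n - 4, 4);
            memcpy(&lo, src + n - 8, 4);
            memcpy(dst + n - 4, &hi, 4);
            memcpy(dst + n - 8, &lo, 4);
            n -= 8;
        }
        if (n >= 4) {
            uint32_t w;
            memcpy(&w, src + n - 4, 4);
            memcpy(dst + n - 4, &w, 4);
        }
    }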