Lines Matching refs:__

17 #define __ masm.
51 __ vmov(input, r0, r1);
53 __ Push(temp3, temp2, temp1);
57 __ Pop(temp3, temp2, temp1);
59 __ vmov(d0, result);
61 __ vmov(r0, r1, result);
63 __ Ret();
103 __ pld(MemOperand(src, 0));
105 __ cmp(chars, Operand(8));
106 __ b(lt, &size_less_than_8);
107 __ cmp(chars, Operand(32));
108 __ b(lt, &less_32);
110 __ pld(MemOperand(src, 32));
112 __ cmp(chars, Operand(64));
113 __ b(lt, &less_64);
114 __ pld(MemOperand(src, 64));
116 __ pld(MemOperand(src, 96));
118 __ cmp(chars, Operand(128));
119 __ b(lt, &less_128);
120 __ pld(MemOperand(src, 128));
122 __ pld(MemOperand(src, 160));
124 __ pld(MemOperand(src, 192));
126 __ pld(MemOperand(src, 224));
128 __ cmp(chars, Operand(256));
129 __ b(lt, &less_256);
130 __ sub(chars, chars, Operand(256));
132 __ bind(&loop);
133 __ pld(MemOperand(src, 256));
134 __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
136 __ pld(MemOperand(src, 256));
138 __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
139 __ sub(chars, chars, Operand(64), SetCC);
140 __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
141 __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
142 __ b(ge, &loop);
143 __ add(chars, chars, Operand(256));
145 __ bind(&less_256);
146 __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
147 __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
148 __ sub(chars, chars, Operand(128));
149 __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
150 __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
151 __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
152 __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
153 __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
154 __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
155 __ cmp(chars, Operand(64));
156 __ b(lt, &less_64);
158 __ bind(&less_128);
159 __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
160 __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
161 __ sub(chars, chars, Operand(64));
162 __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
163 __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
165 __ bind(&less_64);
166 __ cmp(chars, Operand(32));
167 __ b(lt, &less_32);
168 __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
169 __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
170 __ sub(chars, chars, Operand(32));
172 __ bind(&less_32);
173 __ cmp(chars, Operand(16));
174 __ b(le, &_16_or_less);
175 __ vld1(Neon8, NeonListOperand(d0, 2), NeonMemOperand(src, PostIndex));
176 __ vst1(Neon8, NeonListOperand(d0, 2), NeonMemOperand(dest, PostIndex));
177 __ sub(chars, chars, Operand(16));
179 __ bind(&_16_or_less);
180 __ cmp(chars, Operand(8));
181 __ b(le, &_8_or_less);
182 __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src, PostIndex));
183 __ vst1(Neon8, NeonListOperand(d0), NeonMemOperand(dest, PostIndex));
184 __ sub(chars, chars, Operand(8));
187 __ bind(&_8_or_less);
188 __ rsb(chars, chars, Operand(8));
189 __ sub(src, src, Operand(chars));
190 __ sub(dest, dest, Operand(chars));
191 __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src));
192 __ vst1(Neon8, NeonListOperand(d0), NeonMemOperand(dest));
194 __ Ret();
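
The &_8_or_less tail above (the rsb/sub pair before the final vld1/vst1) finishes the NEON path without a per-byte loop: it backs src and dest up by 8 - chars so that one last full 8-byte load/store ends exactly at the end of both buffers, harmlessly re-copying a few bytes that were already written. A minimal C++ sketch of the same idea, assuming non-overlapping buffers and at least 8 bytes copied ahead of the tail:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Illustrative only: copy the final `chars` (0..8) bytes by sliding both
    // pointers back so a single fixed 8-byte copy lands flush with the end.
    static void CopyTail8(uint8_t* dest, const uint8_t* src, size_t chars) {
      size_t back = 8 - chars;     // rsb(chars, chars, Operand(8))
      src -= back;                 // sub(src, src, Operand(chars))
      dest -= back;                // sub(dest, dest, Operand(chars))
      std::memcpy(dest, src, 8);   // vld1/vst1 through d0
    }
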
196 __ bind(&size_less_than_8);
198 __ bic(temp1, chars, Operand(0x3), SetCC);
199 __ b(&less_4, eq);
200 __ ldr(temp1, MemOperand(src, 4, PostIndex));
201 __ str(temp1, MemOperand(dest, 4, PostIndex));
206 __ bic(temp2, chars, Operand(0x3), SetCC);
207 __ b(&less_4, eq);
208 __ add(temp2, dest, temp2);
210 __ bind(&loop);
211 __ ldr(temp1, MemOperand(src, 4, PostIndex));
212 __ str(temp1, MemOperand(dest, 4, PostIndex));
213 __ cmp(dest, temp2);
214 __ b(&loop, ne);
217 __ bind(&less_4);
218 __ mov(chars, Operand(chars, LSL, 31), SetCC);
220 __ ldrh(temp1, MemOperand(src, 2, PostIndex), cs);
221 __ strh(temp1, MemOperand(dest, 2, PostIndex), cs);
222 __ ldrb(temp1, MemOperand(src), ne);
223 __ strb(temp1, MemOperand(dest), ne);
224 __ Ret();
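
In the &size_less_than_8 path, mov(chars, Operand(chars, LSL, 31), SetCC) folds the last two bits of the byte count into the flags: bit 1 ends up in the carry flag (cs) and bit 0 decides the zero flag (ne), so the trailing halfword and byte can be copied with conditional ldrh/strh and ldrb/strb instead of extra branches. A C++ sketch of the equivalent control flow, assuming non-overlapping buffers:

    #include <cstdint>
    #include <cstring>

    // Illustrative only: copy the last 0..3 bytes the way the conditional
    // ldrh/strh (carry set <=> bit 1) and ldrb/strb (not zero <=> bit 0) do.
    static void CopyTail3(uint8_t* dest, const uint8_t* src, uint32_t chars) {
      if (chars & 2) {              // "cs": bit 1 was shifted out into carry
        std::memcpy(dest, src, 2);  // ldrh / strh with post-increment
        src += 2;
        dest += 2;
      }
      if (chars & 1) {              // "ne": bit 0 survives in the shifted result
        *dest = *src;               // ldrb / strb
      }
    }
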
258 __ bic(temp, chars, Operand(0x7));
259 __ sub(chars, chars, Operand(temp));
260 __ add(temp, dest, Operand(temp, LSL, 1));
262 __ bind(&loop);
263 __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src, PostIndex));
264 __ vmovl(NeonU8, q0, d0);
265 __ vst1(Neon16, NeonListOperand(d0, 2), NeonMemOperand(dest, PostIndex));
266 __ cmp(dest, temp);
267 __ b(&loop, ne);
270 __ rsb(chars, chars, Operand(8));
271 __ sub(src, src, Operand(chars));
272 __ sub(dest, dest, Operand(chars, LSL, 1));
273 __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src));
274 __ vmovl(NeonU8, q0, d0);
275 __ vst1(Neon16, NeonListOperand(d0, 2), NeonMemOperand(dest));
276 __ Ret();
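
This Latin-1 to UC16 copy widens eight bytes per iteration: vld1 fills d0, vmovl(NeonU8, q0, d0) zero-extends each byte to 16 bits, and vst1 writes the sixteen resulting bytes; the closing rsb/sub pair is the same overlapping-tail trick as above, with dest backed up twice as far (LSL 1) because every source byte becomes two destination bytes. A scalar C++ sketch of the widening itself, assuming non-overlapping buffers:

    #include <cstddef>
    #include <cstdint>

    // Illustrative only: widen `chars` Latin-1 bytes to UTF-16 code units,
    // in the spirit of vld1 + vmovl(NeonU8) + vst1 eight bytes at a time.
    static void WidenUint8ToUint16(uint16_t* dest, const uint8_t* src,
                                   size_t chars) {
      for (size_t i = 0; i < chars; ++i) {
        dest[i] = static_cast<uint16_t>(src[i]);  // zero-extend each byte
      }
    }
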
285 __ Push(lr, r4);
286 __ bic(temp2, chars, Operand(0x3));
287 __ add(temp2, dest, Operand(temp2, LSL, 1));
289 __ bind(&loop);
290 __ ldr(temp1, MemOperand(src, 4, PostIndex));
291 __ uxtb16(temp3, Operand(temp1, ROR, 0));
292 __ uxtb16(temp4, Operand(temp1, ROR, 8));
293 __ pkhbt(temp1, temp3, Operand(temp4, LSL, 16));
294 __ str(temp1, MemOperand(dest));
295 __ pkhtb(temp1, temp4, Operand(temp3, ASR, 16));
296 __ str(temp1, MemOperand(dest, 4));
297 __ add(dest, dest, Operand(8));
298 __ cmp(dest, temp2);
299 __ b(&loop, ne);
301 __ mov(chars, Operand(chars, LSL, 31), SetCC); // bit0 => ne, bit1 => cs
302 __ b(&not_two, cc);
303 __ ldrh(temp1, MemOperand(src, 2, PostIndex));
304 __ uxtb(temp3, Operand(temp1, ROR, 8));
305 __ mov(temp3, Operand(temp3, LSL, 16));
306 __ uxtab(temp3, temp3, Operand(temp1, ROR, 0));
307 __ str(temp3, MemOperand(dest, 4, PostIndex));
308 __ bind(&not_two);
309 __ ldrb(temp1, MemOperand(src), ne);
310 __ strh(temp1, MemOperand(dest), ne);
311 __ Pop(pc, r4);
336 __ MovFromFloatParameter(d0);
337 __ vsqrt(d0, d0);
338 __ MovToFloatResult(d0);
339 __ Ret();
351 #undef __
374 #define __ ACCESS_MASM(masm)
390 __ JumpIfJSArrayHasAllocationMemento(
395 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
396 __ RecordWriteField(receiver,
431 __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail);
436 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
437 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
438 __ b(eq, &only_change_map);
440 __ push(lr);
441 __ ldr(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
446 __ mov(lr, Operand(length, LSL, 2));
447 __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize));
448 __ Allocate(lr, array, elements, scratch2, &gc_required, DOUBLE_ALIGNMENT);
450 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
454 __ LoadRoot(scratch2, Heap::kFixedDoubleArrayMapRootIndex);
455 __ str(length, MemOperand(array, FixedDoubleArray::kLengthOffset));
457 __ str(scratch2, MemOperand(array, HeapObject::kMapOffset));
459 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
460 __ RecordWriteField(receiver,
469 __ add(scratch1, array, Operand(kHeapObjectTag));
470 __ str(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
471 __ RecordWriteField(receiver,
481 __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
482 __ add(scratch2, array, Operand(FixedDoubleArray::kHeaderSize));
483 __ add(array_end, scratch2, Operand(length, LSL, 2));
489 __ mov(hole_lower, Operand(kHoleNanLower32));
490 __ mov(hole_upper, Operand(kHoleNanUpper32));
497 __ b(&entry);
499 __ bind(&only_change_map);
500 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
501 __ RecordWriteField(receiver,
509 __ b(&done);
512 __ bind(&gc_required);
513 __ pop(lr);
514 __ b(fail);
517 __ bind(&loop);
518 __ ldr(lr, MemOperand(scratch1, 4, PostIndex));
520 __ UntagAndJumpIfNotSmi(lr, lr, &convert_hole);
523 __ vmov(s0, lr);
524 __ vcvt_f64_s32(d0, s0);
525 __ vstr(d0, scratch2, 0);
526 __ add(scratch2, scratch2, Operand(8));
527 __ b(&entry);
530 __ bind(&convert_hole);
533 __ SmiTag(lr);
534 __ orr(lr, lr, Operand(1));
535 __ CompareRoot(lr, Heap::kTheHoleValueRootIndex);
536 __ Assert(eq, kObjectFoundInSmiOnlyArray);
538 __ Strd(hole_lower, hole_upper, MemOperand(scratch2, 8, PostIndex));
540 __ bind(&entry);
541 __ cmp(scratch2, array_end);
542 __ b(lt, &loop);
544 __ pop(lr);
545 __ bind(&done);
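
The &loop/&convert_hole pair above converts one tagged element per iteration: a Smi is untagged and turned into a double with vcvt_f64_s32/vstr, while the hole (double-checked under the Assert) is written out as the canonical hole NaN via Strd of hole_lower/hole_upper. A minimal C++ sketch of that per-element step, assuming 32-bit Smi tagging (tag bit 0 clear, value in the upper 31 bits) and that kHoleNanLower32/kHoleNanUpper32 hold the hole-NaN bit pattern:

    #include <cstdint>
    #include <cstring>

    // Illustrative only: convert one FixedArray slot into one
    // FixedDoubleArray slot.
    static void ConvertElement(uint32_t tagged, uint32_t hole_lower,
                               uint32_t hole_upper, uint64_t* out) {
      if ((tagged & 1) == 0) {                  // Smi: untag and convert
        double d = static_cast<double>(static_cast<int32_t>(tagged) >> 1);
        std::memcpy(out, &d, sizeof(d));        // vcvt_f64_s32 + vstr
      } else {                                  // the hole: store hole NaN
        uint64_t hole =
            (static_cast<uint64_t>(hole_upper) << 32) | hole_lower;
        std::memcpy(out, &hole, sizeof(hole));  // Strd hole_lower, hole_upper
      }
    }
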
569 __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail);
574 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
575 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
576 __ b(eq, &only_change_map);
578 __ push(lr);
579 __ Push(target_map, receiver, key, value);
580 __ ldr(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
589 __ mov(array_size, Operand(FixedDoubleArray::kHeaderSize));
590 __ add(array_size, array_size, Operand(length, LSL, 1));
591 __ Allocate(array_size, array, allocate_scratch, scratch, &gc_required,
595 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
596 __ str(length, MemOperand(array, FixedDoubleArray::kLengthOffset));
597 __ str(scratch, MemOperand(array, HeapObject::kMapOffset));
604 __ add(src_elements, elements,
606 __ add(dst_elements, array, Operand(FixedArray::kHeaderSize));
607 __ add(array, array, Operand(kHeapObjectTag));
608 __ add(dst_end, dst_elements, Operand(length, LSL, 1));
609 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
618 __ b(&entry);
621 __ bind(&gc_required);
622 __ Pop(target_map, receiver, key, value);
623 __ pop(lr);
624 __ b(fail);
626 __ bind(&loop);
628 __ ldr(upper_bits, MemOperand(src_elements, 8, PostIndex));
631 __ cmp(upper_bits, Operand(kHoleNanUpper32));
632 __ b(eq, &convert_hole);
637 __ AllocateHeapNumber(heap_number, scratch2, lr, heap_number_map,
640 __ ldr(scratch2, MemOperand(src_elements, 12, NegOffset));
641 __ Strd(scratch2, upper_bits,
643 __ mov(scratch2, dst_elements);
644 __ str(heap_number, MemOperand(dst_elements, 4, PostIndex));
645 __ RecordWrite(array,
652 __ b(&entry);
655 __ bind(&convert_hole);
656 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
657 __ str(scratch2, MemOperand(dst_elements, 4, PostIndex));
659 __ bind(&entry);
660 __ cmp(dst_elements, dst_end);
661 __ b(lt, &loop);
663 __ Pop(target_map, receiver, key, value);
665 __ str(array, FieldMemOperand(receiver, JSObject::kElementsOffset));
666 __ RecordWriteField(receiver,
674 __ pop(lr);
676 __ bind(&only_change_map);
678 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
679 __ RecordWriteField(receiver,
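
In the double-to-object loop above, a slot of the source FixedDoubleArray is recognized as the hole purely by its upper 32 bits (the cmp against kHoleNanUpper32): holes become the-hole references, everything else is boxed into a freshly allocated HeapNumber and recorded with a write barrier. A minimal C++ sketch of the hole test, assuming kHoleNanUpper32 is the upper word of the canonical hole NaN:

    #include <cstdint>

    // Illustrative only: decide whether a FixedDoubleArray slot holds the
    // hole. On little-endian ARM the upper word sits at offset +4, which is
    // why the loop reads it first and fetches the lower word later with the
    // "12, NegOffset" load.
    static bool IsHole(uint64_t slot_bits, uint32_t kHoleNanUpper32) {
      return static_cast<uint32_t>(slot_bits >> 32) == kHoleNanUpper32;
    }
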
696 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
697 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
701 __ tst(result, Operand(kIsIndirectStringMask));
702 __ b(eq, &check_sequential);
706 __ tst(result, Operand(kSlicedNotConsMask));
707 __ b(eq, &cons_string);
711 __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
712 __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
713 __ add(index, index, Operand::SmiUntag(result));
714 __ jmp(&indirect_string_loaded);
721 __ bind(&cons_string);
722 __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
723 __ CompareRoot(result, Heap::kempty_stringRootIndex);
724 __ b(ne, call_runtime);
726 __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
728 __ bind(&indirect_string_loaded);
729 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
730 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
736 __ bind(&check_sequential);
738 __ tst(result, Operand(kStringRepresentationMask));
739 __ b(ne, &external_string);
743 __ add(string,
746 __ jmp(&check_encoding);
749 __ bind(&external_string);
753 __ tst(result, Operand(kIsIndirectStringMask));
754 __ Assert(eq, kExternalStringExpectedButNotFound);
758 __ tst(result, Operand(kShortExternalStringMask));
759 __ b(ne, call_runtime);
760 __ ldr(string, FieldMemOperand(string, ExternalString::kResourceDataOffset));
763 __ bind(&check_encoding);
765 __ tst(result, Operand(kStringEncodingMask));
766 __ b(ne, &one_byte);
768 __ ldrh(result, MemOperand(string, index, LSL, 1));
769 __ jmp(&done);
770 __ bind(&one_byte);
772 __ ldrb(result, MemOperand(string, index));
773 __ bind(&done);
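
The character-load helper above first unwraps indirect strings (a sliced string adds its offset and continues with its parent; a cons string is only handled when its second half is the empty string, in which case it continues with the first half), then either adjusts a sequential string to point at its payload or loads an external string's resource data, and finally does an ldrh or ldrb depending on the encoding bit. A hedged C++ sketch of that decision tree, where the struct and its fields are hypothetical stand-ins for the map/instance-type loads and FieldMemOperand accesses:

    #include <cstdint>

    // Illustrative only; not V8's actual object layout.
    struct StringRep {
      bool is_indirect, is_sliced, is_external, is_short_external, is_two_byte;
      StringRep* parent;       // sliced: parent, cons: first child
      StringRep* second;       // cons only; nullptr stands in for empty_string
      uint32_t offset;         // sliced only
      const uint8_t* payload;  // sequential payload or external resource data
    };

    static uint32_t LoadChar(StringRep* str, uint32_t index,
                             bool* call_runtime) {
      if (str->is_indirect) {
        if (str->is_sliced) {        // follow the parent, add the slice offset
          index += str->offset;
          str = str->parent;
        } else {                     // cons: only flattened cons strings here
          if (str->second != nullptr) { *call_runtime = true; return 0; }
          str = str->parent;         // continue with the first child
        }
      }
      if (str->is_external && str->is_short_external) {
        *call_runtime = true;        // short external strings bail out
        return 0;
      }
      const uint8_t* data = str->payload;
      return str->is_two_byte
                 ? reinterpret_cast<const uint16_t*>(data)[index]  // ldrh
                 : data[index];                                    // ldrb
    }
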
804 __ mov(temp3, Operand(ExternalReference::math_exp_constants(0)));
806 __ vldr(double_scratch1, ExpConstant(0, temp3));
807 __ VFPCompareAndSetFlags(double_scratch1, input);
808 __ b(ge, &zero);
810 __ vldr(double_scratch2, ExpConstant(1, temp3));
811 __ VFPCompareAndSetFlags(input, double_scratch2);
812 __ b(ge, &infinity);
814 __ vldr(double_scratch1, ExpConstant(3, temp3));
815 __ vldr(result, ExpConstant(4, temp3));
816 __ vmul(double_scratch1, double_scratch1, input);
817 __ vadd(double_scratch1, double_scratch1, result);
818 __ VmovLow(temp2, double_scratch1);
819 __ vsub(double_scratch1, double_scratch1, result);
820 __ vldr(result, ExpConstant(6, temp3));
821 __ vldr(double_scratch2, ExpConstant(5, temp3));
822 __ vmul(double_scratch1, double_scratch1, double_scratch2);
823 __ vsub(double_scratch1, double_scratch1, input);
824 __ vsub(result, result, double_scratch1);
825 __ vmul(double_scratch2, double_scratch1, double_scratch1);
826 __ vmul(result, result, double_scratch2);
827 __ vldr(double_scratch2, ExpConstant(7, temp3));
828 __ vmul(result, result, double_scratch2);
829 __ vsub(result, result, double_scratch1);
833 __ vmov(double_scratch2, 1);
834 __ vadd(result, result, double_scratch2);
835 __ mov(temp1, Operand(temp2, LSR, 11));
836 __ Ubfx(temp2, temp2, 0, 11);
837 __ add(temp1, temp1, Operand(0x3ff));
840 __ mov(temp3, Operand(ExternalReference::math_exp_log_table()));
841 __ add(temp3, temp3, Operand(temp2, LSL, 3));
842 __ ldm(ia, temp3, temp2.bit() | temp3.bit());
845 __ orr(temp1, temp3, Operand(temp1, LSL, 20));
846 __ vmov(double_scratch1, temp2, temp1);
848 __ orr(temp1, temp2, Operand(temp1, LSL, 20));
849 __ vmov(double_scratch1, temp3, temp1);
851 __ vmul(result, result, double_scratch1);
852 __ b(&done);
854 __ bind(&zero);
855 __ vmov(result, kDoubleRegZero);
856 __ b(&done);
858 __ bind(&infinity);
859 __ vldr(result, ExpConstant(2, temp3));
861 __ bind(&done);
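
The tail of the exp sequence above splits the fixed-point value captured in temp2 into an 11-bit table index (Ubfx) and a power-of-two exponent (LSR 11, biased by 0x3ff), then splices that biased exponent into the high word of the table entry with a single orr before multiplying the result by it. A hedged C++ sketch of that bit manipulation, assuming math_exp_log_table holds plain IEEE-754 doubles whose exponent field the orr is free to fill in:

    #include <cstdint>
    #include <cstring>

    // Illustrative only: rebuild the scale factor the way the
    // Ubfx / LSR 11 / add 0x3ff / orr(..., LSL, 20) / vmov sequence does.
    // `table` stands in for the 2048-entry math_exp_log_table.
    static double BuildScale(const uint64_t* table, uint32_t fixed_point_bits) {
      uint32_t j = fixed_point_bits & 0x7ff;          // Ubfx(temp2, temp2, 0, 11)
      uint32_t k = (fixed_point_bits >> 11) + 0x3ff;  // LSR 11, add IEEE bias
      uint64_t bits = table[j];                       // ldm of the table entry
      bits |= static_cast<uint64_t>(k) << 52;         // orr into the high word
      double scale;
      std::memcpy(&scale, &bits, sizeof(scale));      // vmov into a d-register
      return scale;                                   // vmul into the result
    }
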
864 #undef __