Lines Matching refs:__

43 #define __ masm()->
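The define above is the shorthand used by every match that follows: the preprocessor expands `__ Op(...)` into `masm()->Op(...)`, i.e. a call on the code generator's MacroAssembler (the macro also shows up mid-expression, as in `__ StackPointer()` at line 664). Below is a minimal sketch of the convention, not code from the file itself: the method name DoExampleOp is hypothetical, while Mov, Add and Ret are assembler calls that also occur in the matches further down.

    #define __ masm()->           // "__ Mov(x0, 42);" expands to "masm()->Mov(x0, 42);"

    void LCodeGen::DoExampleOp() { // hypothetical method, for illustration only
      __ Mov(x0, 42);              // masm()->Mov(x0, 42);
      __ Add(x0, x0, x1);          // masm()->Add(x0, x0, x1);
      __ Ret();                    // masm()->Ret();
    }
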
57 __ B(cond_, label);
62 __ B(NegateCondition(cond_), label);
88 __ CompareAndBranch(lhs_, rhs_, cond_, label);
92 __ CompareAndBranch(lhs_, rhs_, NegateCondition(cond_), label);
120 __ TestAndBranchIfAllClear(value_, mask_, label);
123 __ TestAndBranchIfAnySet(value_, mask_, label);
126 __ Tst(value_, mask_);
127 __ B(cond_, label);
135 __ TestAndBranchIfAnySet(value_, mask_, label);
138 __ TestAndBranchIfAllClear(value_, mask_, label);
141 __ Tst(value_, mask_);
142 __ B(NegateCondition(cond_), label);
161 __ Fabs(scratch_, value_);
164 __ Fcmp(scratch_, 0.0);
165 __ B(gt, label);
169 __ Fabs(scratch_, value_);
170 __ Fcmp(scratch_, 0.0);
171 __ B(le, label);
187 __ JumpIfHeapNumber(value_, label);
191 __ JumpIfNotHeapNumber(value_, label);
207 __ JumpIfRoot(value_, index_, label);
211 __ JumpIfNotRoot(value_, index_, label);
395 __ Call(code, mode);
424 __ Mov(x0, instr->arity());
426 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
441 __ Mov(x0, Operand(instr->arity()));
442 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
459 __ Peek(x10, 0);
460 __ Cbz(x10, &packed_case);
467 __ B(&done);
468 __ Bind(&packed_case);
473 __ Bind(&done);
490 __ CallRuntime(function, num_arguments, save_doubles);
498 __ Mov(cp, ToRegister(context));
500 __ Ldr(cp, ToMemOperand(context, kMustUseFramePointer));
504 __ LoadHeapObject(cp,
517 __ CallRuntimeSaveDoubles(id);
612 __ Poke(value, count * kDoubleSize);
630 __ Peek(value, count * kDoubleSize);
653 __ Peek(x10, receiver_offset);
654 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
656 __ Ldr(x10, GlobalObjectMemOperand());
657 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
658 __ Poke(x10, receiver_offset);
660 __ Bind(&ok);
664 DCHECK(__ StackPointer().Is(jssp));
668 __ StubPrologue();
670 __ Prologue(info()->IsCodePreAgingActive());
679 __ Claim(slots, kPointerSize);
694 __ CallStub(&stub);
698 __ Push(x1);
699 __ CallRuntime(Runtime::kNewFunctionContext, 1);
704 __ Mov(cp, x0);
705 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
717 __ Ldr(value, MemOperand(fp, parameter_offset));
720 __ Str(value, target);
723 __ RecordWriteContextSlot(cp, target.offset(), value, scratch,
727 __ JumpIfInNewSpace(cp, &done);
728 __ Abort(kExpectedNewSpaceObject);
729 __ bind(&done);
740 __ CallRuntime(Runtime::kTraceEnter, 0);
758 __ Claim(slots);
789 __ Bind(code->entry());
796 __ Push(lr, fp, cp);
797 __ Mov(fp, Smi::FromInt(StackFrame::STUB));
798 __ Push(fp);
799 __ Add(fp, __ StackPointer(),
809 __ Pop(xzr, cp, fp, lr);
813 __ B(code->exit());
840 __ Bind(&table_entry->label);
848 __ Mov(entry_offset, entry - base);
864 __ Bind(&needs_frame);
865 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB));
866 __ Push(lr, fp, cp, stub_marker);
867 __ Add(fp, __ StackPointer(), 2 * kPointerSize);
868 if (!last_entry) __ B(&call_deopt_entry);
871 __ B(&needs_frame);
876 __ Bind(&restore_caller_doubles);
878 if (!last_entry) __ B(&call_deopt_entry);
881 __ B(&restore_caller_doubles);
886 if (!last_entry) __ B(&call_deopt_entry);
894 __ Bind(&call_deopt_entry);
895 __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base),
897 __ Add(deopt_entry, deopt_entry, entry_offset);
898 __ Call(deopt_entry);
1017 __ Push(x0, x1, x2);
1018 __ Mrs(x2, NZCV);
1019 __ Mov(x0, count);
1020 __ Ldr(w1, MemOperand(x0));
1021 __ Subs(x1, x1, 1);
1022 __ B(gt, &not_zero);
1023 __ Mov(w1, FLAG_deopt_every_n_times);
1024 __ Str(w1, MemOperand(x0));
1025 __ Pop(x2, x1, x0);
1027 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
1028 __ Unreachable();
1030 __ Bind(&not_zero);
1031 __ Str(w1, MemOperand(x0));
1032 __ Msr(NZCV, x2);
1033 __ Pop(x2, x1, x0);
1038 __ B(&dont_trap, InvertBranchType(branch_type), reg, bit);
1039 __ Debug("trap_on_deopt", __LINE__, BREAK);
1040 __ Bind(&dont_trap);
1050 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
1061 __ B(&jump_table_.last()->label, branch_type, reg, bit);
1112 __ CompareRoot(rt, index);
1119 __ CompareRoot(rt, index);
1126 __ TestForMinusZero(input);
1132 __ CompareObjectMap(object, Heap::kHeapNumberMapRootIndex);
1162 __ nop();
1389 __ B(chunk_->GetAssemblyLabel(right_block));
1478 __ Ldr(result, MemOperand(arguments, offset));
1484 __ Sub(result.W(), length, loc);
1485 __ Ldr(result, MemOperand(arguments, result, UXTW, kPointerSizeLog2));
1487 __ Ldr(result, MemOperand(arguments, length, UXTW, kPointerSizeLog2));
1492 __ Sub(result.W(), length, index);
1493 __ Add(result.W(), result.W(), 1);
1494 __ Ldr(result, MemOperand(arguments, result, UXTW, kPointerSizeLog2));
1507 __ Add(result, left, right);
1518 __ Adds(result, left, right);
1521 __ Add(result, left, right);
1532 __ Adds(result, left, right);
1535 __ Add(result, left, right);
1575 __ Allocate(size, result, temp1, temp2, deferred->entry(), flags);
1577 __ B(deferred->entry());
1581 __ Sxtw(size.X(), size);
1582 __ Allocate(size.X(), result, temp1, temp2, deferred->entry(), flags);
1585 __ Bind(deferred->exit());
1594 __ Mov(filler_count, size / kPointerSize);
1596 __ Lsr(filler_count.W(), ToRegister32(instr->size()), kPointerSizeLog2);
1599 __ Sub(untagged_result, result, kHeapObjectTag);
1600 __ Mov(filler, Operand(isolate()->factory()->one_pointer_filler_map()));
1601 __ FillFields(untagged_result, filler_count, filler);
1612 __ Mov(ToRegister(instr->result()), Smi::FromInt(0));
1618 __ Mov(size, ToSmi(LConstantOperand::cast(instr->size())));
1620 __ SmiTag(size, ToRegister32(instr->size()).X());
1634 __ Mov(x10, Smi::FromInt(flags));
1635 __ Push(size, x10);
1639 __ StoreToSafepointRegisterSlot(x0, ToRegister(instr->result()));
1658 __ Cmp(length, kArgumentsLimit);
1663 __ Push(receiver);
1666 __ Sxtw(argc, length);
1668 __ Add(elements, elements, 1 * kPointerSize);
1674 __ Cbz(length, &invoke);
1675 __ Bind(&loop);
1676 __ Ldr(scratch, MemOperand(elements, length, SXTW, kPointerSizeLog2));
1677 __ Push(scratch);
1678 __ Subs(length, length, 1);
1679 __ B(ne, &loop);
1681 __ Bind(&invoke);
1688 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
1709 __ Sub(result, jssp, 2 * kPointerSize);
1714 __ Ldr(previous_fp,
1716 __ Ldr(result,
1718 __ Cmp(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1719 __ Csel(result, fp, previous_fp, ne);
1730 __ Cmp(fp, elements);
1731 __ Mov(result, scope()->num_parameters());
1732 __ B(eq, &done);
1735 __ Ldr(result.X(), MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1736 __ Ldr(result,
1741 __ Bind(&done);
1751 case Token::ADD: __ Fadd(result, left, right); break;
1752 case Token::SUB: __ Fsub(result, left, right); break;
1753 case Token::MUL: __ Fmul(result, left, right); break;
1754 case Token::DIV: __ Fdiv(result, left, right); break;
1767 __ CallCFunction(
1798 case Token::BIT_AND: __ And(result, left, right); break;
1799 case Token::BIT_OR: __ Orr(result, left, right); break;
1800 case Token::BIT_XOR: __ Eor(result, left, right); break;
1814 case Token::BIT_AND: __ And(result, left, right); break;
1815 case Token::BIT_OR: __ Orr(result, left, right); break;
1816 case Token::BIT_XOR: __ Eor(result, left, right); break;
1831 __ Cmp(length, index);
1836 __ Cmp(index, length);
1839 __ Assert(NegateCondition(cond), kEliminatedBoundsCheckFailed);
1869 __ CompareRoot(value, Heap::kTrueValueRootIndex);
1879 __ Ldr(double_scratch(), FieldMemOperand(value,
1886 __ Ldr(temp, FieldMemOperand(value, String::kLengthOffset));
1895 __ JumpIfRoot(
1901 __ JumpIfRoot(
1903 __ JumpIfRoot(
1909 __ JumpIfRoot(
1916 __ Cbz(value, false_label);
1917 __ JumpIfSmi(value, true_label);
1931 __ Ldr(map, FieldMemOperand(value, HeapObject::kMapOffset));
1935 __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
1936 __ TestAndBranchIfAnySet(
1943 __ CompareInstanceType(map, scratch, FIRST_SPEC_OBJECT_TYPE);
1944 __ B(ge, true_label);
1950 __ CompareInstanceType(map, scratch, FIRST_NONSTRING_TYPE);
1951 __ B(ge, &not_string);
1952 __ Ldr(scratch, FieldMemOperand(value, String::kLengthOffset));
1953 __ Cbz(scratch, false_label);
1954 __ B(true_label);
1955 __ Bind(&not_string);
1960 __ CompareInstanceType(map, scratch, SYMBOL_TYPE);
1961 __ B(eq, true_label);
1966 __ JumpIfNotRoot(map, Heap::kHeapNumberMapRootIndex, &not_heap_number);
1968 __ Ldr(double_scratch(),
1970 __ Fcmp(double_scratch(), 0.0);
1972 __ B(vs, false_label);
1973 __ B(eq, false_label);
1974 __ B(true_label);
1975 __ Bind(&not_heap_number);
2007 __ LoadObject(function_reg, function);
2013 __ JumpIfNotSmi(function_reg, &is_not_smi);
2014 __ Abort(kExpectedFunctionObject);
2015 __ Bind(&is_not_smi);
2020 __ Ldr(cp, FieldMemOperand(function_reg, JSFunction::kContextOffset));
2025 __ Mov(arity_reg, arity);
2029 __ Ldr(x10, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset));
2030 __ Call(x10);
2038 __ InvokeFunction(function_reg, expected, count, CALL_FUNCTION, generator);
2066 if (must_teardown_frame) __ LeaveFrame(StackFrame::INTERNAL);
2081 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET));
2085 __ Call(code, RelocInfo::CODE_TARGET, TypeFeedbackId::None());
2089 generator.BeforeCall(__ CallSize(target));
2090 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
2091 __ Call(target);
2103 __ Mov(x0, Operand(instr->arity()));
2107 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
2110 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
2111 __ Call(x10);
2159 __ Push(object);
2160 __ Mov(cp, 0);
2161 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
2164 __ StoreToSafepointRegisterSlot(x0, temp);
2199 __ Ldr(map_reg, FieldMemOperand(object, HeapObject::kMapOffset));
2204 __ Bind(deferred->check_maps());
2211 __ CompareMap(map_reg, map);
2212 __ B(eq, &success);
2215 __ CompareMap(map_reg, map);
2219 __ B(ne, deferred->entry());
2224 __ Bind(&success);
2246 __ Ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2247 __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2253 __ Cmp(scratch, first);
2263 __ Ccmp(scratch, last, CFlag, hs);
2280 __ Tst(scratch, mask);
2282 __ And(scratch, scratch, mask);
2283 __ Cmp(scratch, tag);
2294 __ ClampDoubleToUint8(result, input, double_scratch());
2301 __ ClampInt32ToUint8(result, input);
2312 __ JumpIfNotSmi(input, &is_not_smi);
2313 __ SmiUntag(result.X(), input);
2314 __ ClampInt32ToUint8(result);
2315 __ B(&done);
2317 __ Bind(&is_not_smi);
2321 __ JumpIfHeapNumber(input, &is_heap_number);
2325 __ Mov(result, 0);
2326 __ B(&done);
2329 __ Bind(&is_heap_number);
2332 __ Ldr(dbl_scratch, FieldMemOperand(input, HeapNumber::kValueOffset));
2333 __ ClampDoubleToUint8(result, dbl_scratch, dbl_scratch2);
2335 __ Bind(&done);
2343 __ Fmov(result_reg, value_reg);
2344 __ Lsr(result_reg, result_reg, 32);
2346 __ Fmov(result_reg.W(), value_reg.S());
2358 __ Bfi(lo_reg, hi_reg, 32, 32);
2359 __ Fmov(result_reg, lo_reg);
2371 __ JumpIfSmi(input, false_label);
2385 __ CompareObjectType(input, map, scratch1, FIRST_SPEC_OBJECT_TYPE);
2386 __ B(lt, false_label);
2387 __ B(eq, true_label);
2388 __ Cmp(scratch1, LAST_SPEC_OBJECT_TYPE);
2389 __ B(eq, true_label);
2391 __ IsObjectJSObjectType(input, map, scratch1, false_label);
2396 __ Ldr(scratch1, FieldMemOperand(map, Map::kConstructorOffset));
2400 __ JumpIfNotObjectType(
2403 __ JumpIfNotObjectType(
2408 __ Ldr(scratch1,
2410 __ Ldr(scratch1,
2431 __ Fcmp(object, object);
2432 __ B(vc, instr->FalseLabel(chunk_));
2435 __ Fmov(temp, object);
2452 __ Ldr(map, FieldMemOperand(value, HeapObject::kMapOffset));
2463 __ JumpIfMinusZero(ToDoubleRegister(instr->value()),
2467 __ JumpIfNotHeapNumber(value, instr->FalseLabel(chunk()), DO_SMI_CHECK);
2468 __ Ldr(scratch, FieldMemOperand(value, HeapNumber::kValueOffset));
2469 __ JumpIfMinusZero(scratch, instr->TrueLabel(chunk()));
2492 __ Fcmp(ToDoubleRegister(left), ToDoubleRegister(right));
2496 __ B(vs, instr->FalseLabel(chunk_));
2557 __ LoadTrueFalseRoots(x1, x2);
2558 __ Cmp(x0, 0);
2559 __ Csel(ToRegister(instr->result()), x1, x2, cond);
2568 __ Fmov(result, fp_zero);
2570 __ Fneg(result, fp_zero);
2573 __ Fmov(result, instr->value());
2579 __ Mov(ToRegister(instr->result()), Operand(instr->value()));
2587 __ Mov(ToRegister32(instr->result()), static_cast<uint32_t>(instr->value()));
2592 __ Mov(ToRegister(instr->result()), Operand(instr->value()));
2599 __ LoadObject(ToRegister(instr->result()), object);
2607 __ Ldr(result, MemOperand(fp, StandardFrameConstants::kContextOffset));
2623 __ Mov(temp, Operand(Handle<Object>(cell)));
2624 __ Ldr(temp, FieldMemOperand(temp, Cell::kValueOffset));
2625 __ Cmp(reg, temp);
2627 __ Cmp(reg, Operand(object));
2654 __ CompareObjectType(object, temp1, temp1, JS_DATE_TYPE);
2658 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
2662 __ Mov(temp1, Operand(stamp));
2663 __ Ldr(temp1, MemOperand(temp1));
2664 __ Ldr(temp2, FieldMemOperand(object, JSDate::kCacheStampOffset));
2665 __ Cmp(temp1, temp2);
2666 __ B(ne, &runtime);
2667 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
2669 __ B(&done);
2672 __ Bind(&runtime);
2673 __ Mov(x1, Operand(index));
2674 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
2677 __ Bind(&done);
2711 __ Cmp(dividend, 1);
2718 __ Tst(dividend, mask);
2723 __ Neg(result, dividend);
2728 __ Mov(result, dividend);
2730 __ Add(result, dividend, Operand(dividend, LSR, 31));
2732 __ Mov(result, Operand(dividend, ASR, 31));
2733 __ Add(result, dividend, Operand(result, LSR, 32 - shift));
2735 if (shift > 0) __ Mov(result, Operand(result, ASR, shift));
2736 if (divisor < 0) __ Neg(result, result);
2757 __ TruncatingDiv(result, dividend, Abs(divisor));
2758 if (divisor < 0) __ Neg(result, result);
2763 __ Sxtw(dividend.X(), dividend);
2764 __ Mov(temp, divisor);
2765 __ Smsubl(temp.X(), result, temp, dividend.X());
2780 __ Sdiv(result, dividend, divisor);
2794 __ Cmp(divisor, 0);
2800 __ Ccmp(dividend, 0, NoFlag, mi);
2808 __ Cmp(dividend, 1);
2812 __ Ccmp(divisor, -1, NoFlag, vs);
2818 __ Msub(remainder, result, divisor, dividend);
2831 __ TryRepresentDoubleAsInt32(result, input, double_scratch());
2835 __ SmiTag(result.X());
2841 __ Drop(instr->count());
2866 __ Mov(x2, Operand(instr->hydrogen()->shared_info()));
2869 __ Mov(x2, Operand(instr->hydrogen()->shared_info()));
2870 __ Mov(x1, Operand(pretenure ? factory()->true_value()
2872 __ Push(cp, x2, x1);
2883 __ EnumLengthUntagged(result, map);
2884 __ Cbnz(result, &load_cache);
2886 __ Mov(result, Operand(isolate()->factory()->empty_fixed_array()));
2887 __ B(&done);
2889 __ Bind(&load_cache);
2890 __ LoadInstanceDescriptors(map, result);
2891 __ Ldr(result, FieldMemOperand(result, DescriptorArray::kEnumCacheOffset));
2892 __ Ldr(result, FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
2895 __ Bind(&done);
2908 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
2909 __ Cmp(object, null_value);
2915 __ CompareObjectType(object, x1, x1, LAST_JS_PROXY_TYPE);
2919 __ CheckEnumCache(object, null_value, x1, x2, x3, x4, &call_runtime);
2921 __ Ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
2922 __ B(&use_cache);
2925 __ Bind(&call_runtime);
2926 __ Push(object);
2929 __ Ldr(x1, FieldMemOperand(object, HeapObject::kMapOffset));
2932 __ Bind(&use_cache);
2940 __ AssertString(input);
2944 __ Ldr(result.W(), FieldMemOperand(input, String::kHashFieldOffset));
2945 __ IndexFromHash(result, result);
2952 __ B(chunk_->GetAssemblyLabel(LookupDestination(block)));
2969 __ Ldr(temp, FieldMemOperand(input, String::kHashFieldOffset));
2970 __ Tst(temp, String::kContainsCachedArrayIndexMask);
3014 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
3016 __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
3025 __ Add(result, base, ToOperand32(instr->offset()));
3027 __ Add(result, base, Operand(ToRegister32(instr->offset()), SXTW));
3044 __ Cmp(x0, 0);
3045 __ LoadTrueFalseRoots(x0, x1);
3046 __ Csel(x0, x0, x1, eq);
3082 __ JumpIfSmi(object, &return_false);
3087 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3093 __ bind(&map_check);
3096 __ ldr(scratch, Immediate(Handle<Object>(cell)));
3097 __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
3098 __ cmp(map, scratch);
3099 __ b(&cache_miss, ne);
3104 __ ldr(result, Immediate(factory()->the_hole_value()));
3106 __ B(&done);
3110 __ Bind(&cache_miss);
3113 __ Adr(map_check_site, &map_check);
3115 __ JumpIfRoot(object, Heap::kNullValueRootIndex, &return_false);
3123 __ IsObjectJSStringType(object, scratch, NULL, &return_false);
3124 __ B(deferred->entry());
3126 __ Bind(&return_false);
3127 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3130 __ Bind(deferred->exit());
3131 __ Bind(&done);
3151 __ LoadObject(InstanceofStub::right(), instr->function());
3162 __ StoreToSafepointRegisterSlot(result, result);
3174 __ Scvtf(result, value);
3189 __ InvokeFunction(x1, count, CALL_FUNCTION, generator);
3206 __ Ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3210 __ Ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
3211 __ Cmp(temp2, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3212 __ B(ne, &check_frame_marker);
3213 __ Ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
3216 __ Bind(&check_frame_marker);
3217 __ Ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
3231 __ JumpIfSmi(value, is_not_object);
3232 __ JumpIfRoot(value, Heap::kNullValueRootIndex, is_object);
3234 __ Ldr(map, FieldMemOperand(value, HeapObject::kMapOffset));
3237 __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
3238 __ TestAndBranchIfAnySet(scratch, 1 << Map::kIsUndetectable, is_not_object);
3241 __ IsInstanceJSObjectType(map, scratch, NULL);
3254 __ JumpIfSmi(input, is_not_string);
3256 __ CompareObjectType(input, temp1, temp1, FIRST_NONSTRING_TYPE);
3288 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
3290 __ Ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
3291 __ Ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
3311 __ Bind(label->label());
3320 __ Ldr(result, ContextMemOperand(context, instr->slot_index()));
3326 __ JumpIfNotRoot(result, Heap::kTheHoleValueRootIndex, &not_the_hole);
3327 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3328 __ Bind(&not_the_hole);
3340 __ Ldr(result, FieldMemOperand(function,
3348 __ CompareObjectType(result, temp, temp, MAP_TYPE);
3349 __ B(ne, &done);
3352 __ Ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
3355 __ Bind(&done);
3361 __ Mov(result, Operand(Handle<Object>(instr->hydrogen()->cell().handle())));
3362 __ Ldr(result, FieldMemOperand(result, Cell::kValueOffset));
3374 __ Mov(vector, instr->hydrogen()->feedback_vector());
3377 __ Mov(VectorLoadICDescriptor::SlotRegister(),
3387 __ Mov(LoadDescriptor::NameRegister(), Operand(instr->name()));
3414 __ Add(scratch, base, Operand::UntagSmiAndScale(key, element_size_shift));
3423 __ Add(scratch, base, base_offset);
3457 __ Ldr(result.S(), mem_op);
3458 __ Fcvt(result, result.S());
3462 __ Ldr(result, mem_op);
3469 __ Ldrsb(result, mem_op);
3475 __ Ldrb(result, mem_op);
3479 __ Ldrsh(result, mem_op);
3483 __ Ldrh(result, mem_op);
3487 __ Ldrsw(result, mem_op);
3491 __ Ldr(result.W(), mem_op);
3494 __ Tst(result, 0xFFFFFFFF80000000);
3533 __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift));
3548 __ Add(base, elements, Operand(key, SXTW, element_size_shift));
3551 __ Add(base, elements, base_offset);
3583 __ Ldr(result, mem_op);
3590 __ Ldr(scratch, mem_op);
3591 __ Cmn(scratch, 1);
3626 __ Load(result, mem_op, representation);
3660 __ Load(result, MemOperand(object, offset), access.representation());
3666 __ Ldr(result, FieldMemOperand(object, offset));
3676 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
3685 __ Load(result, UntagSmiFieldMemOperand(source, offset),
3688 __ Load(result, FieldMemOperand(source, offset), access.representation());
3697 __ Mov(LoadDescriptor::NameRegister(), Operand(instr->name()));
3711 __ LoadRoot(result, instr->index());
3718 __ EnumLengthSmi(result, map);
3727 __ Fabs(result, input);
3733 __ Abs(result, input);
3768 __ Ldr(result_bits, FieldMemOperand(input, HeapNumber::kValueOffset));
3769 __ Mov(result, input);
3770 __ Tbz(result_bits, kXSignBit, exit);
3773 __ Bic(result_bits, result_bits, kXSignMask);
3777 __ Bind(allocation_entry);
3778 __ AllocateHeapNumber(result, &runtime_allocation, temp1, temp2);
3780 __ B(exit);
3782 __ Bind(&runtime_allocation);
3790 __ JumpIfSmi(result, &result_ok);
3791 __ Cmp(input, result);
3792 __ Assert(eq, kUnexpectedValue);
3793 __ Bind(&result_ok);
3799 __ StoreToSafepointRegisterSlot(x0, result);
3842 __ JumpIfNotSmi(input, deferred->entry());
3844 __ Abs(result, input, NULL, &done);
3848 __ Mov(result_bits, double_to_rawbits(0x80000000));
3849 __ B(deferred->allocation_entry());
3851 __ Bind(deferred->exit());
3852 __ Str(result_bits, FieldMemOperand(result, HeapNumber::kValueOffset));
3854 __ Bind(&done);
3877 __ Frintm(result, input);
3889 __ Fcvtms(result, input);
3893 __ Cmp(result, Operand(result, SXTW));
3895 __ Fccmp(input, input, NoFlag, eq);
3907 __ Mov(result, dividend, kDiscardForSameWReg);
3915 __ Mov(result, Operand(dividend, ASR, shift));
3920 __ Negs(result, dividend);
3935 __ Mov(result, Operand(dividend, ASR, shift));
3939 __ Asr(result, result, shift);
3940 __ Csel(result, result, kMinInt / divisor, vc);
3965 __ TruncatingDiv(result, dividend, Abs(divisor));
3966 if (divisor < 0) __ Neg(result, result);
3975 __ Cmp(dividend, 0);
3976 __ B(divisor > 0 ? lt : gt, &needs_adjustment);
3977 __ TruncatingDiv(result, dividend, Abs(divisor));
3978 if (divisor < 0) __ Neg(result, result);
3979 __ B(&done);
3980 __ Bind(&needs_adjustment);
3981 __ Add(temp, dividend, Operand(divisor > 0 ? 1 : -1));
3982 __ TruncatingDiv(result, temp, Abs(divisor));
3983 if (divisor < 0) __ Neg(result, result);
3984 __ Sub(result, result, Operand(1));
3985 __ Bind(&done);
3998 __ Sdiv(result, dividend, divisor);
4006 __ Cmp(dividend, 1);
4007 __ Ccmp(divisor, -1, NoFlag, vs);
4013 __ Cmp(divisor, 0);
4014 __ Ccmp(dividend, 0, ZFlag, mi);
4023 __ Eor(remainder, dividend, divisor);
4024 __ Tbz(remainder, kWSignBit, &done);
4027 __ Msub(remainder, result, divisor, dividend);
4028 __ Cbz(remainder, &done);
4029 __ Sub(result, result, 1);
4031 __ Bind(&done);
4038 __ CallCFunction(ExternalReference::math_log_double_function(isolate()),
4047 __ Clz(result, input);
4062 __ Fmov(double_scratch(), kFP64NegativeInfinity);
4063 __ Fcmp(double_scratch(), input);
4064 __ Fabs(result, input);
4065 __ B(&done, eq);
4068 __ Fadd(double_scratch(), input, fp_zero);
4069 __ Fsqrt(result, double_scratch());
4071 __ Bind(&done);
4092 __ CallStub(&stub);
4095 __ JumpIfSmi(tagged_exponent, &no_deopt);
4097 __ Bind(&no_deopt);
4099 __ CallStub(&stub);
4103 __ Sxtw(integer_exponent, integer_exponent);
4105 __ CallStub(&stub);
4109 __ CallStub(&stub);
4123 __ Frinta(result, input);
4124 __ Fcmp(input, 0.0);
4125 __ Fccmp(result, input, ZFlag, lt);
4128 __ B(eq, &done);
4133 __ Fmov(scratch_d, 0.5);
4134 __ Fadd(result, input, scratch_d);
4135 __ Frintm(result, result);
4137 __ Fabs(result, result);
4138 __ Fneg(result, result);
4140 __ Bind(&done);
4160 __ Fmov(dot_five, 0.5);
4161 __ Fadd(temp, input, dot_five);
4162 __ Fcvtms(result, temp);
4168 __ Cmp(result, Operand(result.W(), SXTW));
4169 __ Ccmp(result, 1, ZFlag, eq);
4170 __ B(hi, &done);
4176 __ Cmp(result, 1);
4182 __ Fmov(result, input);
4187 __ Fcmp(input, dot_five);
4193 __ Cset(result, ge);
4194 __ Bind(&done);
4201 __ Fcvt(result.S(), input);
4202 __ Fcvt(result, result.S());
4209 __ Fsqrt(result, input);
4220 __ Cmp(left, right);
4221 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ? ge : le);
4227 __ Cmp(left, right);
4228 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ? ge : le);
4236 __ Fmax(result, left, right);
4239 __ Fmin(result, left, right);
4260 __ Tbz(dividend, kWSignBit, &dividend_is_not_negative);
4262 __ Neg(dividend, dividend);
4263 __ And(dividend, dividend, mask);
4264 __ Negs(dividend, dividend);
4268 __ B(&done);
4271 __ bind(&dividend_is_not_negative);
4272 __ And(dividend, dividend, mask);
4273 __ bind(&done);
4289 __ TruncatingDiv(result, dividend, Abs(divisor));
4290 __ Sxtw(dividend.X(), dividend);
4291 __ Mov(temp, Abs(divisor));
4292 __ Smsubl(result.X(), result, temp, dividend.X());
4298 __ Cbnz(result, &remainder_not_zero);
4300 __ bind(&remainder_not_zero);
4312 __ Sdiv(result, dividend, divisor);
4316 __ Msub(result, result, divisor, dividend);
4318 __ Cbnz(result, &done);
4321 __ Bind(&done);
4354 __ Negs(result, left);
4357 __ Neg(result, left);
4362 __ Mov(result, 0);
4366 __ Mov(result, left, kDiscardForSameWReg);
4370 __ Adds(result, left, left);
4373 __ Add(result, left, left);
4388 __ Cls(scratch, left);
4389 __ Cmp(scratch, right_log2);
4395 __ Lsl(result, left, right_log2);
4399 __ Negs(result, Operand(left, LSL, right_log2));
4402 __ Neg(result, Operand(left, LSL, right_log2));
4417 __ Add(result, left, Operand(left, LSL, WhichPowerOf2(right - 1)));
4420 __ Sub(result, left, Operand(left, LSL, WhichPowerOf2(right + 1)));
4421 __ Neg(result, result);
4428 __ Sub(result, left, Operand(left, LSL, WhichPowerOf2(-right + 1)));
4431 __ Add(result, left, Operand(left, LSL, WhichPowerOf2(-right - 1)));
4432 __ Neg(result, result);
4453 __ Cmp(left, 0);
4454 __ Ccmp(right, 0, ZFlag, ne);
4457 __ Ccmn(left, right, NoFlag, eq);
4462 __ Smull(result.X(), left, right);
4463 __ Cmp(result.X(), Operand(result, SXTW));
4466 __ Mul(result, left, right);
4483 __ Cmp(left, 0);
4484 __ Ccmp(right, 0, ZFlag, ne);
4487 __ Ccmn(left, right, NoFlag, eq);
4493 __ Smulh(result, left, right);
4494 __ Cmp(result, Operand(result.W(), SXTW));
4495 __ SmiTag(result);
4502 __ Asr(result, left, kSmiShift / 2);
4503 __ Mul(result, result, result);
4507 __ SmiUntag(result, left);
4508 __ Mul(result, result, right);
4514 __ SmiUntag(result, right);
4515 __ Mul(result, left, result);
4526 __ Mov(result, 0);
4534 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4535 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4538 __ StoreToSafepointRegisterSlot(x0, result);
4560 __ AllocateHeapNumber(result, deferred->entry(), temp1, temp2);
4562 __ B(deferred->entry());
4565 __ Bind(deferred->exit());
4566 __ Str(input, FieldMemOperand(result, HeapNumber::kValueOffset));
4581 __ AllocateHeapNumber(dst, &slow, scratch1, scratch2);
4582 __ B(&convert_and_store);
4586 __ Bind(&slow);
4590 __ Mov(dst, 0);
4600 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4601 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4604 __ StoreToSafepointRegisterSlot(x0, dst);
4609 __ Bind(&convert_and_store);
4611 __ Ucvtf(dbl_scratch, src);
4612 __ Str(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset));
4636 __ Cmp(value, Smi::kMaxValue);
4637 __ B(hi, deferred->entry());
4638 __ SmiTag(result, value.X());
4639 __ Bind(deferred->exit());
4658 __ JumpIfSmi(input, &load_smi);
4664 __ JumpIfNotHeapNumber(input, &convert_undefined);
4670 __ Ldr(result, FieldMemOperand(input, HeapNumber::kValueOffset));
4674 __ B(&done);
4677 __ Bind(&convert_undefined);
4680 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4681 __ Ldr(result, FieldMemOperand(scratch, HeapNumber::kValueOffset));
4682 __ B(&done);
4691 __ Bind(&load_smi);
4692 __ SmiUntagToDouble(result, input);
4694 __ Bind(&done);
4719 __ PushPreamble(instr->argc(), kPointerSize);
4748 __ Push(x0);
4749 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4750 __ CallRuntime(Runtime::kTraceExit, 1);
4760 __ Mov(stack_pointer, fp);
4762 __ Pop(fp, lr);
4767 __ Drop(parameter_count + 1);
4770 __ DropBySMI(parameter_count);
4772 __ Ret();
4793 __ Add(temp, string, SeqString::kHeaderSize - kHeapObjectTag);
4816 __ Ldr(dbg_temp, FieldMemOperand(string, HeapObject::kMapOffset));
4817 __ Ldrb(dbg_temp, FieldMemOperand(dbg_temp, Map::kInstanceTypeOffset));
4819 __ And(dbg_temp, dbg_temp,
4823 __ Cmp(dbg_temp, Operand(encoding == String::ONE_BYTE_ENCODING
4825 __ Check(eq, kUnexpectedStringType);
4831 __ Ldrb(result, operand);
4833 __ Ldrh(result, operand);
4852 __ EmitSeqStringSetCharCheck(string, index, kIndexIsInteger32, temp,
4858 __ Strb(value, operand);
4860 __ Strh(value, operand);
4873 __ SmiTag(output, input);
4886 __ Bind(&untag);
4887 __ SmiUntag(result, input);
4888 __ Bind(&done);
4900 case Token::ROR: __ Ror(result, left, right); break;
4901 case Token::SAR: __ Asr(result, left, right); break;
4902 case Token::SHL: __ Lsl(result, left, right); break;
4904 __ Lsr(result, left, right);
4920 __ Mov(result, left, kDiscardForSameWReg);
4923 case Token::ROR: __ Ror(result, left, shift_count); break;
4924 case Token::SAR: __ Asr(result, left, shift_count); break;
4925 case Token::SHL: __ Lsl(result, left, shift_count); break;
4926 case Token::SHR: __ Lsr(result, left, shift_count); break;
4945 __ Ubfx(result, right, kSmiShift, 5);
4953 __ SmiUntag(temp, left);
4954 __ Ror(result.W(), temp.W(), result.W());
4955 __ SmiTag(result);
4959 __ Asr(result, left, result);
4960 __ Bic(result, result, kSmiShiftMask);
4963 __ Lsl(result, left, result);
4966 __ Lsr(result, left, result);
4967 __ Bic(result, result, kSmiShiftMask);
4983 __ Mov(result, left);
4987 __ SmiUntag(result, left);
4988 __ Ror(result.W(), result.W(), shift_count);
4989 __ SmiTag(result);
4992 __ Asr(result, left, shift_count);
4993 __ Bic(result, result, kSmiShiftMask);
4996 __ Lsl(result, left, shift_count);
4999 __ Lsr(result, left, shift_count);
5000 __ Bic(result, result, kSmiShiftMask);
5010 __ Debug("LDebugBreak", 0, BREAK);
5023 __ LoadHeapObject(scratch1, instr->hydrogen()->pairs());
5024 __ Mov(scratch2, Smi::FromInt(instr->hydrogen()->flags()));
5025 __ Push(cp, scratch1, scratch2); // The context is the first argument.
5033 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5060 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex);
5061 __ B(hs, &done);
5070 __ Bind(&done);
5076 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex);
5077 __ B(lo, deferred_stack_check->entry());
5080 __ Bind(instr->done_label());
5094 __ Add(temp, code_object, Code::kHeaderSize - kHeapObjectTag);
5095 __ Str(temp, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
5108 __ Ldr(scratch, target);
5112 __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, &skip_assignment);
5116 __ Str(value, target);
5121 __ RecordWriteContextSlot(context,
5130 __ Bind(&skip_assignment);
5139 __ Mov(cell, Operand(instr->hydrogen()->cell().handle()));
5147 __ Ldr(payload, FieldMemOperand(cell, Cell::kValueOffset));
5152 __ Str(value, FieldMemOperand(cell, Cell::kValueOffset));
5187 __ Fcvt(dbl_scratch.S(), value);
5188 __ Str(dbl_scratch.S(), dst);
5192 __ Str(value, dst);
5203 __ Strb(value, dst);
5209 __ Strh(value, dst);
5215 __ Str(value.W(), dst);
5259 __ CanonicalizeNaN(double_scratch(), value);
5260 __ Str(double_scratch(), mem_op);
5262 __ Str(value, mem_op);
5305 __ Store(value, mem_op, representation);
5315 __ Add(element_addr, mem_op.base(), mem_op.OffsetAsOperand());
5316 __ RecordWrite(elements, element_addr, value, GetLinkRegisterState(),
5346 __ Store(value, MemOperand(object, offset), representation);
5350 __ AssertNotSmi(object);
5357 __ Str(value, FieldMemOperand(object, offset));
5372 __ Mov(new_map_value, Operand(transition));
5373 __ Str(new_map_value, FieldMemOperand(object, HeapObject::kMapOffset));
5376 __ RecordWriteForMap(object,
5390 __ Ldr(temp0, FieldMemOperand(object, JSObject::kPropertiesOffset));
5399 __ Ldr(temp0, FieldMemOperand(destination, offset));
5400 __ AssertSmi(temp0);
5405 __ Ldr(destination, FieldMemOperand(object, JSObject::kPropertiesOffset));
5410 __ Store(value, UntagSmiFieldMemOperand(destination, offset),
5413 __ Store(value, FieldMemOperand(destination, offset), representation);
5416 __ RecordWriteField(destination,
5434 __ Mov(StoreDescriptor::NameRegister(), Operand(instr->name()));
5470 __ Bind(deferred->exit());
5481 __ Mov(result, 0);
5484 __ Push(string);
5488 __ SmiTagAndPush(index);
5492 __ AssertSmi(x0);
5493 __ SmiUntag(x0);
5494 __ StoreToSafepointRegisterSlot(x0, result);
5516 __ Cmp(char_code, String::kMaxOneByteCharCode);
5517 __ B(hi, deferred->entry());
5518 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
5519 __ Add(result, result, FixedArray::kHeaderSize - kHeapObjectTag);
5520 __ Ldr(result, MemOperand(result, char_code, SXTW, kPointerSizeLog2));
5521 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
5522 __ B(eq, deferred->entry());
5523 __ Bind(deferred->exit());
5534 __ Mov(result, 0);
5537 __ SmiTagAndPush(char_code);
5539 __ StoreToSafepointRegisterSlot(x0, result);
5564 __ Subs(result, left, right);
5567 __ Sub(result, left, right);
5578 __ Subs(result, left, right);
5581 __ Sub(result, left, right);
5601 __ JumpIfNotHeapNumber(input, &check_bools);
5604 __ TruncateHeapNumberToI(output, input);
5605 __ B(&done);
5607 __ Bind(&check_bools);
5611 __ LoadTrueFalseRoots(true_root, false_root);
5612 __ Cmp(input, true_root);
5613 __ Cset(output, eq);
5614 __ Ccmp(input, false_root, ZFlag, ne);
5615 __ B(eq, &done);
5628 __ Ldr(dbl_scratch1, FieldMemOperand(input, HeapNumber::kValueOffset));
5629 __ TryRepresentDoubleAsInt32(output, dbl_scratch1, dbl_scratch2);
5633 __ Cmp(output, 0);
5634 __ B(ne, &done);
5635 __ Fmov(scratch1, dbl_scratch1);
5639 __ Bind(&done);
5662 __ SmiUntag(output, input);
5666 __ JumpIfNotSmi(input, deferred->entry());
5667 __ SmiUntag(output, input);
5668 __ Bind(deferred->exit());
5675 __ Ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5682 __ Push(x0);
5697 __ LoadObject(x7, instr->hydrogen()->literals());
5698 __ Ldr(x1, FieldMemOperand(x7, literal_offset));
5699 __ JumpIfNotRoot(x1, Heap::kUndefinedValueRootIndex, &materialized);
5703 __ Mov(x12, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
5704 __ Mov(x11, Operand(instr->hydrogen()->pattern()));
5705 __ Mov(x10, Operand(instr->hydrogen()->flags()));
5706 __ Push(x7, x12, x11, x10);
5708 __ Mov(x1, x0);
5710 __ Bind(&materialized);
5714 __ Allocate(size, x0, x10, x11, &runtime_allocate, TAG_OBJECT);
5715 __ B(&allocated);
5717 __ Bind(&runtime_allocate);
5718 __ Mov(x0, Smi::FromInt(size));
5719 __ Push(x1, x0);
5721 __ Pop(x1);
5723 __ Bind(&allocated);
5725 __ CopyFields(x0, x1, CPURegList(x10, x11, x12), size / kPointerSize);
5742 __ CheckMap(object, temp1, from_map, &not_applicable, DONT_DO_SMI_CHECK);
5743 __ Mov(new_map, Operand(to_map));
5744 __ Str(new_map, FieldMemOperand(object, HeapObject::kMapOffset));
5746 __ RecordWriteForMap(object, new_map, temp1, GetLinkRegisterState(),
5753 __ CheckMap(object, temps.AcquireX(), from_map, &not_applicable,
5759 __ Mov(x1, Operand(to_map));
5762 __ CallStub(&stub);
5766 __ Bind(&not_applicable);
5776 __ TestJSArrayForAllocationMemento(object, temp1, temp2, &no_memento_found);
5778 __ Bind(&no_memento_found);
5785 __ TruncateDoubleToI(result, input);
5787 __ SmiTag(result, result);
5794 __ Push(input);
5807 __ JumpIfSmi(value, true_label);
5816 __ JumpIfNotHeapNumber(value, chunk_->GetAssemblyLabel(false_block));
5818 __ JumpIfHeapNumber(value, chunk_->GetAssemblyLabel(true_block));
5820 __ B(chunk_->GetAssemblyLabel(false_block));
5829 __ JumpIfSmi(value, false_label);
5830 __ JumpIfObjectType(
5832 __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
5840 __ JumpIfSmi(value, false_label);
5841 __ CompareObjectType(value, map, scratch, SYMBOL_TYPE);
5845 __ JumpIfRoot(value, Heap::kTrueValueRootIndex, true_label);
5846 __ CompareRoot(value, Heap::kFalseValueRootIndex);
5853 __ JumpIfRoot(value, Heap::kUndefinedValueRootIndex, true_label);
5854 __ JumpIfSmi(value, false_label);
5856 __ Ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5857 __ Ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5865 __ JumpIfSmi(value, false_label);
5866 __ JumpIfObjectType(value, type, type, JS_FUNCTION_TYPE, true_label);
5875 __ JumpIfSmi(value, false_label);
5876 __ JumpIfRoot(value, Heap::kNullValueRootIndex, true_label);
5877 __ JumpIfObjectType(value, map, scratch,
5879 __ CompareInstanceType(map, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5880 __ B(gt, false_label);
5882 __ Ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
5886 __ B(false_label);
5892 __ Ucvtf(ToDoubleRegister(instr->result()), ToRegister32(instr->value()));
5900 __ Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
5901 __ Cmp(map, temp);
5917 __ Ldr(result, FieldMemOperand(function,
5921 __ Ldr(result.W(),
5925 __ Tbnz(result, SharedFunctionInfo::kStrictModeFunction, &copy_receiver);
5928 __ Tbnz(result, SharedFunctionInfo::kNative, &copy_receiver);
5932 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &global_object);
5933 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, &global_object);
5937 __ CompareObjectType(receiver, result, result, FIRST_SPEC_OBJECT_TYPE);
5938 __ B(ge, &copy_receiver);
5941 __ Bind(&global_object);
5942 __ Ldr(result, FieldMemOperand(function, JSFunction::kContextOffset));
5943 __ Ldr(result, ContextMemOperand(result, Context::GLOBAL_OBJECT_INDEX));
5944 __ Ldr(result, FieldMemOperand(result, GlobalObject::kGlobalProxyOffset));
5945 __ B(&done);
5947 __ Bind(&copy_receiver);
5948 __ Mov(result, receiver);
5949 __ Bind(&done);
5958 __ Push(object);
5959 __ Push(index);
5960 __ Mov(cp, 0);
5961 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5964 __ StoreToSafepointRegisterSlot(x0, result);
5996 __ AssertSmi(index);
6004 __ TestAndBranchIfAnySet(
6006 __ Mov(index, Operand(index, ASR, 1));
6008 __ Cmp(index, Smi::FromInt(0));
6009 __ B(lt, &out_of_object);
6012 __ Add(result, object, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
6013 __ Ldr(result, FieldMemOperand(result, JSObject::kHeaderSize));
6015 __ B(&done);
6017 __ Bind(&out_of_object);
6018 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
6020 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
6021 __ Ldr(result, FieldMemOperand(result,
6023 __ Bind(deferred->exit());
6024 __ Bind(&done);
6030 __ Str(context, MemOperand(fp, StandardFrameConstants::kContextOffset));
6036 __ Push(scope_info);
6037 __ Push(ToRegister(instr->function()));