Lines matching defs:__ (every occurrence of the __ assembler shorthand in this file; a short sketch of the convention follows the listing)

42 #define __ masm()->
76 __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
92 __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
109 __ stop("stop_at");
126 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
127 __ ld(a2, MemOperand(sp, receiver_offset));
128 __ Branch(&ok, ne, a2, Operand(at));
130 __ ld(a2, GlobalObjectOperand());
131 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
133 __ sd(a2, MemOperand(sp, receiver_offset));
135 __ bind(&ok);
142 __ StubPrologue();
144 __ Prologue(info()->IsCodePreAgingActive());
154 __ Dsubu(sp, sp, Operand(slots * kPointerSize));
155 __ Push(a0, a1);
156 __ Daddu(a0, sp, Operand(slots * kPointerSize));
157 __ li(a1, Operand(kSlotsZapValue));
159 __ bind(&loop);
160 __ Dsubu(a0, a0, Operand(kPointerSize));
161 __ sd(a1, MemOperand(a0, 2 * kPointerSize));
162 __ Branch(&loop, ne, a0, Operand(sp));
163 __ Pop(a0, a1);
165 __ Dsubu(sp, sp, Operand(slots * kPointerSize));
181 __ CallStub(&stub);
185 __ push(a1);
186 __ CallRuntime(Runtime::kNewFunctionContext, 1);
191 __ mov(cp, v0);
192 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
201 __ ld(a0, MemOperand(fp, parameter_offset));
204 __ sd(a0, target);
207 __ RecordWriteContextSlot(
211 __ JumpIfInNewSpace(cp, a0, &done);
212 __ Abort(kExpectedNewSpaceObject);
213 __ bind(&done);
224 __ CallRuntime(Runtime::kTraceEnter, 0);
241 __ Dsubu(sp, sp, Operand(slots * kPointerSize));
271 __ bind(code->entry());
277 __ MultiPush(cp.bit() | fp.bit() | ra.bit());
278 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
279 __ push(scratch0());
280 __ Daddu(fp, sp,
288 __ pop(at);
289 __ MultiPop(cp.bit() | fp.bit() | ra.bit());
292 __ jmp(code->exit());
308 __ bind(&table_start);
312 __ bind(&table_entry->label);
315 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry)));
319 __ Branch(&needs_frame);
321 __ bind(&needs_frame);
322 __ MultiPush(cp.bit() | fp.bit() | ra.bit());
327 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
328 __ push(scratch0());
329 __ Daddu(fp, sp,
331 __ Call(t9);
338 __ Call(t9);
341 __ RecordComment("]");
383 __ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
386 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
391 __ li(scratch, literal);
395 __ ld(scratch, ToMemOperand(op));
421 __ li(at, Operand(static_cast<int32_t>(literal->Number())));
422 __ mtc1(at, flt_scratch);
423 __ cvt_d_w(dbl_scratch, flt_scratch);
432 __ ldc1(dbl_scratch, mem_op);
687 __ Call(code, mode);
698 __ CallRuntime(function, num_arguments, save_doubles);
706 __ Move(cp, ToRegister(context));
708 __ ld(cp, ToMemOperand(context));
712 __ li(cp, Handle<Object>::cast(constant->handle(isolate())));
724 __ CallRuntimeSaveDoubles(id);
787 __ Push(a1, scratch);
788 __ li(scratch, Operand(count));
789 __ lw(a1, MemOperand(scratch));
790 __ Subu(a1, a1, Operand(1));
791 __ Branch(&no_deopt, ne, a1, Operand(zero_reg));
792 __ li(a1, Operand(FLAG_deopt_every_n_times));
793 __ sw(a1, MemOperand(scratch));
794 __ Pop(a1, scratch);
796 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
797 __ bind(&no_deopt);
798 __ sw(a1, MemOperand(scratch));
799 __ Pop(a1, scratch);
805 __ Branch(&skip, NegateCondition(condition), src1, src2);
807 __ stop("trap_on_deopt");
808 __ bind(&skip);
819 __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
829 __ Branch(&jump_table_.last().label, condition, src1, src2);
987 __ bind(label->label());
1065 __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg));
1067 __ dsubu(dividend, zero_reg, dividend);
1068 __ And(dividend, dividend, Operand(mask));
1072 __ Branch(USE_DELAY_SLOT, &done);
1073 __ dsubu(dividend, zero_reg, dividend);
1076 __ bind(&dividend_is_not_negative);
1077 __ And(dividend, dividend, Operand(mask));
1078 __ bind(&done);
1093 __ TruncatingDiv(result, dividend, Abs(divisor));
1094 __ Dmul(result, result, Operand(Abs(divisor)));
1095 __ Dsubu(result, dividend, Operand(result));
1101 __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg));
1103 __ bind(&remainder_not_zero);
1115 __ Dmod(result_reg, left_reg, right_reg);
1128 __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt));
1132 __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1));
1133 __ Branch(USE_DELAY_SLOT, &done);
1134 __ mov(result_reg, zero_reg);
1136 __ bind(&no_overflow_possible);
1140 __ Branch(&done, ge, left_reg, Operand(zero_reg));
1145 __ bind(&done);
1169 __ And(at, dividend, Operand(mask));
1174 __ Dsubu(result, zero_reg, dividend);
1179 __ Move(result, dividend);
1181 __ dsrl32(result, dividend, 31);
1182 __ Daddu(result, dividend, Operand(result));
1184 __ dsra32(result, dividend, 31);
1185 __ dsrl32(result, result, 32 - shift);
1186 __ Daddu(result, dividend, Operand(result));
1188 if (shift > 0) __ dsra(result, result, shift);
1189 if (divisor < 0) __ Dsubu(result, zero_reg, result);
1210 __ TruncatingDiv(result, dividend, Abs(divisor));
1211 if (divisor < 0) __ Subu(result, zero_reg, result);
1214 __ Dmul(scratch0(), result, Operand(divisor));
1215 __ Dsubu(scratch0(), scratch0(), dividend);
1230 __ Ddiv(result, dividend, divisor);
1240 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg));
1242 __ bind(&left_not_zero);
1249 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt));
1251 __ bind(&left_not_min_int);
1258 __ mfhi(remainder);
1260 __ dmod(remainder, dividend, divisor);
1275 __ Madd_d(addend, addend, multiplier, multiplicand, double_scratch0());
1288 __ Move(result, dividend);
1296 __ dsra(result, dividend, shift);
1303 __ Move(scratch, dividend);
1305 __ Dsubu(result, zero_reg, dividend);
1310 __ Xor(scratch, scratch, result);
1321 __ dsra(result, result, shift);
1326 __ Branch(&no_overflow, lt, scratch, Operand(zero_reg));
1327 __ li(result, Operand(kMinInt / divisor), CONSTANT_SIZE);
1328 __ Branch(&done);
1329 __ bind(&no_overflow);
1330 __ dsra(result, result, shift);
1331 __ bind(&done);
1356 __ TruncatingDiv(result, dividend, Abs(divisor));
1357 if (divisor < 0) __ Dsubu(result, zero_reg, result);
1366 __ Branch(&needs_adjustment, divisor > 0 ? lt : gt,
1368 __ TruncatingDiv(result, dividend, Abs(divisor));
1369 if (divisor < 0) __ Dsubu(result, zero_reg, result);
1370 __ jmp(&done);
1371 __ bind(&needs_adjustment);
1372 __ Daddu(temp, dividend, Operand(divisor > 0 ? 1 : -1));
1373 __ TruncatingDiv(result, temp, Abs(divisor));
1374 if (divisor < 0) __ Dsubu(result, zero_reg, result);
1375 __ Dsubu(result, result, Operand(1));
1376 __ bind(&done);
1389 __ Ddiv(result, dividend, divisor);
1399 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg));
1401 __ bind(&left_not_zero);
1408 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt));
1410 __ bind(&left_not_min_int);
1417 __ mfhi(remainder);
1419 __ dmod(remainder, dividend, divisor);
1421 __ Branch(&done, eq, remainder, Operand(zero_reg), USE_DELAY_SLOT);
1422 __ Xor(remainder, remainder, Operand(divisor));
1423 __ Branch(&done, ge, remainder, Operand(zero_reg));
1424 __ Dsubu(result, result, Operand(1));
1425 __ bind(&done);
1452 __ SubuAndCheckForOverflow(result, zero_reg, left, scratch);
1455 __ Dsubu(result, zero_reg, left);
1464 __ mov(result, zero_reg);
1468 __ Move(result, left);
1479 __ dsll(result, left, shift);
1481 if (constant < 0) __ Dsubu(result, zero_reg, result);
1484 __ dsll(scratch, left, shift);
1485 __ Daddu(result, scratch, left);
1487 if (constant < 0) __ Dsubu(result, zero_reg, result);
1490 __ dsll(scratch, left, shift);
1491 __ Dsubu(result, scratch, left);
1493 if (constant < 0) __ Dsubu(result, zero_reg, result);
1496 __ li(at, constant);
1497 __ Dmul(result, left, at);
1508 __ Dmulh(result, left, right);
1510 __ Dmul(result, left, right);
1512 __ dsra32(scratch, result, 0);
1513 __ sra(at, result, 31);
1515 __ SmiTag(result);
1520 __ SmiUntag(result, left);
1521 __ Dmul(result, result, right);
1523 __ Dmul(result, left, right);
1529 __ Xor(at, left, right);
1530 __ Branch(&done, ge, at, Operand(zero_reg));
1533 __ bind(&done);
1556 __ And(result, left, right);
1559 __ Or(result, left, right);
1563 __ Nor(result, zero_reg, left);
1565 __ Xor(result, left, right);
1587 __ Ror(result, left, Operand(ToRegister(right_op)));
1590 __ srav(result, left, ToRegister(right_op));
1593 __ srlv(result, left, ToRegister(right_op));
1601 __ sllv(result, left, ToRegister(right_op));
1614 __ Ror(result, left, Operand(shift_count));
1616 __ Move(result, left);
1621 __ sra(result, left, shift_count);
1623 __ Move(result, left);
1628 __ srl(result, left, shift_count);
1631 __ And(at, left, Operand(0x80000000));
1634 __ Move(result, left);
1640 __ dsll(result, left, shift_count);
1642 __ sll(result, left, shift_count);
1645 __ Move(result, left);
1665 __ Dsubu(ToRegister(result), ToRegister(left), Operand(right_reg));
1668 __ Dsubu(ToRegister(result), ToRegister(left), ToOperand(right));
1675 __ SubuAndCheckForOverflow(ToRegister(result),
1683 __ SubuAndCheckForOverflow(ToRegister(result),
1698 __ li(ToRegister(instr->result()), Operand(instr->value()));
1703 __ li(ToRegister(instr->result()), Operand(instr->value()));
1711 __ Move(result, v);
1716 __ li(ToRegister(instr->result()), Operand(instr->value()));
1723 __ li(ToRegister(instr->result()), object);
1730 __ EnumLength(result, map);
1745 __ SmiTst(object, at);
1747 __ GetObjectType(object, scratch, scratch);
1751 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
1755 __ li(scratch, Operand(stamp));
1756 __ ld(scratch, MemOperand(scratch));
1757 __ ld(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset));
1758 __ Branch(&runtime, ne, scratch, Operand(scratch0()));
1759 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
1761 __ jmp(&done);
1763 __ bind(&runtime);
1764 __ PrepareCallCFunction(2, scratch);
1765 __ li(a1, Operand(index));
1766 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1767 __ bind(&done);
1787 __ Daddu(scratch, string, ToRegister(index));
1790 __ dsll(scratch, ToRegister(index), 1);
1791 __ Daddu(scratch, string, scratch);
1804 __ ld(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
1805 __ lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1807 __ And(scratch, scratch,
1811 __ Dsubu(at, scratch, Operand(encoding == String::ONE_BYTE_ENCODING
1813 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
1818 __ lbu(result, operand);
1820 __ lhu(result, operand);
1838 __ EmitSeqStringSetCharCheck(string, index, value, scratch, encoding_mask);
1843 __ sb(value, operand);
1845 __ sh(value, operand);
1859 __ Daddu(ToRegister(result), ToRegister(left), Operand(right_reg));
1862 __ Daddu(ToRegister(result), ToRegister(left), ToOperand(right));
1870 __ AdduAndCheckForOverflow(ToRegister(result),
1878 __ AdduAndCheckForOverflow(ToRegister(result),
1904 __ Slt(scratch, left_reg, Operand(right_reg));
1906 __ Movz(result_reg, left_reg, scratch);
1907 __ Movn(result_reg, right_reg, scratch);
1910 __ Movn(result_reg, left_reg, scratch);
1911 __ Movz(result_reg, right_reg, scratch);
1919 __ BranchF(&check_zero, &check_nan_left, eq, left_reg, right_reg);
1920 __ BranchF(&return_left, NULL, condition, left_reg, right_reg);
1921 __ Branch(&return_right);
1923 __ bind(&check_zero);
1925 __ BranchF(&return_left, NULL, ne, left_reg, kDoubleRegZero);
1928 __ neg_d(left_reg, left_reg);
1929 __ sub_d(result_reg, left_reg, right_reg);
1930 __ neg_d(result_reg, result_reg);
1932 __ add_d(result_reg, left_reg, right_reg);
1934 __ Branch(&done);
1936 __ bind(&check_nan_left);
1938 __ BranchF(NULL, &return_left, eq, left_reg, left_reg);
1939 __ bind(&return_right);
1941 __ mov_d(result_reg, right_reg);
1943 __ Branch(&done);
1945 __ bind(&return_left);
1947 __ mov_d(result_reg, left_reg);
1949 __ bind(&done);
1960 __ add_d(result, left, right);
1963 __ sub_d(result, left, right);
1966 __ mul_d(result, left, right);
1969 __ div_d(result, left, right);
1974 __ MultiPush(saved_regs);
1976 __ PrepareCallCFunction(0, 2, scratch0());
1977 __ MovToFloatParameters(left, right);
1978 __ CallCFunction(
1982 __ MovFromFloatResult(result);
1985 __ MultiPop(saved_regs);
2022 __ Branch(chunk_->GetAssemblyLabel(right_block),
2025 __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
2027 __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
2028 __ Branch(chunk_->GetAssemblyLabel(right_block));
2045 __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL,
2048 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
2051 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
2053 __ Branch(chunk_->GetAssemblyLabel(right_block));
2064 __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2);
2074 __ BranchF(chunk_->GetAssemblyLabel(false_block), NULL,
2080 __ stop("LDebugBreak");
2101 __ LoadRoot(at, Heap::kTrueValueRootIndex);
2112 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
2117 __ ld(at, FieldMemOperand(reg, String::kLengthOffset));
2126 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2127 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at));
2131 __ LoadRoot(at, Heap::kTrueValueRootIndex);
2132 __ Branch(instr->TrueLabel(chunk_), eq, reg, Operand(at));
2133 __ LoadRoot(at, Heap::kFalseValueRootIndex);
2134 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at));
2138 __ LoadRoot(at, Heap::kNullValueRootIndex);
2139 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at));
2144 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg));
2145 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2148 __ SmiTst(reg, at);
2154 __ ld(map, FieldMemOperand(reg, HeapObject::kMapOffset));
2157 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
2158 __ And(at, at, Operand(1 << Map::kIsUndetectable));
2159 __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg));
2165 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
2166 __ Branch(instr->TrueLabel(chunk_),
2173 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
2174 __ Branch(&not_string, ge , at, Operand(FIRST_NONSTRING_TYPE));
2175 __ ld(at, FieldMemOperand(reg, String::kLengthOffset));
2176 __ Branch(instr->TrueLabel(chunk_), ne, at, Operand(zero_reg));
2177 __ Branch(instr->FalseLabel(chunk_));
2178 __ bind(&not_string);
2184 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
2185 __ Branch(instr->TrueLabel(chunk_), eq, scratch, Operand(SYMBOL_TYPE));
2192 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
2193 __ Branch(&not_heap_number, ne, map, Operand(at));
2194 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
2195 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
2198 __ Branch(instr->FalseLabel(chunk_));
2199 __ bind(&not_heap_number);
2214 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block)));
2280 __ BranchF(NULL, instr->FalseLabel(chunk_), eq,
2329 __ li(at, Operand(factory()->the_hole_value()));
2338 __ FmoveHigh(scratch, input_reg);
2351 __ FmoveHigh(scratch, value);
2353 __ dsll32(scratch, scratch, 0);
2354 __ dsrl32(scratch, scratch, 0);
2355 __ li(at, 0x80000000);
2358 __ CheckMap(value,
2363 __ lwu(scratch, FieldMemOperand(value, HeapNumber::kExponentOffset));
2365 __ lwu(scratch, FieldMemOperand(value, HeapNumber::kMantissaOffset));
2366 __ mov(at, zero_reg);
2377 __ JumpIfSmi(input, is_not_object);
2379 __ LoadRoot(temp2, Heap::kNullValueRootIndex);
2380 __ Branch(is_object, eq, input, Operand(temp2));
2383 __ ld(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
2385 __ lbu(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
2386 __ And(temp2, temp2, Operand(1 << Map::kIsUndetectable));
2387 __ Branch(is_not_object, ne, temp2, Operand(zero_reg));
2390 __ lbu(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
2391 __ Branch(is_not_object,
2417 __ JumpIfSmi(input, is_not_string);
2419 __ GetObjectType(input, temp1, temp1);
2442 __ And(at, input_reg, kSmiTagMask);
2452 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2454 __ ld(temp, FieldMemOperand(input, HeapObject::kMapOffset));
2455 __ lbu(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
2456 __ And(at, temp, Operand(1 << Map::kIsUndetectable));
2519 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2522 __ GetObjectType(input, scratch, scratch);
2534 __ AssertString(input);
2536 __ lwu(result, FieldMemOperand(input, String::kHashFieldOffset));
2537 __ IndexFromHash(result, result);
2546 __ lwu(scratch,
2548 __ And(at, scratch, Operand(String::kContainsCachedArrayIndexMask));
2565 __ JumpIfSmi(input, is_false);
2577 __ GetObjectType(input, temp, temp2);
2578 __ Branch(is_false, lt, temp2, Operand(FIRST_SPEC_OBJECT_TYPE));
2579 __ Branch(is_true, eq, temp2, Operand(FIRST_SPEC_OBJECT_TYPE));
2580 __ Branch(is_true, eq, temp2, Operand(LAST_SPEC_OBJECT_TYPE));
2584 __ GetObjectType(input, temp, temp2);
2585 __ Dsubu(temp2, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2586 __ Branch(is_false, gt, temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
2592 __ ld(temp, FieldMemOperand(temp, Map::kConstructorOffset));
2595 __ GetObjectType(temp, temp2, temp2);
2597 __ Branch(is_true, ne, temp2, Operand(JS_FUNCTION_TYPE));
2599 __ Branch(is_false, ne, temp2, Operand(JS_FUNCTION_TYPE));
2604 __ ld(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
2605 __ ld(temp, FieldMemOperand(temp,
2636 __ ld(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
2652 __ Branch(&true_label, eq, result, Operand(zero_reg));
2653 __ li(result, Operand(factory()->false_value()));
2654 __ Branch(&done);
2655 __ bind(&true_label);
2656 __ li(result, Operand(factory()->true_value()));
2657 __ bind(&done);
2690 __ JumpIfSmi(object, &false_result);
2697 __ ld(map, FieldMemOperand(object, HeapObject::kMapOffset));
2700 __ bind(deferred->map_check()); // Label for calculating code patching.
2705 __ li(at, Operand(Handle<Object>(cell)));
2706 __ ld(at, FieldMemOperand(at, PropertyCell::kValueOffset));
2707 __ BranchShort(&cache_miss, ne, map, Operand(at));
2711 __ li(result, Operand(factory()->the_hole_value()));
2712 __ Branch(&done);
2716 __ bind(&cache_miss);
2718 __ LoadRoot(temp, Heap::kNullValueRootIndex);
2719 __ Branch(&false_result, eq, object, Operand(temp));
2722 Condition cc = __ IsObjectStringType(object, temp, temp);
2723 __ Branch(&false_result, cc, temp, Operand(zero_reg));
2726 __ Branch(deferred->entry());
2728 __ bind(&false_result);
2729 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2733 __ bind(deferred->exit());
2734 __ bind(&done);
2760 __ li(InstanceofStub::right(), instr->function());
2764 __ bind(&before_push_delta);
2767 __ li(temp, Operand(delta * kIntSize), CONSTANT_SIZE);
2768 __ StoreToSafepointRegisterSlot(temp, temp);
2778 __ StoreToSafepointRegisterSlot(result, result);
2795 __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg));
2796 __ bind(&check);
2797 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2799 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2800 __ bind(&done);
2810 __ push(v0);
2811 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2812 __ CallRuntime(Runtime::kTraceExit, 1);
2819 __ mov(sp, fp);
2821 __ Pop(ra, fp);
2827 __ Daddu(sp, sp, Operand(sp_delta));
2832 __ SmiUntag(reg);
2833 __ dsll(at, reg, kPointerSizeLog2);
2834 __ Daddu(sp, sp, at);
2837 __ Jump(ra);
2847 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle())));
2848 __ ld(result, FieldMemOperand(at, Cell::kValueOffset));
2850 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2861 __ li(vector, instr->hydrogen()->feedback_vector());
2864 __ li(VectorLoadICDescriptor::SlotRegister(),
2875 __ li(LoadDescriptor::NameRegister(), Operand(instr->name()));
2890 __ li(cell, Operand(instr->hydrogen()->cell().handle()));
2899 __ ld(payload, FieldMemOperand(cell, Cell::kValueOffset));
2900 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2905 __ sd(value, FieldMemOperand(cell, Cell::kValueOffset));
2914 __ ld(result, ContextOperand(context, instr->slot_index()));
2916 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2922 __ Branch(&is_not_hole, ne, result, Operand(at));
2923 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2924 __ bind(&is_not_hole);
2939 __ ld(scratch, target);
2940 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2945 __ Branch(&skip_assignment, ne, scratch, Operand(at));
2949 __ sd(value, target);
2954 __ RecordWriteContextSlot(context,
2964 __ bind(&skip_assignment);
2975 __ Load(result, operand, access.representation());
2981 __ ldc1(result, FieldMemOperand(object, offset));
2987 __ ld(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2997 __ Load(scratch, FieldMemOperand(object, offset), representation);
2998 __ AssertSmi(scratch);
3007 __ Load(result, FieldMemOperand(object, offset), representation);
3017 __ li(LoadDescriptor::NameRegister(), Operand(instr->name()));
3032 __ ld(result,
3036 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3041 __ GetObjectType(result, scratch, scratch);
3042 __ Branch(&done, ne, scratch, Operand(MAP_TYPE));
3045 __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset));
3048 __ bind(&done);
3054 __ LoadRoot(result, instr->index());
3068 __ ld(result, MemOperand(arguments, index * kPointerSize));
3071 __ li(at, Operand(const_length + 1));
3072 __ Dsubu(result, at, index);
3073 __ dsll(at, result, kPointerSizeLog2);
3074 __ Daddu(at, arguments, at);
3075 __ ld(result, MemOperand(at));
3082 __ Dsubu(result, length, Operand(loc));
3083 __ dsll(at, result, kPointerSizeLog2);
3084 __ Daddu(at, arguments, at);
3085 __ ld(result, MemOperand(at));
3087 __ dsll(at, length, kPointerSizeLog2);
3088 __ Daddu(at, arguments, at);
3089 __ ld(result, MemOperand(at));
3094 __ Dsubu(result, length, index);
3095 __ Daddu(result, result, 1);
3096 __ dsll(at, result, kPointerSizeLog2);
3097 __ Daddu(at, arguments, at);
3098 __ ld(result, MemOperand(at));
3130 __ Daddu(scratch0(), external_pointer,
3135 __ dsra32(scratch0(), key, 0);
3137 __ dsra(scratch0(), key, -shift_size);
3140 __ dsll(scratch0(), key, shift_size);
3142 __ Daddu(scratch0(), scratch0(), external_pointer);
3146 __ lwc1(result, MemOperand(scratch0(), base_offset));
3147 __ cvt_d_s(result, result);
3149 __ ldc1(result, MemOperand(scratch0(), base_offset));
3159 __ lb(result, mem_operand);
3165 __ lbu(result, mem_operand);
3169 __ lh(result, mem_operand);
3173 __ lhu(result, mem_operand);
3177 __ lw(result, mem_operand);
3181 __ lw(result, mem_operand);
3222 __ Daddu(scratch, elements, Operand(base_offset));
3230 __ dsll(at, key, shift_size);
3232 __ dsra32(at, key, 0);
3234 __ dsra(at, key, -shift_size);
3236 __ Daddu(scratch, scratch, at);
3239 __ ldc1(result, MemOperand(scratch));
3242 __ lw(scratch, MemOperand(scratch, sizeof(kHoleNanLower32)));
3267 __ SmiScale(scratch, key, kPointerSizeLog2);
3268 __ daddu(scratch, elements, scratch);
3270 __ dsll(scratch, key, kPointerSizeLog2);
3271 __ daddu(scratch, elements, scratch);
3281 __ Load(temp, MemOperand(store_base, offset), Representation::Smi());
3282 __ AssertSmi(temp);
3291 __ Load(result, MemOperand(store_base, offset), representation);
3296 __ SmiTst(result, scratch);
3299 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
3330 __ dsll(scratch0(), key, shift_size);
3331 __ Daddu(scratch0(), base, scratch0());
3335 __ dsra32(scratch0(), key, 0);
3337 __ dsra(scratch0(), key, -shift_size);
3339 __ Daddu(scratch0(), base, scratch0());
3345 __ dsll(scratch0(), key, shift_size);
3346 __ Daddu(scratch0(), base, scratch0());
3350 __ dsra32(scratch0(), key, 0);
3352 __ dsra(scratch0(), key, -shift_size);
3354 __ Daddu(scratch0(), base, scratch0());
3380 __ Dsubu(result, sp, 2 * kPointerSize);
3384 __ ld(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3385 __ ld(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
3386 __ Xor(temp, result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3390 __ Movn(result, fp, temp); // Move only if temp is not equal to zero (ne).
3391 __ Movz(result, scratch, temp); // Move only if temp is equal to zero (eq).
3403 __ Daddu(result, zero_reg, Operand(scope()->num_parameters()));
3404 __ Branch(&done, eq, fp, Operand(elem));
3407 __ ld(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3408 __ ld(result,
3410 __ SmiUntag(result);
3413 __ bind(&done);
3430 __ ld(scratch,
3438 __ lbu(at,
3440 __ And(at, at, Operand(strict_mode_function_mask));
3441 __ Branch(&result_in_receiver, ne, at, Operand(zero_reg));
3442 __ lbu(at,
3444 __ And(at, at, Operand(native_mask));
3445 __ Branch(&result_in_receiver, ne, at, Operand(zero_reg));
3449 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
3450 __ Branch(&global_object, eq, receiver, Operand(scratch));
3451 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
3452 __ Branch(&global_object, eq, receiver, Operand(scratch));
3455 __ SmiTst(receiver, scratch);
3458 __ GetObjectType(receiver, scratch, scratch);
3460 __ Branch(&result_in_receiver);
3462 __ bind(&global_object);
3463 __ ld(result, FieldMemOperand(function, JSFunction::kContextOffset));
3464 __ ld(result,
3466 __ ld(result,
3470 __ bind(&result_in_receiver);
3473 __ Branch(&result_ok);
3474 __ bind(&result_in_receiver);
3475 __ mov(result, receiver);
3476 __ bind(&result_ok);
3498 __ push(receiver);
3499 __ Move(receiver, length);
3501 __ Daddu(elements, elements, Operand(1 * kPointerSize));
3507 __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg));
3508 __ dsll(scratch, length, kPointerSizeLog2);
3509 __ bind(&loop);
3510 __ Daddu(scratch, elements, scratch);
3511 __ ld(scratch, MemOperand(scratch));
3512 __ push(scratch);
3513 __ Dsubu(length, length, Operand(1));
3514 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg));
3515 __ dsll(scratch, length, kPointerSizeLog2);
3517 __ bind(&invoke);
3525 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
3535 __ push(argument_reg);
3541 __ Drop(instr->count());
3547 __ ld(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3555 __ ld(result, MemOperand(fp, StandardFrameConstants::kContextOffset));
3565 __ li(scratch0(), instr->hydrogen()->pairs());
3566 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
3568 __ Push(cp, scratch0(), scratch1());
3587 __ li(a1, function);
3591 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
3596 __ li(a0, Operand(arity));
3600 __ ld(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
3601 __ Call(at);
3609 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator);
3622 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3623 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3629 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3632 __ Move(result, input);
3633 __ And(at, exponent, Operand(HeapNumber::kSignMask));
3634 __ Branch(&done, eq, at, Operand(zero_reg));
3651 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
3652 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
3653 __ Branch(&allocated);
3656 __ bind(&slow);
3662 __ mov(tmp1, v0);
3664 __ LoadFromSafepointRegisterSlot(input, input);
3665 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3667 __ bind(&allocated);
3670 __ And(exponent, exponent, Operand(~HeapNumber::kSignMask));
3671 __ sw(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
3672 __ lwu(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
3673 __ sw(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));
3675 __ StoreToSafepointRegisterSlot(tmp1, result);
3678 __ bind(&done);
3687 __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg));
3688 __ mov(result, input);
3689 __ dsubu(result, zero_reg, input);
3692 __ bind(&done);
3714 __ abs_d(result, input);
3723 __ JumpIfNotSmi(input, deferred->entry());
3726 __ bind(deferred->exit());
3737 __ EmitFPUTruncate(kRoundToMinusInf,
3750 __ Branch(&done, ne, result, Operand(zero_reg));
3751 __ mfhc1(scratch1, input); // Get exponent/sign bits.
3752 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
3754 __ bind(&done);
3767 __ mfhc1(result, input);
3768 __ Ext(scratch,
3775 __ Branch(&skip1, gt, scratch, Operand(HeapNumber::kExponentBias - 2));
3776 __ mov(result, zero_reg);
3778 __ Branch(&check_sign_on_zero);
3780 __ Branch(&done);
3782 __ bind(&skip1);
3789 __ And(scratch, result, Operand(HeapNumber::kSignMask));
3791 __ Move(double_scratch0(), 0.5);
3792 __ add_d(double_scratch0(), input, double_scratch0());
3796 __ mfhc1(result, double_scratch0());
3798 __ dsll32(result, result, 0);
3799 __ dsrl32(result, result, 0);
3800 __ Xor(result, result, Operand(scratch));
3808 __ Branch(&skip2, ge, result, Operand(zero_reg));
3809 __ mov(result, zero_reg);
3810 __ Branch(&done);
3811 __ bind(&skip2);
3815 __ EmitFPUTruncate(kRoundToMinusInf,
3826 __ Branch(&done, ne, result, Operand(zero_reg));
3827 __ bind(&check_sign_on_zero);
3828 __ mfhc1(scratch, input); // Get exponent/sign bits.
3829 __ And(scratch, scratch, Operand(HeapNumber::kSignMask));
3832 __ bind(&done);
3839 __ cvt_s_d(result, input);
3840 __ cvt_d_s(result, result);
3847 __ sqrt_d(result, input);
3862 __ Move(temp, -V8_INFINITY);
3863 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, temp, input);
3866 __ neg_d(result, temp);
3869 __ add_d(result, input, kDoubleRegZero);
3870 __ sqrt_d(result, result);
3871 __ bind(&done);
3889 __ CallStub(&stub);
3892 __ JumpIfSmi(tagged_exponent, &no_deopt);
3894 __ lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset));
3895 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3897 __ bind(&no_deopt);
3899 __ CallStub(&stub);
3902 __ CallStub(&stub);
3906 __ CallStub(&stub);
3926 __ PrepareCallCFunction(0, 1, scratch0());
3927 __ MovToFloatParameter(ToDoubleRegister(instr->value()));
3928 __ CallCFunction(ExternalReference::math_log_double_function(isolate()),
3930 __ MovFromFloatResult(ToDoubleRegister(instr->result()));
3937 __ Clz(result, input);
3951 __ InvokeFunction(a1, count, CALL_FUNCTION, generator);
3985 if (must_teardown_frame) __ LeaveFrame(StackFrame::INTERNAL);
3999 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET));
4000 __ Call(code, RelocInfo::CODE_TARGET);
4004 generator.BeforeCall(__ CallSize(target));
4005 __ Daddu(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
4006 __ Call(target);
4017 __ li(a0, Operand(instr->arity()));
4021 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
4024 __ ld(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
4025 __ Call(at);
4047 __ li(a0, Operand(instr->arity()));
4049 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4060 __ li(a0, Operand(instr->arity()));
4061 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4077 __ ld(a5, MemOperand(sp, 0));
4078 __ Branch(&packed_case, eq, a5, Operand(zero_reg));
4085 __ jmp(&done);
4086 __ bind(&packed_case);
4091 __ bind(&done);
4107 __ Daddu(code_object, code_object,
4109 __ sd(code_object,
4119 __ Daddu(result, base, Operand(ToInteger32(offset)));
4122 __ Daddu(result, base, offset);
4138 __ Store(value, operand, representation);
4142 __ AssertNotSmi(object);
4152 __ sdc1(value, FieldMemOperand(object, offset));
4159 __ li(scratch1, Operand(transition));
4160 __ sd(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
4164 __ RecordWriteForMap(object,
4176 __ ld(destination, FieldMemOperand(object, JSObject::kPropertiesOffset));
4183 __ Load(scratch2, FieldMemOperand(destination, offset), representation);
4184 __ AssertSmi(scratch2);
4193 __ Store(value, operand, representation);
4196 __ RecordWriteField(destination,
4214 __ li(StoreDescriptor::NameRegister(), Operand(instr->name()));
4234 __ Branch(&done, NegateCondition(cc), reg, operand);
4235 __ stop("eliminated bounds check failed");
4236 __ bind(&done);
4271 __ Daddu(address, external_pointer,
4279 __ dsra32(address, key, 0);
4281 __ dsra(address, key, -shift_size);
4284 __ dsll(address, key, shift_size);
4286 __ Daddu(address, external_pointer, address);
4291 __ cvt_s_d(double_scratch0(), value);
4292 __ swc1(double_scratch0(), MemOperand(address, base_offset));
4294 __ sdc1(value, MemOperand(address, base_offset));
4309 __ sb(value, mem_operand);
4315 __ sh(value, mem_operand);
4321 __ sw(value, mem_operand);
4359 __ Daddu(scratch, elements,
4365 __ Daddu(scratch, elements, Operand(base_offset));
4368 __ dsll(at, ToRegister(instr->key()), 3);
4370 __ dsra(at, ToRegister(instr->key()), 29);
4372 __ Daddu(scratch, scratch, at);
4378 __ BranchF(NULL, &is_nan, eq, value, value);
4379 __ Branch(&not_nan);
4382 __ bind(&is_nan);
4383 __ LoadRoot(at, Heap::kNanValueRootIndex);
4384 __ ldc1(double_scratch, FieldMemOperand(at, HeapNumber::kValueOffset));
4385 __ sdc1(double_scratch, MemOperand(scratch, 0));
4386 __ Branch(&done);
4389 __ bind(&not_nan);
4390 __ sdc1(value, MemOperand(scratch, 0));
4391 __ bind(&done);
4416 __ SmiScale(scratch, key, kPointerSizeLog2);
4417 __ daddu(store_base, elements, scratch);
4419 __ dsll(scratch, key, kPointerSizeLog2);
4420 __ daddu(store_base, elements, scratch);
4430 __ Load(temp, MemOperand(store_base, offset), Representation::Smi());
4431 __ AssertSmi(temp);
4441 __ Store(value, MemOperand(store_base, offset), representation);
4448 __ Daddu(key, store_base, Operand(offset));
4449 __ RecordWrite(elements,
4495 __ ld(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4496 __ Branch(&not_applicable, ne, scratch, Operand(from_map));
4500 __ li(new_map_reg, Operand(to_map));
4501 __ sd(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4503 __ RecordWriteForMap(object_reg,
4512 __ li(a1, Operand(to_map));
4515 __ CallStub(&stub);
4519 __ bind(&not_applicable);
4527 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found,
4530 __ bind(&no_memento_found);
4565 __ bind(deferred->exit());
4577 __ mov(result, zero_reg);
4580 __ push(string);
4585 __ Daddu(scratch, zero_reg, Operand(Smi::FromInt(const_index)));
4586 __ push(scratch);
4589 __ SmiTag(index);
4590 __ push(index);
4594 __ AssertSmi(v0);
4595 __ SmiUntag(v0);
4596 __ StoreToSafepointRegisterSlot(v0, result);
4622 __ Branch(deferred->entry(), hi,
4624 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4625 __ dsll(scratch, char_code, kPointerSizeLog2);
4626 __ Daddu(result, result, scratch);
4627 __ ld(result, FieldMemOperand(result, FixedArray::kHeaderSize));
4628 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4629 __ Branch(deferred->entry(), eq, result, Operand(scratch));
4630 __ bind(deferred->exit());
4641 __ mov(result, zero_reg);
4644 __ SmiTag(char_code);
4645 __ push(char_code);
4647 __ StoreToSafepointRegisterSlot(v0, result);
4659 __ ld(scratch, ToMemOperand(input));
4660 __ mtc1(scratch, single_scratch);
4662 __ mtc1(ToRegister(input), single_scratch);
4664 __ cvt_d_w(ToDoubleRegister(output), single_scratch);
4673 __ mtc1(ToRegister(input), dbl_scratch);
4674 __ Cvt_d_uw(ToDoubleRegister(output), dbl_scratch, f22); // TODO(plind): f22?
4699 __ Branch(deferred->entry(), hi, input, Operand(Smi::kMaxValue));
4700 __ SmiTag(result, input);
4701 __ bind(deferred->exit());
4723 __ SmiUntag(src, dst);
4724 __ Xor(src, src, Operand(0x80000000));
4726 __ mtc1(src, dbl_scratch);
4727 __ cvt_d_w(dbl_scratch, dbl_scratch);
4729 __ mtc1(src, dbl_scratch);
4730 __ Cvt_d_uw(dbl_scratch, dbl_scratch, f22);
4734 __ LoadRoot(tmp3, Heap::kHeapNumberMapRootIndex);
4735 __ AllocateHeapNumber(dst, tmp1, tmp2, tmp3, &slow, TAG_RESULT);
4736 __ Branch(&done);
4740 __ bind(&slow);
4745 __ mov(dst, zero_reg);
4754 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4755 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4758 __ StoreToSafepointRegisterSlot(v0, dst);
4763 __ bind(&done);
4764 __ sdc1(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset));
4789 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
4791 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(),
4794 __ Branch(deferred->entry());
4796 __ bind(deferred->exit());
4797 __ sdc1(input_reg, MemOperand(reg, HeapNumber::kValueOffset));
4799 __ Daddu(reg, reg, kHeapObjectTag);
4808 __ mov(reg, zero_reg);
4816 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4817 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4820 __ Dsubu(v0, v0, kHeapObjectTag);
4821 __ StoreToSafepointRegisterSlot(v0, reg);
4831 __ And(at, input, Operand(0x80000000));
4836 __ SmiTagCheckOverflow(output, input, at);
4839 __ SmiTag(output, input);
4851 __ And(scratch, input, Operand(kHeapObjectTag));
4852 __ SmiUntag(result, input);
4855 __ SmiUntag(result, input);
4871 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
4873 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4874 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4876 __ Branch(&convert, ne, scratch, Operand(at));
4881 __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
4883 __ mfc1(at, result_reg);
4884 __ Branch(&done, ne, at, Operand(zero_reg));
4885 __ mfhc1(scratch, result_reg); // Get exponent/sign bits.
4888 __ Branch(&done);
4890 __ bind(&convert);
4892 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4894 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4895 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset));
4896 __ Branch(&done);
4899 __ SmiUntag(scratch, input_reg);
4903 __ bind(&load_smi);
4905 __ mtc1(scratch, result_reg);
4906 __ cvt_d_w(result_reg, result_reg);
4907 __ bind(&done);
4925 __ ld(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4926 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4935 __ Branch(USE_DELAY_SLOT, &no_heap_number, ne, scratch1, Operand(at));
4936 __ mov(scratch2, input_reg); // In delay slot.
4937 __ TruncateHeapNumberToI(input_reg, scratch2);
4938 __ Branch(&done);
4942 __ bind(&no_heap_number);
4943 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4944 __ Branch(&check_bools, ne, input_reg, Operand(at));
4946 __ Branch(USE_DELAY_SLOT, &done);
4947 __ mov(input_reg, zero_reg); // In delay slot.
4949 __ bind(&check_bools);
4950 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4951 __ Branch(&check_false, ne, scratch2, Operand(at));
4952 __ Branch(USE_DELAY_SLOT, &done);
4953 __ li(input_reg, Operand(1)); // In delay slot.
4955 __ bind(&check_false);
4956 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4958 __ Branch(USE_DELAY_SLOT, &done);
4959 __ mov(input_reg, zero_reg); // In delay slot.
4964 __ ldc1(double_scratch,
4968 __ EmitFPUTruncate(kRoundToZero,
4980 __ Branch(&done, ne, input_reg, Operand(zero_reg));
4982 __ mfhc1(scratch1, double_scratch); // Get exponent/sign bits.
4983 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
4987 __ bind(&done);
5011 __ SmiUntag(input_reg);
5016 __ JumpIfNotSmi(input_reg, deferred->entry());
5019 __ SmiUntag(input_reg);
5020 __ bind(deferred->exit());
5048 __ TruncateDoubleToI(result_reg, double_input);
5052 __ EmitFPUTruncate(kRoundToMinusInf,
5065 __ Branch(&done, ne, result_reg, Operand(zero_reg));
5066 __ mfhc1(scratch1, double_input); // Get exponent/sign bits.
5067 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
5069 __ bind(&done);
5081 __ TruncateDoubleToI(result_reg, double_input);
5085 __ EmitFPUTruncate(kRoundToMinusInf,
5098 __ Branch(&done, ne, result_reg, Operand(zero_reg));
5099 __ mfhc1(scratch1, double_input); // Get exponent/sign bits.
5100 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
5102 __ bind(&done);
5105 __ SmiTag(result_reg, result_reg);
5111 __ SmiTst(ToRegister(input), at);
5119 __ SmiTst(ToRegister(input), at);
5129 __ GetObjectType(input, scratch, scratch);
5153 __ And(at, scratch, mask);
5156 __ And(scratch, scratch, Operand(mask));
5170 __ li(at, Operand(Handle<Object>(cell)));
5171 __ ld(at, FieldMemOperand(at, Cell::kValueOffset));
5182 __ push(object);
5183 __ mov(cp, zero_reg);
5184 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
5187 __ StoreToSafepointRegisterSlot(v0, scratch0());
5189 __ SmiTst(scratch0(), at);
5224 __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
5229 __ bind(deferred->check_maps());
5236 __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
5241 __ Branch(deferred->entry(), ne, map_reg, Operand(map));
5246 __ bind(&success);
5254 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
5261 __ ClampUint8(result_reg, unclamped_reg);
5273 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi);
5276 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
5277 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map()));
5282 __ mov(result_reg, zero_reg);
5283 __ jmp(&done);
5286 __ bind(&heap_number);
5287 __ ldc1(double_scratch0(), FieldMemOperand(input_reg,
5289 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
5290 __ jmp(&done);
5292 __ bind(&is_smi);
5293 __ ClampUint8(result_reg, scratch);
5295 __ bind(&done);
5303 __ FmoveHigh(result_reg, value_reg);
5305 __ FmoveLow(result_reg, value_reg);
5314 __ Move(result_reg, lo_reg, hi_reg);
5354 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
5356 __ jmp(deferred->entry());
5360 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
5363 __ bind(deferred->exit());
5369 __ li(scratch, Operand(size - kHeapObjectTag));
5371 __ Dsubu(scratch, ToRegister(instr->size()), Operand(kHeapObjectTag));
5373 __ li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
5375 __ bind(&loop);
5376 __ Dsubu(scratch, scratch, Operand(kPointerSize));
5377 __ Daddu(at, result, Operand(scratch));
5378 __ sd(scratch2, MemOperand(at));
5379 __ Branch(&loop, ge, scratch, Operand(zero_reg));
5390 __ mov(result, zero_reg);
5396 __ SmiTag(size);
5397 __ push(size);
5401 __ li(v0, Operand(Smi::FromInt(size)));
5402 __ Push(v0);
5405 __ stop("invalid allocation size");
5422 __ li(v0, Operand(Smi::FromInt(flags)));
5423 __ Push(v0);
5427 __ StoreToSafepointRegisterSlot(v0, result);
5434 __ push(a0);
5449 __ li(a7, instr->hydrogen()->literals());
5450 __ ld(a1, FieldMemOperand(a7, literal_offset));
5451 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5452 __ Branch(&materialized, ne, a1, Operand(at));
5456 __ li(a6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
5457 __ li(a5, Operand(instr->hydrogen()->pattern()));
5458 __ li(a4, Operand(instr->hydrogen()->flags()));
5459 __ Push(a7, a6, a5, a4);
5461 __ mov(a1, v0);
5463 __ bind(&materialized);
5467 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
5468 __ jmp(&allocated);
5470 __ bind(&runtime_allocate);
5471 __ li(a0, Operand(Smi::FromInt(size)));
5472 __ Push(a1, a0);
5474 __ pop(a1);
5476 __ bind(&allocated);
5480 __ ld(a3, FieldMemOperand(a1, i));
5481 __ ld(a2, FieldMemOperand(a1, i + kPointerSize));
5482 __ sd(a3, FieldMemOperand(v0, i));
5483 __ sd(a2, FieldMemOperand(v0, i + kPointerSize));
5486 __ ld(a3, FieldMemOperand(a1, size - kPointerSize));
5487 __ sd(a3, FieldMemOperand(v0, size - kPointerSize));
5500 __ li(a2, Operand(instr->hydrogen()->shared_info()));
5503 __ li(a2, Operand(instr->hydrogen()->shared_info()));
5504 __ li(a1, Operand(pretenure ? factory()->true_value()
5506 __ Push(cp, a2, a1);
5515 __ push(input);
5555 __ JumpIfSmi(input, true_label);
5556 __ ld(input, FieldMemOperand(input, HeapObject::kMapOffset));
5557 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5563 __ JumpIfSmi(input, false_label);
5564 __ GetObjectType(input, input, scratch);
5565 __ Branch(USE_DELAY_SLOT, false_label,
5569 __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
5570 __ And(at, at, 1 << Map::kIsUndetectable);
5576 __ JumpIfSmi(input, false_label);
5577 __ GetObjectType(input, input, scratch);
5583 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5584 __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5585 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5591 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5592 __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5595 __ JumpIfSmi(input, false_label);
5597 __ ld(input, FieldMemOperand(input, HeapObject::kMapOffset));
5598 __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
5599 __ And(at, at, 1 << Map::kIsUndetectable);
5606 __ JumpIfSmi(input, false_label);
5607 __ GetObjectType(input, scratch, input);
5608 __ Branch(true_label, eq, input, Operand(JS_FUNCTION_TYPE));
5614 __ JumpIfSmi(input, false_label);
5615 __ LoadRoot(at, Heap::kNullValueRootIndex);
5616 __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5618 __ GetObjectType(input, map, scratch);
5619 __ Branch(false_label,
5621 __ Branch(USE_DELAY_SLOT, false_label,
5625 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
5626 __ And(at, at, 1 << Map::kIsUndetectable);
5634 __ Branch(false_label);
5654 __ ld(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5658 __ ld(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
5659 __ Branch(&check_frame_marker, ne, temp2,
5661 __ ld(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
5664 __ bind(&check_frame_marker);
5665 __ ld(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
5678 __ nop();
5724 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5753 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5754 __ Branch(&done, hs, sp, Operand(at));
5760 __ bind(&done);
5766 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5767 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
5769 __ bind(instr->done_label());
5797 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5801 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5804 __ And(at, object, kSmiTagMask);
5808 __ GetObjectType(object, a1, a1);
5813 __ CheckEnumCache(null_value, &call_runtime);
5815 __ ld(result, FieldMemOperand(object, HeapObject::kMapOffset));
5816 __ Branch(&use_cache);
5819 __ bind(&call_runtime);
5820 __ push(object);
5823 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
5825 __ LoadRoot(at, Heap::kMetaMapRootIndex);
5827 __ bind(&use_cache);
5835 __ EnumLength(result, map);
5836 __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0)));
5837 __ li(result, Operand(isolate()->factory()->empty_fixed_array()));
5838 __ jmp(&done);
5840 __ bind(&load_cache);
5841 __ LoadInstanceDescriptors(map, result);
5842 __ ld(result,
5844 __ ld(result,
5848 __ bind(&done);
5855 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
5865 __ Push(object, index);
5866 __ mov(cp, zero_reg);
5867 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5870 __ StoreToSafepointRegisterSlot(v0, result);
5910 __ And(scratch, index, Operand(Smi::FromInt(1)));
5911 __ Branch(deferred->entry(), ne, scratch, Operand(zero_reg));
5912 __ dsra(index, index, 1);
5914 __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg));
5915 __ SmiScale(scratch, index, kPointerSizeLog2); // In delay slot.
5916 __ Daddu(scratch, object, scratch);
5917 __ ld(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
5919 __ Branch(&done);
5921 __ bind(&out_of_object);
5922 __ ld(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
5924 __ Dsubu(scratch, result, scratch);
5925 __ ld(result, FieldMemOperand(scratch,
5927 __ bind(deferred->exit());
5928 __ bind(&done);
5934 __ sd(context, MemOperand(fp, StandardFrameConstants::kContextOffset));
5940 __ li(at, scope_info);
5941 __ Push(at, ToRegister(instr->function()));
5947 #undef __
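
The listing above is bracketed by the #define at source line 42 and the #undef at source line 5947: inside that span, every "__ op(...)" statement expands to masm()->op(...), a call on whatever the masm() accessor returns (the MacroAssembler). Below is a minimal, self-contained sketch of that convention only; MacroAssembler, LCodeGenSketch, the register number, and the printed mnemonics are stand-ins invented for illustration, not the real V8 types or instruction set.

    #include <cstdio>

    struct MacroAssembler {                  // stand-in for V8's MacroAssembler
      void li(int reg, int imm) { std::printf("li   r%d, %d\n", reg, imm); }
      void push(int reg)        { std::printf("push r%d\n", reg); }
    };

    struct LCodeGenSketch {                  // hypothetical code-generator shell
      MacroAssembler masm_;
      MacroAssembler* masm() { return &masm_; }

    #define __ masm()->                      // same trick as source line 42 above
      void EmitExample() {
        __ li(4, 42);                        // expands to masm()->li(4, 42);
        __ push(4);                          // expands to masm()->push(4);
      }
    #undef __                                // same as source line 5947 above
    };

    int main() { LCodeGenSketch().EmitExample(); }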