Lines matching refs:dst — a cross-reference listing of every source line that references `dst` in the x64 MacroAssembler; the leading number on each line is its line number in the source file.

295 Register dst,
313 leap(dst, FieldOperand(object, offset));
316 testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
322 RecordWrite(object, dst, value, save_fp, remembered_set_action,
331 Move(dst, kZapValue, Assembler::RelocInfoNone());
354 Register dst = index;
355 leap(dst, Operand(object, index, times_pointer_size,
358 RecordWrite(object, dst, value, save_fp, remembered_set_action,
374 Register dst,
378 DCHECK(!object.is(dst));
379 DCHECK(!map.is(dst));
407 leap(dst, FieldOperand(object, HeapObject::kMapOffset));
424 RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
437 Move(dst, kZapValue, Assembler::RelocInfoNone());
962 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
963 xorps(dst, dst);
964 cvtlsi2sd(dst, src);
968 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
969 xorps(dst, dst);
970 cvtlsi2sd(dst, src);
974 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
977 movsxbq(dst, src);
979 movzxbl(dst, src);
981 movsxwq(dst, src);
983 movzxwl(dst, src);
985 movl(dst, src);
987 movp(dst, src);
992 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
995 movb(dst, src);
997 movw(dst, src);
999 movl(dst, src);
1006 movp(dst, src);
1011 void MacroAssembler::Set(Register dst, int64_t x) {
1013 xorl(dst, dst);
1015 movl(dst, Immediate(static_cast<uint32_t>(x)));
1017 movq(dst, Immediate(static_cast<int32_t>(x)));
1019 movq(dst, x);
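The four branches of Set(Register, int64_t) listed above (1013-1019) choose the shortest x64 encoding for the immediate. A minimal host-side sketch of that decision, with hypothetical helper names (not V8 code):

    #include <cstdint>

    bool FitsUint32(int64_t x) { return x >= 0 && x <= 0xFFFFFFFFLL; }
    bool FitsInt32(int64_t x)  { return x >= INT32_MIN && x <= INT32_MAX; }

    // Which form Set(dst, x) emits for a given immediate.
    const char* SetEncoding(int64_t x) {
      if (x == 0)        return "xorl dst, dst";    // shortest way to zero a register
      if (FitsUint32(x)) return "movl dst, imm32";  // 32-bit write zero-extends
      if (FitsInt32(x))  return "movq dst, imm32";  // sign-extends to 64 bits
      return "movq dst, imm64";                     // full 64-bit immediate
    }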
1024 void MacroAssembler::Set(const Operand& dst, intptr_t x) {
1027 movp(dst, Immediate(static_cast<int32_t>(x)));
1030 movp(dst, kScratchRegister);
1033 movp(dst, Immediate(static_cast<int32_t>(x)));
1047 void MacroAssembler::SafeMove(Register dst, Smi* src) {
1048 DCHECK(!dst.is(kScratchRegister));
1052 Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
1054 xorp(dst, kScratchRegister);
1058 movp(dst, Immediate(value ^ jit_cookie()));
1059 xorp(dst, Immediate(jit_cookie()));
1062 Move(dst, src);
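SafeMove (1047-1062) avoids placing an attacker-controlled smi immediate verbatim in the instruction stream: the emitted constant is pre-xored with the jit cookie and the register is xored again afterwards. A sketch of the invariant behind the Move/xorp pair above, with illustrative names:

    #include <cstdint>

    // Value that actually appears in the generated code.
    uint64_t MaskedImmediate(uint64_t smi_value, uint64_t jit_cookie) {
      return smi_value ^ jit_cookie;
    }

    // What the trailing xorp(dst, ...) recovers at run time.
    uint64_t Unmask(uint64_t masked, uint64_t jit_cookie) {
      return masked ^ jit_cookie;  // equals smi_value, since (v ^ c) ^ c == v
    }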
1100 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
1102 Move(dst, Smi::FromInt(kSmiConstantRegisterValue),
1104 cmpp(dst, kSmiConstantRegister);
1109 xorl(dst, dst);
1117 leap(dst,
1121 xorl(dst, dst);
1122 leap(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
1125 xorl(dst, dst);
1126 leap(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
1129 leap(dst,
1133 leap(dst,
1137 leap(dst,
1141 movp(dst, kSmiConstantRegister);
1147 Move(dst, source, Assembler::RelocInfoNone());
1151 negp(dst);
1156 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
1158 if (!dst.is(src)) {
1159 movl(dst, src);
1161 shlp(dst, Immediate(kSmiShift));
1165 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
1167 testb(dst, Immediate(0x01));
1176 movl(Operand(dst, kSmiShift / kBitsPerByte), src);
1180 movp(dst, kScratchRegister);
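Integer32ToSmiField (1165-1180) uses the memory layout directly: assuming the SmiValuesAre32Bits() configuration, kSmiShift is 32, so the smi payload occupies the upper four bytes of the field and a 32-bit store at byte offset kSmiShift / kBitsPerByte (4, little-endian) tags the value with no shifting. A host-side sketch of that addressing trick (illustrative, not V8 code):

    #include <cstdint>
    #include <cstring>

    // Write an int32 into a field that already holds a smi (so its low four
    // bytes, including the tag bit checked by testb above, are zero).
    void StoreInt32AsSmiField(uint64_t* slot, int32_t value) {
      std::memcpy(reinterpret_cast<char*>(slot) + 4, &value, sizeof(value));
    }

    // The matching read, as used by SmiToInteger32(const Operand&) and AddSmiField.
    int32_t LoadSmiFieldAsInt32(const uint64_t* slot) {
      int32_t value;
      std::memcpy(&value, reinterpret_cast<const char*>(slot) + 4, sizeof(value));
      return value;
    }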
1185 void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
1188 if (dst.is(src)) {
1189 addl(dst, Immediate(constant));
1191 leal(dst, Operand(src, constant));
1193 shlp(dst, Immediate(kSmiShift));
1197 void MacroAssembler::SmiToInteger32(Register dst, Register src) {
1199 if (!dst.is(src)) {
1200 movp(dst, src);
1204 shrp(dst, Immediate(kSmiShift));
1207 sarl(dst, Immediate(kSmiShift));
1212 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
1214 movl(dst, Operand(src, kSmiShift / kBitsPerByte));
1217 movl(dst, src);
1218 sarl(dst, Immediate(kSmiShift));
1223 void MacroAssembler::SmiToInteger64(Register dst, Register src) {
1225 if (!dst.is(src)) {
1226 movp(dst, src);
1228 sarp(dst, Immediate(kSmiShift));
1231 movsxlq(dst, dst);
1236 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
1238 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
1241 movp(dst, src);
1242 SmiToInteger64(dst, dst);
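The Integer32ToSmi / SmiToInteger32 / SmiToInteger64 group above is the tagging scheme itself: with 32-bit smi values the payload is shifted into the upper half of the word, and untagging shifts it back down (arithmetically when the sign must survive). A host-side sketch of the arithmetic, assuming kSmiShift == 32:

    #include <cstdint>

    constexpr int kSmiShift = 32;  // assumed SmiValuesAre32Bits() layout

    // shlp(dst, Immediate(kSmiShift)): payload moves to the upper 32 bits,
    // low 32 bits (tag and padding) become zero.
    uint64_t Integer32ToSmi(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
    }

    // shrp path: plain right shift recovers the payload bits.
    int32_t SmiToInteger32(uint64_t smi) {
      return static_cast<int32_t>(smi >> kSmiShift);
    }

    // sarp path: arithmetic shift keeps the sign for 64-bit consumers.
    int64_t SmiToInteger64(uint64_t smi) {
      return static_cast<int64_t>(smi) >> kSmiShift;
    }
    // Round trip: SmiToInteger32(Integer32ToSmi(v)) == v for any int32_t v.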
1260 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
1261 AssertSmi(dst);
1262 Cmp(dst, src);
1266 void MacroAssembler::Cmp(Register dst, Smi* src) {
1267 DCHECK(!dst.is(kScratchRegister));
1269 testp(dst, dst);
1272 cmpp(dst, constant_reg);
1277 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
1278 AssertSmi(dst);
1280 cmpp(dst, src);
1284 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
1285 AssertSmi(dst);
1287 cmpp(dst, src);
1291 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
1292 AssertSmi(dst);
1294 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1297 cmpl(dst, Immediate(src));
1302 void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
1305 DCHECK(!dst.AddressUsesRegister(smi_reg));
1306 cmpp(dst, smi_reg);
1310 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
1312 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
1315 SmiToInteger32(kScratchRegister, dst);
1321 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
1327 SmiToInteger64(dst, src);
1330 if (!dst.is(src)) {
1331 movp(dst, src);
1334 sarp(dst, Immediate(kSmiShift - power));
1336 shlp(dst, Immediate(power - kSmiShift));
1341 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
1345 if (dst.is(src)) {
1346 shrp(dst, Immediate(power + kSmiShift));
1353 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
1356 if (dst.is(src1) || dst.is(src2)) {
1362 movp(dst, kScratchRegister);
1364 movp(dst, src1);
1365 orp(dst, src2);
1366 JumpIfNotSmi(dst, on_not_smis, near_jump);
1479 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1480 if (dst.is(src)) {
1481 andl(dst, Immediate(kSmiTagMask));
1483 movl(dst, Immediate(kSmiTagMask));
1484 andl(dst, src);
1489 void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
1490 if (!(src.AddressUsesRegister(dst))) {
1491 movl(dst, Immediate(kSmiTagMask));
1492 andl(dst, src);
1494 movl(dst, src);
1495 andl(dst, Immediate(kSmiTagMask));
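CheckSmiToIndicator (1479-1495) collapses a value to its tag bit: smis are tagged with zero and heap object pointers with one, so the indicator is zero exactly when the value is a smi. A one-line sketch, assuming kSmiTagMask == 1:

    #include <cstdint>

    // 0 for a smi, 1 for a tagged heap object pointer.
    int SmiIndicator(uint64_t value) {
      return static_cast<int>(value & 1);  // andl(dst, Immediate(kSmiTagMask))
    }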
1583 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1585 if (!dst.is(src)) {
1586 movp(dst, src);
1589 } else if (dst.is(src)) {
1590 DCHECK(!dst.is(kScratchRegister));
1593 addp(dst, kSmiConstantRegister);
1596 leap(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1599 leap(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1602 leap(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1606 addp(dst, constant_reg);
1612 leap(dst, Operand(src, kSmiConstantRegister, times_1, 0));
1615 leap(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1618 leap(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1621 leap(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1624 LoadSmiConstant(dst, constant);
1625 addp(dst, src);
1632 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1635 addl(Operand(dst, kSmiShift / kBitsPerByte),
1639 addp(dst, Immediate(constant));
1645 void MacroAssembler::SmiAddConstant(Register dst,
1652 if (!dst.is(src)) {
1653 movp(dst, src);
1655 } else if (dst.is(src)) {
1656 DCHECK(!dst.is(kScratchRegister));
1658 addp(dst, kScratchRegister);
1662 subp(dst, kScratchRegister);
1667 subp(dst, kScratchRegister);
1680 LoadSmiConstant(dst, constant);
1681 addp(dst, src);
1687 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1689 if (!dst.is(src)) {
1690 movp(dst, src);
1692 } else if (dst.is(src)) {
1693 DCHECK(!dst.is(kScratchRegister));
1695 subp(dst, constant_reg);
1698 LoadSmiConstant(dst, constant);
1701 addp(dst, src);
1704 LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1705 addp(dst, src);
1711 void MacroAssembler::SmiSubConstant(Register dst,
1718 if (!dst.is(src)) {
1719 movp(dst, src);
1721 } else if (dst.is(src)) {
1722 DCHECK(!dst.is(kScratchRegister));
1724 subp(dst, kScratchRegister);
1728 addp(dst, kScratchRegister);
1733 addp(dst, kScratchRegister);
1747 DCHECK(!dst.is(kScratchRegister));
1748 movp(dst, src);
1750 subp(dst, kScratchRegister);
1754 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1755 addp(dst, src);
1762 void MacroAssembler::SmiNeg(Register dst,
1766 if (dst.is(src)) {
1767 DCHECK(!dst.is(kScratchRegister));
1769 negp(dst); // Low 32 bits are retained as zero by negation.
1771 cmpp(dst, kScratchRegister);
1775 movp(dst, src);
1776 negp(dst);
1777 cmpp(dst, src);
1786 Register dst,
1791 if (dst.is(src1)) {
1793 masm->addp(dst, src2);
1796 masm->subp(dst, src2);
1800 masm->movp(dst, src1);
1801 masm->addp(dst, src2);
1807 void MacroAssembler::SmiAdd(Register dst,
1813 DCHECK(!dst.is(src2));
1814 SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
1818 void MacroAssembler::SmiAdd(Register dst,
1824 DCHECK(!src2.AddressUsesRegister(dst));
1825 SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
1829 void MacroAssembler::SmiAdd(Register dst,
1834 if (!dst.is(src1)) {
1840 leap(dst, Operand(src1, src2, times_1, 0));
1842 addp(dst, src2);
1850 Register dst,
1855 if (dst.is(src1)) {
1857 masm->subp(dst, src2);
1860 masm->addp(dst, src2);
1864 masm->movp(dst, src1);
1865 masm->subp(dst, src2);
1871 void MacroAssembler::SmiSub(Register dst,
1877 DCHECK(!dst.is(src2));
1878 SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
1882 void MacroAssembler::SmiSub(Register dst,
1888 DCHECK(!src2.AddressUsesRegister(dst));
1889 SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
1895 Register dst,
1900 if (!dst.is(src1)) {
1901 masm->movp(dst, src1);
1903 masm->subp(dst, src2);
1908 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1909 DCHECK(!dst.is(src2));
1910 SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
1914 void MacroAssembler::SmiSub(Register dst,
1917 SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
1921 void MacroAssembler::SmiMul(Register dst,
1926 DCHECK(!dst.is(src2));
1927 DCHECK(!dst.is(kScratchRegister));
1931 if (dst.is(src1)) {
1934 SmiToInteger64(dst, src1);
1935 imulp(dst, src2);
1941 testp(dst, dst);
1944 movp(dst, kScratchRegister);
1945 xorp(dst, src2);
1954 Set(dst, 0);
1958 SmiToInteger64(dst, src1);
1959 imulp(dst, src2);
1964 testp(dst, dst);
1976 void MacroAssembler::SmiDiv(Register dst,
1983 DCHECK(!dst.is(kScratchRegister));
2032 if (!dst.is(src1) && src1.is(rax)) {
2035 Integer32ToSmi(dst, rax);
2039 void MacroAssembler::SmiMod(Register dst,
2044 DCHECK(!dst.is(kScratchRegister));
2091 Integer32ToSmi(dst, rdx);
2095 void MacroAssembler::SmiNot(Register dst, Register src) {
2096 DCHECK(!dst.is(kScratchRegister));
2106 if (dst.is(src)) {
2107 xorp(dst, kScratchRegister);
2109 leap(dst, Operand(src, kScratchRegister, times_1, 0));
2111 notp(dst);
2115 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
2116 DCHECK(!dst.is(src2));
2117 if (!dst.is(src1)) {
2118 movp(dst, src1);
2120 andp(dst, src2);
2124 void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
2126 Set(dst, 0);
2127 } else if (dst.is(src)) {
2128 DCHECK(!dst.is(kScratchRegister));
2130 andp(dst, constant_reg);
2132 LoadSmiConstant(dst, constant);
2133 andp(dst, src);
2138 void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
2139 if (!dst.is(src1)) {
2141 movp(dst, src1);
2143 orp(dst, src2);
2147 void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
2148 if (dst.is(src)) {
2149 DCHECK(!dst.is(kScratchRegister));
2151 orp(dst, constant_reg);
2153 LoadSmiConstant(dst, constant);
2154 orp(dst, src);
2159 void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
2160 if (!dst.is(src1)) {
2162 movp(dst, src1);
2164 xorp(dst, src2);
2168 void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
2169 if (dst.is(src)) {
2170 DCHECK(!dst.is(kScratchRegister));
2172 xorp(dst, constant_reg);
2174 LoadSmiConstant(dst, constant);
2175 xorp(dst, src);
2180 void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
2185 if (dst.is(src)) {
2186 sarp(dst, Immediate(shift_value + kSmiShift));
2187 shlp(dst, Immediate(kSmiShift));
2195 void MacroAssembler::SmiShiftLeftConstant(Register dst,
2201 if (!dst.is(src)) {
2202 movp(dst, src);
2206 shlq(dst, Immediate(shift_value & 0x1f));
2210 if (dst.is(src)) {
2213 SmiToInteger32(dst, src);
2214 shll(dst, Immediate(shift_value));
2215 JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
2216 Integer32ToSmi(dst, dst);
2223 Register dst, Register src, int shift_value,
2226 if (dst.is(src)) {
2234 movp(dst, src);
2235 shrp(dst, Immediate(shift_value + kSmiShift));
2236 shlp(dst, Immediate(kSmiShift));
2239 SmiToInteger32(dst, src);
2240 shrp(dst, Immediate(shift_value));
2241 JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
2242 Integer32ToSmi(dst, dst);
2248 void MacroAssembler::SmiShiftLeft(Register dst,
2254 DCHECK(!dst.is(rcx));
2255 if (!dst.is(src1)) {
2256 movp(dst, src1);
2262 shlq_cl(dst);
2265 DCHECK(!dst.is(kScratchRegister));
2268 DCHECK(!dst.is(src2));
2269 DCHECK(!dst.is(rcx));
2274 if (dst.is(src1)) {
2278 SmiToInteger32(dst, src1);
2280 shll_cl(dst);
2281 JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
2282 // As src1 or src2 could not be dst, we do not need to restore them for
2283 // clobbering dst.
2293 Integer32ToSmi(dst, dst);
2299 void MacroAssembler::SmiShiftLogicalRight(Register dst,
2304 DCHECK(!dst.is(kScratchRegister));
2307 DCHECK(!dst.is(src2));
2308 DCHECK(!dst.is(rcx));
2312 if (dst.is(src1)) {
2316 SmiToInteger32(dst, src1);
2318 shrl_cl(dst);
2319 JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
2320 // As src1 or src2 could not be dst, we do not need to restore them for
2321 // clobbering dst.
2331 Integer32ToSmi(dst, dst);
2336 void MacroAssembler::SmiShiftArithmeticRight(Register dst,
2339 DCHECK(!dst.is(kScratchRegister));
2342 DCHECK(!dst.is(rcx));
2345 if (!dst.is(src1)) {
2346 movp(dst, src1);
2348 SmiToInteger32(dst, dst);
2349 sarl_cl(dst);
2350 Integer32ToSmi(dst, dst);
2354 void MacroAssembler::SelectNonSmi(Register dst,
2359 DCHECK(!dst.is(kScratchRegister));
2362 DCHECK(!dst.is(src1));
2363 DCHECK(!dst.is(src2));
2382 movp(dst, src1);
2383 xorp(dst, src2);
2384 andp(dst, kScratchRegister);
2385 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
2386 xorp(dst, src1);
2387 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
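The xorp/andp/xorp sequence ending at 2387 is a branch-free select: given that exactly one of src1 and src2 is a smi (the earlier lines bail out otherwise), it leaves the non-smi in dst. The same bit trick in host code, assuming kSmiTagMask == 1:

    #include <cstdint>

    // Precondition (enforced by the jump to on_not_smis in the real code):
    // exactly one of src1, src2 has a clear tag bit, i.e. is a smi.
    uint64_t SelectNonSmi(uint64_t src1, uint64_t src2) {
      uint64_t mask = (src1 & 1) - 1;       // all ones iff src1 is the smi
      uint64_t dst = (src1 ^ src2) & mask;  // src1 ^ src2 if src1 is the smi, else 0
      return dst ^ src1;                    // src2 if src1 is the smi, else src1
    }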
2391 SmiIndex MacroAssembler::SmiToIndex(Register dst,
2398 if (!dst.is(src)) {
2399 movp(dst, src);
2402 sarp(dst, Immediate(kSmiShift - shift));
2404 shlp(dst, Immediate(shift - kSmiShift));
2406 return SmiIndex(dst, times_1);
2410 if (!dst.is(src)) {
2411 movp(dst, src);
2415 movsxlq(dst, dst);
2417 sarq(dst, Immediate(kSmiShift));
2418 return SmiIndex(dst, times_1);
2420 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2425 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
2431 if (!dst.is(src)) {
2432 movp(dst, src);
2434 negp(dst);
2436 sarp(dst, Immediate(kSmiShift - shift));
2438 shlp(dst, Immediate(shift - kSmiShift));
2440 return SmiIndex(dst, times_1);
2444 if (!dst.is(src)) {
2445 movp(dst, src);
2447 negq(dst);
2449 sarq(dst, Immediate(kSmiShift));
2450 return SmiIndex(dst, times_1);
2452 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2457 void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
2460 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2464 addl(dst, kScratchRegister);
2493 void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
2494 DCHECK(!dst.is(scratch));
2498 Pop(dst);
2499 shrp(dst, Immediate(kSmiShift));
2501 shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
2502 orp(dst, scratch);
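PopRegisterAsTwoSmis (2493-2502) rebuilds a raw 64-bit word from two smi-tagged halves pushed earlier: the low half is popped into scratch, the high half into dst, and the shrp/shlp/orp lines above splice them back together. A sketch of the recombination, assuming 64-bit pointers and kSmiShift == 32:

    #include <cstdint>

    uint64_t CombineTwoSmis(uint64_t high_as_smi, uint64_t low_as_smi) {
      uint64_t low  = low_as_smi >> 32;           // shrp(scratch, Immediate(kSmiShift))
      uint64_t high = (high_as_smi >> 32) << 32;  // shrp then shlp on dst
      return high | low;                          // orp(dst, scratch)
    }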
2718 void MacroAssembler::Move(Register dst, Register src) {
2719 if (!dst.is(src)) {
2720 movp(dst, src);
2725 void MacroAssembler::Move(Register dst, Handle<Object> source) {
2728 Move(dst, Smi::cast(*source));
2730 MoveHeapObject(dst, source);
2735 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
2738 Move(dst, Smi::cast(*source));
2741 movp(dst, kScratchRegister);
2746 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
2749 Cmp(dst, Smi::cast(*source));
2752 cmpp(dst, kScratchRegister);
2757 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
2760 Cmp(dst, Smi::cast(*source));
2763 cmpp(dst, kScratchRegister);
2793 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
2794 if (dst.is(rax)) {
2798 Move(dst, cell, RelocInfo::CELL);
2799 movp(dst, Operand(dst, 0));
2878 void MacroAssembler::Pop(Register dst) {
2880 popq(dst);
2883 DCHECK(dst.code() != rbp.code());
2884 movp(dst, Operand(rsp, 0));
2890 void MacroAssembler::Pop(const Operand& dst) {
2892 popq(dst);
2894 Register scratch = dst.AddressUsesRegister(kScratchRegister)
2897 movp(dst, scratch);
2909 void MacroAssembler::PopQuad(const Operand& dst) {
2911 popq(dst);
2914 movp(dst, kScratchRegister);
2919 void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
2926 movsxlq(dst, FieldOperand(base, offset));
2928 movp(dst, FieldOperand(base, offset));
2929 SmiToInteger32(dst, dst);
3105 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
3107 movp(SafepointRegisterSlot(dst), imm);
3111 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
3112 movp(SafepointRegisterSlot(dst), src);
3116 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
3117 movp(dst, SafepointRegisterSlot(src));
3460 void MacroAssembler::LoadUint32(XMMRegister dst,
3466 cvtqsi2sd(dst, src);
3524 Label* minus_zero, Label::Distance dst) {
3528 j(not_equal, lost_precision, dst);
3529 j(parity_even, is_nan, dst); // NaN.
3541 j(not_zero, minus_zero, dst);
3553 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
3554 movl(dst, FieldOperand(map, Map::kBitField3Offset));
3555 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
3559 void MacroAssembler::EnumLength(Register dst, Register map) {
3561 movl(dst, FieldOperand(map, Map::kBitField3Offset));
3562 andl(dst, Immediate(Map::EnumLengthBits::kMask));
3563 Integer32ToSmi(dst, dst);
4807 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4810 movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4812 movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4818 movp(dst, rsi);
4826 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),