Lines Matching refs:scratch

86                                Register scratch,
91 mov(scratch, Immediate(index));
92 mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
98 Register scratch,
102 mov(scratch, Immediate(index));
103 cmp(with, Operand::StaticArray(scratch,
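
Both excerpts above follow the same roots-array pattern: the root index is loaded into scratch and then used as the scaled index of a StaticArray operand. A minimal sketch, in plain C++, of the address that operand resolves to (names other than roots_array_start and kPointerSize are illustrative):

    // Sketch only: StaticArray(scratch, times_pointer_size, roots_array_start)
    // addresses roots_array_start + scratch * kPointerSize.
    #include <cstdint>
    constexpr int kPointerSize = 4;  // ia32 pointer width
    inline uintptr_t RootSlotAddress(uintptr_t roots_array_start, uint32_t index) {
      return roots_array_start + index * kPointerSize;  // 'index' sits in scratch
    }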
126 Register scratch,
131 if (scratch.is(object)) {
132 and_(scratch, Immediate(~Page::kPageAlignmentMask));
134 mov(scratch, Immediate(~Page::kPageAlignmentMask));
135 and_(scratch, object);
143 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
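
The block at 126-143 computes the object's page (MemoryChunk) start in scratch, either by masking object in place or by masking a copy, and then tests the chunk's flags. A hedged sketch of that address arithmetic, with the flags offset and flag mask passed in as assumptions:

    // Sketch, not the V8 source: clear the page-offset bits of the address to
    // reach the MemoryChunk header, then test a flag bit stored there.
    #include <cstdint>
    inline bool PageFlagSet(uintptr_t object, uintptr_t page_alignment_mask,
                            uintptr_t flags_offset, uint32_t flag_mask) {
      uintptr_t chunk = object & ~page_alignment_mask;  // and_(scratch, ~mask)
      uint32_t flags = *reinterpret_cast<uint32_t*>(chunk + flags_offset);
      return (flags & flag_mask) != 0;                  // test_b / test
    }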
152 Register scratch,
158 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
165 mov(scratch, Operand::StaticVariable(store_buffer));
167 mov(Operand(scratch, 0), addr);
169 add(scratch, Immediate(kPointerSize));
171 mov(Operand::StaticVariable(store_buffer), scratch);
174 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
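
Lines 152-174 push the written slot address onto the store buffer: scratch receives the current buffer top, the address is stored there, the top is bumped by one pointer, written back, and its overflow bit is tested. A minimal sketch of that sequence; the overflow-bit value and the buffer layout are assumptions:

    // Sketch of the store-buffer append in the excerpt above.
    #include <cstdint>
    struct StoreBufferTop { uintptr_t* top; };              // the StaticVariable
    constexpr uintptr_t kOverflowBit = uintptr_t{1} << 16;  // placeholder value
    inline bool RecordSlot(StoreBufferTop* buffer, uintptr_t slot_addr) {
      uintptr_t* scratch = buffer->top;  // mov(scratch, Operand::StaticVariable(...))
      *scratch = slot_addr;              // mov(Operand(scratch, 0), addr)
      ++scratch;                         // add(scratch, Immediate(kPointerSize))
      buffer->top = scratch;             // mov(Operand::StaticVariable(...), scratch)
      // test(scratch, StoreBuffer::kStoreBufferOverflowBit): a set bit means the
      // buffer is full and the slow path must be taken.
      return (reinterpret_cast<uintptr_t>(scratch) & kOverflowBit) != 0;
    }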
254 XMMRegister scratch,
258 DCHECK(!input_reg.is(scratch));
260 Cvtsi2sd(scratch, Operand(result_reg));
261 ucomisd(scratch, input_reg);
335 // Input is clobbered. Restore number from double scratch.
545 value, // Used as scratch.
552 value, // Used as scratch.
784 Register scratch,
787 IsInstanceJSObjectType(map, scratch, fail);
792 Register scratch,
794 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
795 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
796 cmp(scratch,
1253 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
1258 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1259 mov(scratch,
1260 Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
1261 SmiUntag(scratch);
1262 xor_(r0, scratch);
1269 mov(scratch, r0);
1271 shl(scratch, 15);
1272 add(r0, scratch);
1274 mov(scratch, r0);
1275 shr(scratch, 12);
1276 xor_(r0, scratch);
1280 mov(scratch, r0);
1281 shr(scratch, 4);
1282 xor_(r0, scratch);
1286 mov(scratch, r0);
1287 shr(scratch, 16);
1288 xor_(r0, scratch);
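
GetNumberHash (1253-1288) computes V8's integer hash in r0, using scratch to hold the shifted copy at each step; the steps with no scratch operand (presumably the initial not_, the lea that adds hash << 2, and the imul by 2057) do not appear in this listing. A sketch of the equivalent scalar computation, with 'seed' standing in for the untagged hash seed loaded from the roots array:

    // Sketch of the hash sequence generated by GetNumberHash.
    #include <cstdint>
    inline uint32_t NumberHash(uint32_t key, uint32_t seed) {
      uint32_t hash = key ^ seed;   // xor_(r0, scratch)
      hash = ~hash + (hash << 15);  // mov / shl 15 / add (not_ not listed)
      hash ^= hash >> 12;           // mov / shr 12 / xor
      hash += hash << 2;            // emitted with lea, so no scratch use
      hash ^= hash >> 4;            // mov / shr 4 / xor
      hash *= 2057;                 // imul, no scratch use
      hash ^= hash >> 16;           // mov / shr 16 / xor
      return hash;
    }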
1368 Register scratch,
1375 // No use of scratch if allocation top is provided.
1376 DCHECK(scratch.is(no_reg));
1385 // Move address of new object to result. Use scratch register if available.
1386 if (scratch.is(no_reg)) {
1389 mov(scratch, Immediate(allocation_top));
1390 mov(result, Operand(scratch, 0));
1396 Register scratch,
1406 // Update new top. Use scratch if available.
1407 if (scratch.is(no_reg)) {
1410 mov(Operand(scratch, 0), result_end);
1418 Register scratch,
1430 if (scratch.is_valid()) {
1431 mov(scratch, Immediate(0x7291));
1440 LoadAllocationTopHelper(result, scratch, flags);
1474 UpdateAllocationTopHelper(top_reg, scratch, flags);
1497 Register scratch,
1506 if (scratch.is_valid()) {
1507 mov(scratch, Immediate(0x7291));
1517 LoadAllocationTopHelper(result, scratch, flags);
1565 UpdateAllocationTopHelper(result_end, scratch, flags);
1572 Register scratch,
1581 if (scratch.is_valid()) {
1582 mov(scratch, Immediate(0x7291));
1592 LoadAllocationTopHelper(result, scratch, flags);
1631 UpdateAllocationTopHelper(result_end, scratch, flags);
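
The three Allocate variants (1368-1631) share one shape: in debug builds scratch is filled with the 0x7291 marker, LoadAllocationTopHelper reads the current allocation top (straight from the external address when scratch is no_reg, otherwise through scratch), the top is bumped by the object size, and UpdateAllocationTopHelper writes it back. A bump-pointer sketch of what the helpers amount to, with the limit check simplified and all names other than the two helpers assumed:

    // Sketch of bump allocation; AllocationTop stands in for the external
    // allocation_top / limit references used by the real helpers.
    #include <cstdint>
    struct AllocationTop { uintptr_t top; uintptr_t limit; };
    inline bool BumpAllocate(AllocationTop* space, uintptr_t size_in_bytes,
                             uintptr_t* result) {
      uintptr_t object = space->top;             // LoadAllocationTopHelper
      uintptr_t new_top = object + size_in_bytes;
      if (new_top > space->limit) return false;  // would branch to gc_required
      space->top = new_top;                      // UpdateAllocationTopHelper
      *result = object;                          // address of the new object
      return true;
    }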
1816 // long or aligned copies. The contents of scratch and length are destroyed.
1826 Register scratch) {
1837 mov(scratch, Operand(source, length, times_1, -4));
1838 mov(Operand(destination, length, times_1, -4), scratch);
1847 mov(scratch, ecx);
1850 and_(scratch, Immediate(0x3));
1851 add(destination, scratch);
1855 mov(scratch, Operand(source, 8));
1856 mov(Operand(destination, 8), scratch);
1858 mov(scratch, Operand(source, 4));
1859 mov(Operand(destination, 4), scratch);
1861 mov(scratch, Operand(source, 0));
1862 mov(Operand(destination, 0), scratch);
1871 mov_b(scratch, Operand(source, 0));
1872 mov_b(Operand(destination, 0), scratch);
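
CopyBytes (1816-1872) moves data through scratch a machine word at a time: the trailing word is copied first, scratch then saves the length so the destination can be advanced by the leftover low two bits after the word-granular bulk copy, short runs fall through the word copies at offsets 8, 4 and 0, and the mov_b pair at the end handles remaining bytes one at a time. A simplified sketch of the same idea in plain C++ (no alignment fix-up, non-overlapping buffers assumed):

    // Sketch: trailing word first, then bulk word copies, byte loop for short runs.
    #include <cstdint>
    #include <cstring>
    inline void CopyBytesSketch(const uint8_t* source, uint8_t* destination,
                                size_t length) {
      if (length >= 4) {
        uint32_t scratch;
        std::memcpy(&scratch, source + length - 4, 4);   // last word first
        std::memcpy(destination + length - 4, &scratch, 4);
        for (size_t i = 0; i + 4 <= length; i += 4) {    // bulk word copies
          std::memcpy(&scratch, source + i, 4);
          std::memcpy(destination + i, &scratch, 4);
        }
      } else {
        for (size_t i = 0; i < length; ++i) {            // short_loop
          destination[i] = source[i];
        }
      }
    }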
1924 Register scratch,
1929 mov(scratch, op1);
1930 or_(scratch, op2);
1938 Register scratch,
1951 mov(scratch,
1953 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1958 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1959 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1975 CmpObjectType(result, MAP_TYPE, scratch);
2462 Register scratch,
2465 mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2466 mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2469 mov(scratch, Operand(scratch,
2474 cmp(map_in_out, FieldOperand(scratch, offset));
2480 mov(map_in_out, FieldOperand(scratch, offset));
2581 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2585 pop(scratch);
2587 push(scratch);
2790 Register scratch,
2794 mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2795 movd(dst, scratch);
2808 Register scratch = scratch2;
2827 mov(scratch, object);
2828 SmiUntag(scratch);
2835 mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
2836 xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
2837 // Object is heap number and hash is now in scratch. Calculate cache index.
2838 and_(scratch, mask);
2839 Register index = scratch;
2854 // Object is smi and hash is now in scratch. Calculate cache index.
2855 and_(scratch, mask);
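
In the number-string cache probe (2808-2855), scratch carries the hash used as the cache index: a smi key is simply untagged, while a heap-number key is hashed by xoring the low and high 32-bit halves of its double bits; either hash is then masked with mask. A sketch of the heap-number case, with mask simply taken as a parameter:

    // Sketch of hashing a heap number into a number-string cache index.
    #include <cstdint>
    #include <cstring>
    inline uint32_t NumberCacheIndex(double value, uint32_t mask) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      uint32_t low  = static_cast<uint32_t>(bits);        // kValueOffset word
      uint32_t high = static_cast<uint32_t>(bits >> 32);  // kValueOffset + 4
      return (low ^ high) & mask;                         // and_(scratch, mask)
    }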
2876 Register instance_type, Register scratch, Label* failure) {
2877 if (!scratch.is(instance_type)) {
2878 mov(scratch, instance_type);
2880 and_(scratch,
2882 cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
2968 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
2973 mov(scratch, esp);
2977 mov(Operand(esp, num_arguments * kPointerSize), scratch);
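
PrepareCallCFunction (2968-2977) saves the incoming esp in scratch, reserves room for the argument slots plus one extra word, aligns the stack pointer, and stores the saved esp in that extra slot so the frame can be torn down after the call. A sketch of the bookkeeping, with the frame alignment value an assumption:

    // Sketch of the esp bookkeeping; the stack itself is modeled as arithmetic.
    #include <cstdint>
    constexpr int kPointerSize = 4;
    constexpr uintptr_t kFrameAlignment = 16;  // assumed ABI stack alignment
    inline uintptr_t PrepareFrame(uintptr_t esp, int num_arguments,
                                  uintptr_t* saved_esp_slot) {
      uintptr_t scratch = esp;                    // mov(scratch, esp)
      esp -= (num_arguments + 1) * kPointerSize;  // argument slots + 1 spare
      esp &= ~(kFrameAlignment - 1);              // align downward
      *saved_esp_slot = scratch;                  // stored at esp + num_arguments * 4
      return esp;
    }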
3061 Register scratch,
3067 if (scratch.is(object)) {
3068 and_(scratch, Immediate(~Page::kPageAlignmentMask));
3070 mov(scratch, Immediate(~Page::kPageAlignmentMask));
3071 and_(scratch, object);
3074 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3077 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3107 Register scratch,
3110 mov(scratch, map);
3111 mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
3112 and_(scratch, Immediate(Map::Deprecated::kMask));
3367 // scratch contained elements pointer.