1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_X87
6
7#include "src/base/bits.h"
8#include "src/base/division-by-constant.h"
9#include "src/bootstrapper.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/runtime/runtime.h"
13#include "src/x87/frames-x87.h"
14#include "src/x87/macro-assembler-x87.h"
15
16namespace v8 {
17namespace internal {
18
19// -------------------------------------------------------------------------
20// MacroAssembler implementation.
21
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// When |create_code_object| is kYes, |code_object_| is initialized to a
// fresh handle to the undefined value.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
32
33
34void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
35  DCHECK(!r.IsDouble());
36  if (r.IsInteger8()) {
37    movsx_b(dst, src);
38  } else if (r.IsUInteger8()) {
39    movzx_b(dst, src);
40  } else if (r.IsInteger16()) {
41    movsx_w(dst, src);
42  } else if (r.IsUInteger16()) {
43    movzx_w(dst, src);
44  } else {
45    mov(dst, src);
46  }
47}
48
49
50void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
51  DCHECK(!r.IsDouble());
52  if (r.IsInteger8() || r.IsUInteger8()) {
53    mov_b(dst, src);
54  } else if (r.IsInteger16() || r.IsUInteger16()) {
55    mov_w(dst, src);
56  } else {
57    if (r.IsHeapObject()) {
58      AssertNotSmi(src);
59    } else if (r.IsSmi()) {
60      AssertSmi(src);
61    }
62    mov(dst, src);
63  }
64}
65
66
// Loads root-list entry |index| into |destination|. Constant roots are
// embedded directly as an immediate handle; all others are fetched from the
// roots array, using |destination| itself to hold the index temporarily.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
79
80
// Stores |source| into root-list entry |index|, clobbering |scratch| with
// the index. Only roots that may legally change after initialization can be
// written.
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
91
92
// Compares |with| against root-list entry |index|, reading the root from the
// roots array and clobbering |scratch| with the index. Works for any root,
// constant or not.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
103
104
// Compares |with| against the constant root |index| embedded as an immediate
// handle. Only valid for roots that can be treated as constants.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
109
110
// Memory-operand variant: compares |with| against the constant root |index|.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
116
117
// Pushes the constant root |index| onto the stack as an immediate handle.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
122
// Builds a Register aggregate from its register-code enumerator.
#define REG(Name) \
  { Register::kCode_##Name }

// Caller-saved general-purpose registers preserved by Push/PopCallerSaved
// around calls that may clobber them.
static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};

#undef REG

// Number of entries in saved_regs.
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
131
// Pushes all caller-saved registers from saved_regs except the (up to three)
// exclusions, then optionally saves the complete FPU state. Must be paired
// with PopCallerSaved using the same fp_mode and exclusions.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1, Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  if (fp_mode == kSaveFPRegs) {
    // Save FPU state in m108byte (fnsave writes the full 108-byte FPU image).
    sub(esp, Immediate(108));
    fnsave(Operand(esp, 0));
  }
}
150
// Inverse of PushCallerSaved: restores the FPU state (if saved) and pops the
// caller-saved registers in reverse order, honoring the same exclusions.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Restore FPU state in m108byte.
    frstor(Operand(esp, 0));
    add(esp, Immediate(108));
  }

  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}
166
// Branches to |condition_met| depending on whether |object|'s page has the
// in-new-space flag set, as selected by |cc|. Clobbers |scratch|.
void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                Label* condition_met,
                                Label::Distance distance) {
  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc,
                condition_met, distance);
}
173
174
// Records slot address |addr| in the store buffer and handles buffer
// overflow via StoreBufferOverflowStub. |object| is only used by the debug
// check (it must not be in new space). With kReturnAtEnd this also emits the
// final ret(0); with kFallThroughAtEnd execution continues after the helper.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // Trap if |object| is unexpectedly in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer: the equal (zero) condition below signals that
  // the masked top bits wrapped, i.e. the buffer is full.
  test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(not_equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
217
218
// Clamps the value on top of the x87 stack into [0, 255] and puts the result
// in |result_reg|. Values in range are converted directly; out-of-range
// values clamp to 0 (negative) or 255 (too large). The input stays on the
// FPU stack (each path pushes and pops a matching number of entries).
void MacroAssembler::ClampTOSToUint8(Register result_reg) {
  Label done, conv_failure;
  sub(esp, Immediate(kPointerSize));
  fnclex();
  fist_s(Operand(esp, 0));
  pop(result_reg);
  // If fist_s raised #IA the value was out of int32 range (or NaN).
  X87CheckIA();
  j(equal, &conv_failure, Label::kNear);
  // In [0, 255] already? Then result_reg holds the answer.
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // Out of byte range: map negative -> 0, positive -> 255.
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  fnclex();
  fldz();
  fld(1);
  FCmp();
  setcc(below, result_reg);  // 1 if negative, 0 if positive.
  dec_b(result_reg);         // 0 if negative, 255 if positive.
  bind(&done);
}
242
243
// Clamps the signed 32-bit integer in |reg| into [0, 255] in place:
// values already in range pass through, negatives become 0, and values
// above 255 become 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
252
253
// Calls the DoubleToIStub slow path to truncate the double stored at
// [input_reg + offset] to an int32 in |result_reg|. The trailing |true|
// presumably selects truncating semantics — matches the stub's constructor.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
260
261
// Truncates the double on top of the x87 stack to an int32 in |result_reg|
// by spilling it to a stack slot and invoking the slow-path stub on that
// slot. The value is not popped from the FPU stack (fst_d, not fstp_d).
void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}
268
269
// Converts the value on top of the x87 stack to an int32 in |result_reg|,
// branching to |lost_precision| if the conversion is inexact, to |is_nan|
// for NaN, and (under FAIL_ON_MINUS_ZERO) to |minus_zero| for -0.0.
// The original value remains on the FPU stack.
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  // Duplicate TOS, convert to int32 and reload the integer as a double so
  // FCmp can compare the round-tripped value against the original.
  fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    // The single-precision bit pattern of -0.0 is nonzero (sign bit set).
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}
296
297
298void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
299                                           Register input_reg) {
300  Label done, slow_case;
301
302  SlowTruncateToI(result_reg, input_reg);
303  bind(&done);
304}
305
306
// Loads the unsigned 32-bit integer |src| onto the x87 stack as a double.
// fild_s interprets the bits as signed, so when the sign bit is set the
// uint32 bias constant is added to correct the result.
// NOTE(review): |src| is re-read after the push has moved esp, so this
// assumes |src| is not esp-relative — confirm against callers.
void MacroAssembler::LoadUint32NoSSE2(const Operand& src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}
320
321
// Write barrier for a store of |value| into the element of |object| (a
// FixedArray) at smi |index|. Computes the slot address into |index| (which
// is clobbered) and delegates to RecordWrite. |value| and |index| are zapped
// in debug code and must not be used afterwards.
void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
356
357
// Write barrier for a store of |value| into the field of |object| at
// |offset| (untagged, pointer-aligned). |dst| receives the slot address and
// is clobbered; |value| and |dst| are zapped in debug code.
void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Trap if the computed slot address is not pointer-aligned.
    Label ok;
    test_b(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
396
397
// Write barrier for a store of |map| into |object|'s map slot. Only needed
// for incremental marking (maps are never in new space, so no remembered-set
// update is required). Clobbers both scratch registers; they are zapped in
// debug code.
void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Trap if the map slot address is not pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
455
456
// General write barrier: records the store of |value| into the slot at
// |address| inside |object|, calling RecordWriteStub when the page flags
// indicate the store is interesting. All three registers must be distinct;
// |address| and |value| are clobbered (zapped in debug code).
void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  // Nothing to record when neither the remembered set nor the incremental
  // marker cares about this store.
  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Trap unless the slot actually holds |value| already.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
520
// Write barrier for a store of |code_entry| into the code-entry field of
// |js_function|. Code entries live in old space, so only the incremental
// marker needs notifying, via a C call. All registers are preserved across
// the call; |scratch| is clobbered.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(!js_function.is(code_entry));
  DCHECK(!js_function.is(scratch));
  DCHECK(!code_entry.is(scratch));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // Trap unless the field already holds |code_entry|.
    Label ok;
    lea(scratch, FieldOperand(js_function, offset));
    cmp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  push(js_function);
  push(code_entry);

  const Register dst = scratch;
  lea(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);
  mov(Operand(esp, 0 * kPointerSize), js_function);
  mov(Operand(esp, 1 * kPointerSize), dst);  // Slot.
  mov(Operand(esp, 2 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  pop(code_entry);
  pop(js_function);

  bind(&done);
}
590
// Emits a call into the runtime's HandleDebuggerStatement with zero
// arguments, routed through CEntryStub and tagged with DEBUGGER_STATEMENT
// relocation info.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));  // No arguments.
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
598
599void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
600  if (shift >= 32) {
601    mov(high, low);
602    shl(high, shift - 32);
603    xor_(low, low);
604  } else {
605    shld(high, low, shift);
606    shl(low, shift);
607  }
608}
609
// Shifts the 64-bit value in high:low left by the count in cl (0..63).
// shld_cl/shl_cl only honor the count modulo 32, so when bit 5 of ecx is
// set (count >= 32) the result is fixed up by moving low into high and
// zeroing low.
void MacroAssembler::ShlPair_cl(Register high, Register low) {
  shld_cl(high, low);
  shl_cl(low);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(high, low);
  xor_(low, low);
  bind(&done);
}
620
621void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
622  if (shift >= 32) {
623    mov(low, high);
624    shr(low, shift - 32);
625    xor_(high, high);
626  } else {
627    shrd(high, low, shift);
628    shr(high, shift);
629  }
630}
631
// Logical right shift of the 64-bit value in high:low by the count in cl
// (0..63). The _cl instructions use the count modulo 32; when bit 5 of ecx
// is set (count >= 32) the result is fixed up by moving high into low and
// zeroing high.
void MacroAssembler::ShrPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  shr_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  xor_(high, high);
  bind(&done);
}
642
643void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
644  if (shift >= 32) {
645    mov(low, high);
646    sar(low, shift - 32);
647    sar(high, 31);
648  } else {
649    shrd(high, low, shift);
650    sar(high, shift);
651  }
652}
653
// Arithmetic right shift of the 64-bit value in high:low by the count in cl
// (0..63). The _cl instructions use the count modulo 32; when bit 5 of ecx
// is set (count >= 32) the result is fixed up by moving high into low and
// sign-filling high.
void MacroAssembler::SarPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  sar_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  sar(high, 31);
  bind(&done);
}
664
665bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
666  static const int kMaxImmediateBits = 17;
667  if (!RelocInfo::IsNone(x.rmode_)) return false;
668  return !is_intn(x.x_, kMaxImmediateBits);
669}
670
671
// Moves immediate |x| into |dst|. Large plain constants are emitted XORed
// with the jit cookie and un-XORed at runtime, so the raw value never
// appears verbatim in the instruction stream (JIT-spray hardening —
// presumably; confirm against jit_cookie() docs).
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}
680
681
// Pushes immediate |x|, applying the same jit-cookie obfuscation as SafeMove
// for large plain constants; the pushed slot is un-XORed in place.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
690
691
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type|, leaving the result in the flags.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
698
699
// Compares the instance type stored in |map| against |type| (byte compare).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
703
// Branches to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS: the first compare rejects the smi-only kinds below,
// the second rejects everything above the fast holey kinds.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
718
719
// Branches to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS (the two lowest kinds).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
729
730
731void MacroAssembler::StoreNumberToDoubleElements(
732    Register maybe_number,
733    Register elements,
734    Register key,
735    Register scratch,
736    Label* fail,
737    int elements_offset) {
738  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
739  JumpIfSmi(maybe_number, &smi_value, Label::kNear);
740
741  CheckMap(maybe_number,
742           isolate()->factory()->heap_number_map(),
743           fail,
744           DONT_DO_SMI_CHECK);
745
746  fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
747  jmp(&done, Label::kNear);
748
749  bind(&smi_value);
750  // Value is a smi. Convert to a double and store.
751  // Preserve original value.
752  mov(scratch, maybe_number);
753  SmiUntag(scratch);
754  push(scratch);
755  fild_s(Operand(esp, 0));
756  pop(scratch);
757  bind(&done);
758  fstp_d(FieldOperand(elements, key, times_4,
759                      FixedDoubleArray::kHeaderSize - elements_offset));
760}
761
762
// Compares |obj|'s map word against the handle |map|, setting the flags.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
766
767
// Branches to |fail| unless |obj| has map |map|. With DO_SMI_CHECK, smis
// also branch to |fail| before the map word is touched.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
779
780
// Jumps to the code object |success| when |obj|'s map matches the map held
// by weak cell |cell|; otherwise (smi, mismatch, or cleared cell) falls
// through. Clobbers both scratch registers.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
795
796
// Loads |heap_object|'s map and instance type into the given registers and
// returns the condition (zero) that holds when the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
806
807
// Loads |heap_object|'s map and instance type into the given registers and
// returns the condition (below_equal) that holds when the object is a Name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(LAST_NAME_TYPE));
  return below_equal;
}
816
817
// Compares ST(0) with ST(1) and pops both (fucompp), then transfers the
// FPU condition bits into EFLAGS via fnstsw/sahf. eax is preserved.
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}
825
826
// Classifies ST(0) with fxam and sets the equal condition iff it is -0.0
// (condition bits C3 and C1 both set). Pops the examined value off the FPU
// stack; eax is preserved.
void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  and_(eax, Immediate(0x4700));  // Keep the C3/C2/C1/C0 condition bits.
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);
}
837
838
// Classifies ST(0) with fxam and leaves the zero flag clear iff the value
// is negative (C1 set, which includes -0.0). Pops the examined value off
// the FPU stack; eax is preserved.
void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);
}
848
849
// Sets the equal condition iff the FPU status word reports an invalid
// arithmetic operand (#IA) exception: IE set with the stack-fault bit
// clear. eax is preserved.
void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0.
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}
858
859
// Sets the FPU rounding-control field to |rc| (already shifted into the RC
// bit positions), leaving the rest of the control word untouched:
// rc=00B, round to nearest.
// rc=01B, round down.
// rc=10B, round up.
// rc=11B, round toward zero.
void MacroAssembler::X87SetRC(int rc) {
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));  // Clear the RC bits.
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}
872
873
// Replaces the entire FPU control word with |cw|, via a temporary stack
// slot. Bracketed with assembler comments for disassembly readability.
void MacroAssembler::X87SetFPUCW(int cw) {
  RecordComment("-- X87SetFPUCW start --");
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
  RecordComment("-- X87SetFPUCW end--");
}
881
882
// Debug-code check that |object| is a number (smi or HeapNumber); aborts
// with kOperandNotANumber otherwise. No-op in release code.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
893
// Debug-code check that |object| is neither a smi nor a HeapNumber; aborts
// with kOperandIsANumber otherwise. No-op in release code.
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsANumber);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);
  }
}
903
// Debug-code check that |object| is a smi; aborts otherwise. No-op in
// release code.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
910
911
// Debug-code check that |object| is a string (not a smi, instance type
// below FIRST_NONSTRING_TYPE); aborts otherwise. |object| is preserved via
// push/pop. No-op in release code.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
923
924
// Debug-code check that |object| is a Name (not a smi, instance type at
// most LAST_NAME_TYPE); aborts otherwise. |object| is preserved via
// push/pop. No-op in release code.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
936
937
// Debug-code check that |object| is a JSFunction; aborts otherwise.
// |object| is preserved via Push/Pop. No-op in release code.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
948
949
// Debug-code check that |object| is a JSBoundFunction; aborts otherwise.
// |object| is preserved via Push/Pop. No-op in release code.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
960
// Debug-code check that |object| is a JSGeneratorObject; aborts otherwise.
// |object| is preserved via Push/Pop. No-op in release code.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    Push(object);
    CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAGeneratorObject);
  }
}
971
// Debug-code check that |object| is a JSReceiver (instance type at or above
// FIRST_JS_RECEIVER_TYPE, which is the top of the type range); aborts
// otherwise. |object| is preserved via Push/Pop. No-op in release code.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
983
// Debug-code check that |object| is either the undefined value or an
// AllocationSite (identified by its map word); aborts otherwise. No-op in
// release code.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // Compare the map word against the allocation-site map.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
996
997
// Debug-mode check that |object| is not a smi; emits nothing in release
// builds. |object| is not modified.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
1004
// Emits a minimal stub frame: saved ebp, new frame pointer, and a smi-tagged
// frame-type marker slot.
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));  // Frame-type marker.
}
1010
1011
// Emits the standard JS function prologue. When |code_pre_aging| is set, the
// normal prologue is replaced by a call into the code-aging builtin, padded
// with nops so both variants occupy exactly kNoCodeAgeSequenceLength bytes
// (the age sequence is patched in place later, hence the predictable size).
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
      // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
    // Pad to the fixed sequence length so the patcher can overwrite it.
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
1027
1028
// Loads the current function's type-feedback vector into |vector| by chasing
// frame function slot -> literals array -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kLiteralsOffset));
  mov(vector, FieldOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}
1034
1035
// Overload taking a constant-pool flag exists only for cross-platform
// interface parity; x87 has no out-of-line constant pool, so reaching this
// is a bug.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x87.
  UNREACHABLE();
}
1041
1042
// Sets up a typed stack frame: saved ebp, frame-type marker, and - for
// INTERNAL frames - the code object for the GC/stack walker.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
  if (type == StackFrame::INTERNAL) {
    push(Immediate(CodeObject()));
  }
  if (emit_debug_code()) {
    // The code object slot must have been patched to a real code object by
    // now; the undefined placeholder indicates a missing patch.
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
1055
1056
// Tears down a frame previously set up with EnterFrame. In debug builds,
// verifies that the frame-type marker matches |type| before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();  // mov esp, ebp; pop ebp.
}
1065
// Sets up a builtin frame: saved ebp plus context, target and argument
// count. Must be torn down with LeaveBuiltinFrame using the same registers.
void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Push(ebp);
  Move(ebp, esp);
  Push(context);
  Push(target);
  Push(argc);
}
1074
// Tears down a frame set up by EnterBuiltinFrame, restoring the three saved
// registers in reverse push order.
void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Pop(argc);
  Pop(target);
  Pop(context);
  leave();
}
1082
// Lays out the fixed part of an exit frame (frame type, saved-sp slot, code
// object) and publishes ebp, esi and ebx to the isolate's top-of-stack
// external references.
// NOTE(review): assumes ebx already holds the target C function when this is
// called - set up by the caller; confirm at call sites.
void MacroAssembler::EnterExitFramePrologue(StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  push(ebp);
  mov(ebp, esp);

  // Push the frame-type marker, reserve room for the entry stack pointer and
  // push the code object.
  push(Immediate(Smi::FromInt(frame_type)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  push(Immediate(0));  // Saved entry sp, patched before call.
  DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
1109
1110
// Reserves stack space for |argc| outgoing arguments, optionally saving the
// full x87 FPU state (fnsave needs a 108-byte area), aligns esp to the OS
// frame alignment, and patches the saved entry sp slot in the exit frame.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // fnsave stores the complete 108-byte x87 state; reserve that plus the
    // argument slots in one adjustment.
    int space = 108 + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    fnsave(MemOperand(ebp, offset - 108));
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);  // Round esp down to the alignment.
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
1133
// Builds a full exit frame for a call to C. On entry eax holds the argument
// count; on exit edi holds argc and esi points at the first argument on the
// caller's stack (callee-saved across the C call).
void MacroAssembler::EnterExitFrame(int argc, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(frame_type);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  // esi = ebp + eax * 4 + offset: address of the last (first-pushed) arg.
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}
1146
1147
// Builds an EXIT frame for an API call with |argc| argument slots and no
// FPU-state save.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue(StackFrame::EXIT);
  EnterExitFrameEpilogue(argc, false);
}
1152
1153
// Tears down an exit frame built by EnterExitFrame. If |pop_arguments|, the
// JS arguments and receiver are popped from the caller's stack as well.
// NOTE(review): the pop_arguments path assumes esi still holds the argv
// pointer established in EnterExitFrame - confirm callers preserve it.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore FPU state (saved by fnsave in the epilogue).
  if (save_doubles) {
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    frstor(MemOperand(ebp, offset - 108));
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
1178
1179
// Restores esi from the isolate's saved context (if requested) and clears
// the isolate's c_entry_fp slot so the stack walker no longer sees this
// frame. In debug builds the saved context slot is zeroed to catch stale
// reads.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1195
1196
// Tears down an API exit frame built by EnterApiExitFrame; optionally
// restores esi from the isolate's saved context.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1203
1204
// Pushes a new stack handler (a single next-pointer slot) and links it into
// the isolate's handler chain.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1217
1218
// Unlinks the topmost stack handler: pops its next-pointer back into the
// isolate's handler slot and discards any remaining handler words.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1225
1226
// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc.
//
// On exit r0 contains the 30-bit hash code; |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // When serializing, the seed must be read from the roots array at
    // runtime rather than baked in as an immediate.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // The following is the integer hash finalizer (Jenkins-style mix):
  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Mask to 30 bits so the result always fits in a smi.
  and_(r0, 0x3fffffff);
}
1270
// Loads the current new-space (or old-space, per |flags|) allocation top
// into |result|. With RESULT_CONTAINS_TOP the caller already has it and this
// only debug-verifies; otherwise |scratch|, if provided, is left holding the
// top address for a later UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    // Keep the top address in scratch so the store-back can use a shorter
    // register-indirect addressing mode.
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1297
1298
// Writes |result_end| back as the new allocation top. If |scratch| is valid
// it must still hold the top address (as set up by LoadAllocationTopHelper).
// Debug builds verify the new top is object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1317
1318
// Allocates a fixed-size object of |object_size| bytes in new space. On
// success |result| holds the tagged object; on failure control jumps to
// |gc_required|. |result_end| and |scratch| are clobbered when valid. With
// ALLOCATION_FOLDING_DOMINATOR the top pointer is intentionally left
// un-updated (a folded successor allocation commits it).
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Old space is not guaranteed to have aligned limits, so check before
      // writing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-word filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted. When no separate
  // result_end register was supplied, compute the new top in |result| itself
  // and recover the object start by subtraction below.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(top_reg, scratch, flags);
  }

  if (top_reg.is(result)) {
    // result currently holds the new top; step back to the object start and
    // tag in one subtraction.
    sub(result, Immediate(object_size - kHeapObjectTag));
  } else {
    // Tag the result.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1390
1391
// Allocates a variable-sized object of header_size + element_count *
// element_size bytes in new space. |element_count| may be a smi or a raw
// int32 (per |element_count_type|); smis are handled by shrinking the scale
// factor to cancel the smi tag shift. On success |result| holds the tagged
// object and |result_end| the untagged end address; on failure control jumps
// to |gc_required|.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by kSmiTagSize (1), so halving the
    // scale factor yields the same byte count without untagging.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  // The add above can wrap for huge element counts; treat carry as OOM.
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1468
// Allocates an object whose byte size is held in register |object_size|. On
// success |result| holds the tagged object and |result_end| the new top
// (note: tagged, since tagging happens before the top update); on failure
// control jumps to |gc_required|. |object_size| itself is preserved unless
// it aliases |result_end|.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(result_end, scratch, flags);
  }
}
1532
// Fast-path allocation of |object_size| bytes with no limit check and no
// gc_required bailout - the caller must have guaranteed the space exists
// (e.g. as an allocation-folding successor). Leaves the tagged object in
// |result| and the new top in |result_end|.
void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    // Fill the alignment gap with a one-word filler; no limit check needed
    // on this fast path.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  DCHECK(kHeapObjectTag == 1);
  inc(result);  // Tag the result.
}
1556
// Register-sized variant of FastAllocate: allocates the number of bytes held
// in |object_size| with no limit check or bailout. Leaves the tagged object
// in |result| and the new top in |result_end|.
void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size, times_1, 0));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  DCHECK(kHeapObjectTag == 1);
  inc(result);  // Tag the result.
}
1580
// Allocates a HeapNumber (mutable or immutable per |mode|) in new space and
// installs its map. The value field is left uninitialized; on failure
// control jumps to |gc_required|.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}
1597
1598
// Allocates a sequential two-byte string with room for |length| characters
// and initializes its map, length and hash field; the character payload is
// left uninitialized. On failure control jumps to |gc_required|.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field. (string_map is the map used for
  // sequential two-byte strings.)
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1627
1628
// Allocates a sequential one-byte string with room for |length| characters
// and initializes its map, length and hash field; the character payload is
// left uninitialized. On failure control jumps to |gc_required|.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to the object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1655
1656
// Constant-length variant: allocates a sequential one-byte string of exactly
// |length| characters (length known at compile time, must be positive) and
// initializes map, length and hash field.
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1674
1675
// Allocates a two-byte ConsString shell and installs its map; first/second
// and the other fields are left for the caller to fill in.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1688
1689
// Allocates a one-byte ConsString shell and installs its map; the other
// fields are left for the caller to fill in.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}
1701
1702
// Allocates a two-byte SlicedString shell and installs its map; parent,
// offset and the other fields are left for the caller to fill in.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1715
1716
// Allocates a one-byte SlicedString shell and installs its map; the other
// fields are left for the caller to fill in.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1729
1730
// Allocates a JSValue wrapper for |value| using |constructor|'s initial map
// and fully initializes all four fields (map, properties, elements, value).
// |constructor| and |scratch| are clobbered; on failure control jumps to
// |gc_required|.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  // Empty fixed array serves as both properties and elements backing store.
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // Guard that the stores above cover the whole object.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
1751
// Emits a loop that stores |filler| into every pointer-sized slot in
// [current_address, end_address). |current_address| is advanced to
// end_address; handles the empty range (current >= end) by skipping the
// loop entirely via the entry jump.
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry, Label::kNear);  // Test the condition before the first store.
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
1764
1765
1766void MacroAssembler::BooleanBitTest(Register object,
1767                                    int field_offset,
1768                                    int bit_index) {
1769  bit_index += kSmiTagSize + kSmiShiftSize;
1770  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
1771  int byte_index = bit_index / kBitsPerByte;
1772  int byte_bit_index = bit_index & (kBitsPerByte - 1);
1773  test_b(FieldOperand(object, field_offset + byte_index),
1774         Immediate(1 << byte_bit_index));
1775}
1776
1777
1778
// Emits a jump to |then_label| when |result| is zero and |op| is negative -
// i.e. when an integer result of zero actually represents -0 and must be
// handled as a heap number.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);  // Non-zero result can't be -0.
  test(op, op);
  j(sign, then_label, Label::kNear);  // Negative operand => result is -0.
  bind(&ok);
}
1789
1790
// Two-operand variant: jumps to |then_label| when |result| is zero and
// either operand is negative (their sign bits are OR-ed in |scratch|), which
// means the zero result represents -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);  // Non-zero result can't be -0.
  mov(scratch, op1);
  or_(scratch, op2);  // Sign bit set iff op1 or op2 is negative.
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
1804
1805
// Loads the constructor of |map| into |result|, following the back-pointer
// chain: the constructor-or-back-pointer slot may hold a map (a back
// pointer), in which case keep dereferencing until a non-map (the actual
// constructor or a smi) is found. |temp| is clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
1818
1819
// Loads |function|'s prototype into |result|. If the slot holds an initial
// map rather than a direct prototype, reads the prototype from that map.
// Jumps to |miss| when the slot is the hole (prototype not yet created).
// |scratch| is clobbered by the type check.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
1843
1844
// Call |stub|'s generated code with CODE_TARGET relocation, tagging the call
// site with |ast_id| for type feedback.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1849
1850
// Tail-call |stub|'s generated code (jump, so the stub returns to our caller).
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1854
1855
// Return from a stub, popping |argc| - 1 argument slots in addition to the
// return address (the receiver slot is accounted for separately).
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1860
1861
1862bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1863  return has_frame_ || !stub->SometimesSetsUpAFrame();
1864}
1865
// Call the runtime function |f| with |num_arguments| arguments already on the
// stack, going through CEntryStub. eax and ebx are clobbered to pass the
// argument count and entry point.
void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
1882
1883
1884void MacroAssembler::CallExternalReference(ExternalReference ref,
1885                                           int num_arguments) {
1886  mov(eax, Immediate(num_arguments));
1887  mov(ebx, Immediate(ref));
1888
1889  CEntryStub stub(isolate(), 1);
1890  CallStub(&stub);
1891}
1892
1893
// Tail-call the runtime function |fid|, leaving the arguments on the stack.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  // Only single-result runtime functions are supported here.
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
1916
// Tail-call through CEntryStub to the external function at |ext|. ebx is
// clobbered to pass the entry point.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             bool builtin_exit_frame) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                 builtin_exit_frame);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1925
// Prepare for a tail call: remove the current frame and shift the callee's
// arguments (plus return address and receiver) down into the space occupied
// by the caller's arguments, so the callee can reuse the caller's frame.
// |caller_args_count_reg|, |scratch0| and |scratch1| are clobbered; esp and
// ebp are rewritten to the caller's frame on exit.
void MacroAssembler::PrepareForTailCall(
    const ParameterCount& callee_args_count, Register caller_args_count_reg,
    Register scratch0, Register scratch1, ReturnAddressState ra_state,
    int number_of_temp_values_after_return_address) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
  DCHECK(ra_state != ReturnAddressState::kNotOnStack ||
         number_of_temp_values_after_return_address == 0);
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    sub(caller_args_count_reg, callee_args_count.reg());
    lea(new_sp_reg,
        Operand(ebp, caller_args_count_reg, times_pointer_size,
                StandardFrameConstants::kCallerPCOffset -
                    number_of_temp_values_after_return_address * kPointerSize));
  } else {
    lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
                            StandardFrameConstants::kCallerPCOffset -
                                (callee_args_count.immediate() +
                                 number_of_temp_values_after_return_address) *
                                    kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(esp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
    mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
        tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    DCHECK_EQ(0, number_of_temp_values_after_return_address);
    Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    lea(count_reg, Operand(callee_args_count.reg(),
                           2 + number_of_temp_values_after_return_address));
  } else {
    mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
                             number_of_temp_values_after_return_address));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  dec(count_reg);
  mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
  mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  mov(esp, new_sp_reg);
}
2007
// Shared prologue for function invocation: compare the expected and actual
// argument counts and, on a mismatch, call or jump to the arguments adaptor
// trampoline (eax = actual count, ebx = expected count, per the adaptor's
// convention). On return, *definitely_mismatches is true when the counts are
// statically known to differ, in which case the adaptor fully handles the
// invocation and the inline call sequence should be skipped.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Same register holds both counts, so they trivially match; just make
      // sure the actual count ends up in eax.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The counts might have matched at runtime; skip the inline call.
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2073
2074
// If the debugger's last step action is StepIn or stronger, call the runtime
// to prepare |fun| for stepping. All live registers holding invocation state
// (expected/actual counts, new.target, the function) are saved across the
// runtime call by smi-tagging and pushing them.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  // The comparison below relies on StepFrame comparing greater than StepIn.
  STATIC_ASSERT(StepFrame > StepIn);
  cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
  j(less, &skip_flooding);
  {
    // Make sure a frame exists for the runtime call.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Push |fun| twice: once to preserve it across the call, once as the
    // runtime call's argument.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    // Restore everything in reverse push order.
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
2116
2117
// Invoke the code of |function| (edi) with the given argument counts, going
// through the argument adaptor when the counts may differ. |new_target|, if
// valid, must be in edx; otherwise edx is loaded with undefined.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  // On a definite mismatch the adaptor has already completed the invocation.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2157
2158
// Invoke |fun| (edi), reading the expected argument count from its
// SharedFunctionInfo. Clobbers ebx (expected count) and esi (context).
void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  // Switch to the function's context.
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The formal parameter count is stored as a smi.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
2175
2176
// Invoke |fun| (edi) with a caller-supplied expected argument count and no
// new.target. Clobbers esi (context).
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  // Switch to the function's context.
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
2190
2191
// Invoke a statically known |function| by materializing it into edi first.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2200
2201
2202void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2203  if (context_chain_length > 0) {
2204    // Move up the chain of contexts to the context containing the slot.
2205    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2206    for (int i = 1; i < context_chain_length; i++) {
2207      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2208    }
2209  } else {
2210    // Slot is in the current function context.  Move it into the
2211    // destination register in case we store into it (the write barrier
2212    // cannot be allowed to destroy the context in esi).
2213    mov(dst, esi);
2214  }
2215
2216  // We should not have found a with context by walking the context chain
2217  // (i.e., the static scope chain and runtime context chain do not agree).
2218  // A variable occurring in such a scope should have slot type LOOKUP and
2219  // not CONTEXT.
2220  if (emit_debug_code()) {
2221    cmp(FieldOperand(dst, HeapObject::kMapOffset),
2222        isolate()->factory()->with_context_map());
2223    Check(not_equal, kVariableResolvedToWithContext);
2224  }
2225}
2226
2227
// Load the global proxy object of the native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
2232
2233
// If |map_in_out| is the native context's cached array map for
// |expected_kind|, replace it with the cached map for |transitioned_kind|;
// otherwise jump to |no_map_match|. |scratch| is clobbered.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
2253
2254
// Load the global function at slot |index| of the native context.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
2261
2262
// Load |function|'s initial map into |map|, aborting in debug code if the
// field does not actually hold a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2276
2277
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2283
2284
// Store the immediate |src| in the safepoint register stack slot for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2288
2289
// Load into |dst| the value saved in the safepoint register stack slot
// for register |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2293
2294
// Return the stack operand addressing |reg|'s slot in the pushed safepoint
// register area.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2298
2299
2300int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2301  // The registers are pushed starting with the lowest encoding,
2302  // which means that lowest encodings are furthest away from
2303  // the stack pointer.
2304  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2305  return kNumSafepointRegisters - reg_code - 1;
2306}
2307
2308
// Materialize the heap object |object| into |result|.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  mov(result, object);
}
2313
2314
// Compare |reg| against the heap object |object|.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  cmp(reg, object);
}
2318
// Push the heap object |object| onto the stack.
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) { Push(object); }
2320
// Compare |value| against the value held by the weak |cell|. |scratch| is
// clobbered with the cell itself.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2326
2327
// Load the value held by the weak |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
2332
2333
// Load the value held by the weak |cell| into |value|, jumping to |miss| if
// the cell has been cleared (a cleared cell holds a smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
2339
2340
// Return without popping any arguments.
void MacroAssembler::Ret() {
  ret(0);
}
2344
2345
2346void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2347  if (is_uint16(bytes_dropped)) {
2348    ret(bytes_dropped);
2349  } else {
2350    pop(scratch);
2351    add(esp, Immediate(bytes_dropped));
2352    push(scratch);
2353    ret(0);
2354  }
2355}
2356
2357
// Emit a debug check that the x87 FPU register stack holds exactly |depth|
// items, by reading the TOP field of the FPU status word. eax is preserved.
void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Turn off the stack depth check when serializer is enabled to reduce the
  // code size.
  if (serializer_enabled()) return;
  // Make sure the floating point stack is either empty or has depth items.
  DCHECK(depth <= 7);
  // This is very expensive.
  DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  // Mask for the TOP field (bits 11-13) of the FPU status word.
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  // Extract TOP from the status word and compare with the expected value.
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  // Clear any pending FPU exceptions before continuing.
  fnclex();
  pop(eax);
}
2380
2381
2382void MacroAssembler::Drop(int stack_elements) {
2383  if (stack_elements > 0) {
2384    add(esp, Immediate(stack_elements * kPointerSize));
2385  }
2386}
2387
2388
2389void MacroAssembler::Move(Register dst, Register src) {
2390  if (!dst.is(src)) {
2391    mov(dst, src);
2392  }
2393}
2394
2395
2396void MacroAssembler::Move(Register dst, const Immediate& x) {
2397  if (x.is_zero() && RelocInfo::IsNone(x.rmode_)) {
2398    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
2399  } else {
2400    mov(dst, x);
2401  }
2402}
2403
2404
// Store the immediate |x| to the memory operand |dst|.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2408
2409
// Emulate lzcnt (count leading zeros) with bsr: bsr yields the index of the
// highest set bit, so 31 ^ index = leading-zero count. A zero source, for
// which bsr leaves dst undefined, is mapped to 32.
void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for LZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
2419
2420
// Emulate tzcnt (count trailing zeros) with bsf. A zero source, for which
// bsf leaves dst undefined, is mapped to 32.
void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for TZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
2429
2430
// Population count is not implemented for this target; callers must not
// reach this.
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for POPCNT (with POPCNT)
  // if (CpuFeatures::IsSupported(POPCNT)) {
  //   CpuFeatureScope scope(this, POPCNT);
  //   popcnt(dst, src);
  //   return;
  // }
  UNREACHABLE();
}
2440
2441
2442void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2443  if (FLAG_native_code_counters && counter->Enabled()) {
2444    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2445  }
2446}
2447
2448
2449void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2450  DCHECK(value > 0);
2451  if (FLAG_native_code_counters && counter->Enabled()) {
2452    Operand operand = Operand::StaticVariable(ExternalReference(counter));
2453    if (value == 1) {
2454      inc(operand);
2455    } else {
2456      add(operand, Immediate(value));
2457    }
2458  }
2459}
2460
2461
2462void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2463  DCHECK(value > 0);
2464  if (FLAG_native_code_counters && counter->Enabled()) {
2465    Operand operand = Operand::StaticVariable(ExternalReference(counter));
2466    if (value == 1) {
2467      dec(operand);
2468    } else {
2469      sub(operand, Immediate(value));
2470    }
2471  }
2472}
2473
2474
// Increment |counter| by |value| only when condition |cc| holds. The flags
// are saved and restored around the update so the caller's condition state
// is preserved.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2488
2489
// Decrement |counter| by |value| only when condition |cc| holds. The flags
// are saved and restored around the update so the caller's condition state
// is preserved.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2503
2504
// In debug code, abort with |reason| unless condition |cc| holds.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
2508
2509
// In debug code, abort unless |elements| has one of the fast elements maps
// (fixed array, fixed double array, or copy-on-write array).
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2527
2528
// Abort with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
2536
2537
// Emit a runtime check that esp is aligned to the OS's activation frame
// alignment, trapping with int3 if it is not. No-op when the required
// alignment does not exceed the pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2551
2552
// Emit code that aborts execution with |reason| by calling the Abort
// builtin; execution does not continue past the emitted sequence.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Check if Abort() has already been initialized.
  DCHECK(isolate()->builtins()->Abort()->IsHeapObject());

  // Pass the reason to the builtin as a smi in edx.
  Move(edx, Smi::FromInt(static_cast<int>(reason)));

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
  } else {
    // A frame already exists; call directly.
    Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
  }
  // will not return here
  int3();
}
2584
2585
// Load |map|'s instance descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2590
2591
// Load into |dst| the number of own descriptors of |map|, decoded from its
// bit field 3.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2596
2597
// Load into |dst| the getter or setter (per |accessor|) stored in the
// AccessorPair at descriptor |accessor_index| of |holder|'s map.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  // Walk holder -> map -> descriptors -> accessor pair -> component.
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}
2608
2609
// Jump to |failure| unless |instance_type| denotes a sequential one-byte
// string. |scratch| is clobbered (and may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the string / representation / encoding bits, then require the
  // exact sequential one-byte string combination.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
2620
2621
// Jump to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis: ANDing the tags preserves the smi
  // tag bit only if both are smis, but a single test catches either case
  // conservatively (one smi is enough to fail).
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The shift-by-3 (times_8 lea) requires the mask to fit in three bits.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
2652
2653
// Jump to |not_unique_name| unless |operand| holds the instance type of a
// unique name (an internalized string or a symbol).
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  // Internalized strings have both the string and internalized bits clear.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Not an internalized string; the only other unique name is a symbol.
  cmpb(operand, Immediate(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
2666
2667
// Debug check for a sequential-string character store: verify that |string|
// is a heap object whose representation/encoding match |encoding_mask| and
// that |index| is a non-negative in-bounds untagged index. |value| is saved
// and restored around the type check; |index| is restored before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| as a scratch register for the instance type check.
  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  // Overflow while tagging means the index cannot be represented as a smi.
  Check(no_overflow, kIndexIsTooLarge);

  // String length is stored as a smi, so compare tagged values.
  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::kZero));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
2701
2702
// Reserves stack space for |num_arguments| pointer-sized C call arguments.
// If the OS requires activation frame alignment, esp is additionally
// aligned and its original value is saved in the slot just above the
// arguments so CallCFunction can restore it. Clobbers |scratch|.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    // Alignment is a power of two, so masking with its negation aligns down.
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2717
2718
// Calls the C function identified by |function|, materializing its address
// in eax first. Must be preceded by PrepareCallCFunction with the same
// |num_arguments|.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2725
2726
// Calls the C function whose address is in |function| and pops the argument
// area set up by a preceding PrepareCallCFunction with the same
// |num_arguments|.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // Restore the pre-alignment esp saved by PrepareCallCFunction in the
    // slot just above the arguments.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
2742
2743
#ifdef DEBUG
// Returns true if any two of the valid registers among the arguments refer
// to the same machine register. Invalid (no_reg) arguments are ignored, so
// callers may pass fewer than eight registers of interest.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register candidates[] = {reg1, reg2, reg3, reg4,
                                 reg5, reg6, reg7, reg8};
  int n_of_valid_regs = 0;
  RegList regs = 0;
  for (const Register& reg : candidates) {
    if (reg.is_valid()) {
      n_of_valid_regs++;
      regs |= reg.bit();
    }
  }
  // If two valid registers alias, their bits collapse onto one RegList bit,
  // making the population count smaller than the number of valid registers.
  return n_of_valid_regs != NumRegs(regs);
}
#endif
2771
2772
// Creates an assembler that emits directly over the |size| bytes of existing
// code at |address|, so generated instructions overwrite the code in place.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2782
2783
// Flushes the instruction cache for the patched region and verifies that
// exactly |size_| bytes of code were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2792
2793
// Tests the MemoryChunk flag bits in |mask| on the page containing |object|
// and jumps to |condition_met| when the test matches |cc| (zero/not_zero).
// |scratch| receives the page start address; it may alias |object|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Mask off the low bits of the address to get the page start.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use a byte test when the mask fits in one byte for a shorter encoding.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2815
2816
// Like CheckPageFlag, but for a compile-time-constant |map|: the containing
// page address is known statically, so the flags word is tested through an
// external reference instead of masking a register at runtime.
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use a byte test when the mask fits in one byte for a shorter encoding.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), Immediate(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2838
2839
// Jumps to |on_black| if |object| is marked black (mark bit pattern "11")
// in the marking bitmap. Clobbers |scratch0|, |scratch1| and ecx.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
2849
2850
// Jumps to |has_color| if the two consecutive mark bits for |object| equal
// (first_bit, second_bit). The bit pair may straddle a bitmap cell
// boundary; the word_boundary path handles that by testing the lowest bit
// of the following cell. Clobbers both scratch registers and ecx (via
// GetMarkBits).
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  // A zero mask means the shift carried out of the cell: the second bit is
  // the first bit of the next cell.
  j(zero, &word_boundary, Label::kNear);
  // Test the second mark bit within the same cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // Test bit 0 of the next bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize),
         Immediate(1));

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
2878
2879
// Computes the marking-bitmap location for the object at |addr_reg|:
// |bitmap_reg| receives the address of the bitmap cell containing the
// object's mark bits, and |mask_reg| a mask with the first mark bit set.
// Clobbers ecx, which is needed as the shift-count register for shl_cl.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // Page start address.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  // Byte offset of the object's bitmap cell within the page's bitmap.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index of the object within its cell.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
2900
2901
// Jumps to |value_is_white| if |value| is unmarked (white, bit pattern
// "00") in the marking bitmap. Clobbers |bitmap_scratch|, |mask_scratch|
// and ecx (via GetMarkBits).
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}
2919
2920
// Extracts the EnumLength field from |map|'s bit field 3 into |dst| as a
// Smi. Relies on the field starting at bit 0 so no shift is needed.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
2927
2928
// Walks the prototype chain of the object in eax, jumping to |call_runtime|
// unless every object has a valid (and, past the receiver, empty) enum
// cache and no elements. Clobbers ebx, ecx and edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::kZero));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; the chain ends at null.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
2969
2970
// Checks whether the JSArray in |receiver_reg| is immediately followed by
// an AllocationMemento. Sets the flags for an `equal` comparison against
// the allocation memento map; jumps to |no_memento_found| when no memento
// can be present. Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kPointerSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  // XOR with top leaves only the differing high (page) bits.
  xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  xor_(scratch_reg, receiver_reg);
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater_equal, no_memento_found);
  // Memento map check.
  bind(&map_check);
  mov(scratch_reg, Operand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Immediate(isolate()->factory()->allocation_memento_map()));
}
3011
3012
// Walks the prototype chain of |object| and jumps to |found| if any object
// in the chain uses dictionary-mode elements, or is below JS_OBJECT_TYPE
// (proxies and values). Clobbers both scratch registers.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Start at the first prototype; an immediate null means an empty chain.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  // Instance types below JS_OBJECT_TYPE (proxies, values) are treated as
  // found because their elements behavior cannot be analyzed here.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
3047
3048
// Computes the truncated (round-toward-zero) quotient |dividend| / |divisor|
// into edx using the magic-number multiplication technique instead of idiv.
// Clobbers eax; |dividend| itself is preserved and must not alias eax/edx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // edx:eax = magic multiplier * dividend; the high half lands in edx.
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  // Correction terms for magic multipliers whose top bit is set (they are
  // interpreted as negative by the signed multiply above).
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the sign bit of the intermediate result to round toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
3064
3065
3066}  // namespace internal
3067}  // namespace v8
3068
3069#endif  // V8_TARGET_ARCH_X87
3070