macro-assembler-x64.cc revision 8c569c4a1286b419597940890e04517bc59eefcd
1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_X64)
31
32#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "assembler-x64.h"
35#include "macro-assembler-x64.h"
36#include "serialize.h"
37#include "debug.h"
38#include "heap.h"
39
40namespace v8 {
41namespace internal {
42
43MacroAssembler::MacroAssembler(void* buffer, int size)
44    : Assembler(buffer, size),
45      generating_stub_(false),
46      allow_stub_calls_(true),
47      code_object_(Heap::undefined_value()) {
48}
49
50
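// Root-list accessors: kRootRegister holds the address of the heap's root
// array, so a root can be reached with a single memory operand, the root
// index scaled by the pointer size. Illustrative use (as seen below in
// IllegalOperation): LoadRoot(rax, Heap::kUndefinedValueRootIndex) loads the
// undefined value into rax.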
51void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
52  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
53}
54
55
56void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
57  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
58}
59
60
61void MacroAssembler::PushRoot(Heap::RootListIndex index) {
62  push(Operand(kRootRegister, index << kPointerSizeLog2));
63}
64
65
66void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
67  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
68}
69
70
71void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
72  LoadRoot(kScratchRegister, index);
73  cmpq(with, kScratchRegister);
74}
75
76
77void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
78  CompareRoot(rsp, Heap::kStackLimitRootIndex);
79  j(below, on_stack_overflow);
80}
81
82
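// Write-barrier helper: marks the page region containing 'addr' as dirty.
// 'object' is clobbered (masked down to the start of its page) and 'addr' is
// reduced to the region number within that page; bts then sets the matching
// bit in the page's dirty-marks word at Page::kDirtyFlagOffset.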
83void MacroAssembler::RecordWriteHelper(Register object,
84                                       Register addr,
85                                       Register scratch) {
86  if (FLAG_debug_code) {
87    // Check that the object is not in new space.
88    NearLabel not_in_new_space;
89    InNewSpace(object, scratch, not_equal, &not_in_new_space);
90    Abort("new-space object passed to RecordWriteHelper");
91    bind(&not_in_new_space);
92  }
93
94  // Compute the page start address from the heap object pointer, and reuse
95  // the 'object' register for it.
96  and_(object, Immediate(~Page::kPageAlignmentMask));
97
98  // Compute the number of the region covering addr. See the
99  // Page::GetRegionNumberForAddress method for more details.
100  shrl(addr, Immediate(Page::kRegionSizeLog2));
101  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
102
103  // Set dirty mark for region.
104  bts(Operand(object, Page::kDirtyFlagOffset), addr);
105}
106
107
108void MacroAssembler::RecordWrite(Register object,
109                                 int offset,
110                                 Register value,
111                                 Register index) {
112  // The compiled code assumes that record write doesn't change the
113  // context register, so we check that none of the clobbered
114  // registers are rsi.
115  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
116
117  // First, check if a write barrier is even needed. The tests below
118  // catch stores of Smis and stores into young gen.
119  Label done;
120  JumpIfSmi(value, &done);
121
122  RecordWriteNonSmi(object, offset, value, index);
123  bind(&done);
124
125  // Clobber all input registers when running with the debug-code flag
126  // turned on to provoke errors. This clobbering repeats the
127  // clobbering done inside RecordWriteNonSmi but it's necessary to
128  // avoid having the fast case for smis leave the registers
129  // unchanged.
130  if (FLAG_debug_code) {
131    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
132    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
133    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
134  }
135}
136
137
138void MacroAssembler::RecordWrite(Register object,
139                                 Register address,
140                                 Register value) {
141  // The compiled code assumes that record write doesn't change the
142  // context register, so we check that none of the clobbered
143  // registers are rsi.
144  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
145
146  // First, check if a write barrier is even needed. The tests below
147  // catch stores of Smis and stores into young gen.
148  Label done;
149  JumpIfSmi(value, &done);
150
151  InNewSpace(object, value, equal, &done);
152
153  RecordWriteHelper(object, address, value);
154
155  bind(&done);
156
157  // Clobber all input registers when running with the debug-code flag
158  // turned on to provoke errors.
159  if (FLAG_debug_code) {
160    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
161    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
162    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
163  }
164}
165
166
167void MacroAssembler::RecordWriteNonSmi(Register object,
168                                       int offset,
169                                       Register scratch,
170                                       Register index) {
171  Label done;
172
173  if (FLAG_debug_code) {
174    NearLabel okay;
175    JumpIfNotSmi(object, &okay);
176    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
177    bind(&okay);
178
179    if (offset == 0) {
180      // The index register must hold an untagged int32 value.
181      Register tmp = index.is(rax) ? rbx : rax;
182      push(tmp);
183      movl(tmp, index);
184      cmpq(tmp, index);
185      Check(equal, "Index register for RecordWrite must be untagged int32.");
186      pop(tmp);
187    }
188  }
189
190  // Test that the object address is not in the new space. We cannot
191  // update page dirty marks for new space pages.
192  InNewSpace(object, scratch, equal, &done);
193
194  // The offset is relative to a tagged or untagged HeapObject pointer,
195  // so either offset or offset + kHeapObjectTag must be a
196  // multiple of kPointerSize.
197  ASSERT(IsAligned(offset, kPointerSize) ||
198         IsAligned(offset + kHeapObjectTag, kPointerSize));
199
200  Register dst = index;
201  if (offset != 0) {
202    lea(dst, Operand(object, offset));
203  } else {
204    // array access: calculate the destination address in the same manner as
205    // KeyedStoreIC::GenerateGeneric.
206    lea(dst, FieldOperand(object,
207                          index,
208                          times_pointer_size,
209                          FixedArray::kHeaderSize));
210  }
211  RecordWriteHelper(object, dst, scratch);
212
213  bind(&done);
214
215  // Clobber all input registers when running with the debug-code flag
216  // turned on to provoke errors.
217  if (FLAG_debug_code) {
218    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
219    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
220    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
221  }
222}
223
224void MacroAssembler::Assert(Condition cc, const char* msg) {
225  if (FLAG_debug_code) Check(cc, msg);
226}
227
228
229void MacroAssembler::AssertFastElements(Register elements) {
230  if (FLAG_debug_code) {
231    NearLabel ok;
232    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
233                Heap::kFixedArrayMapRootIndex);
234    j(equal, &ok);
235    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
236                Heap::kFixedCOWArrayMapRootIndex);
237    j(equal, &ok);
238    Abort("JSObject with fast elements map has slow elements");
239    bind(&ok);
240  }
241}
242
243
244void MacroAssembler::Check(Condition cc, const char* msg) {
245  NearLabel L;
246  j(cc, &L);
247  Abort(msg);
248  // will not return here
249  bind(&L);
250}
251
252
253void MacroAssembler::CheckStackAlignment() {
254  int frame_alignment = OS::ActivationFrameAlignment();
255  int frame_alignment_mask = frame_alignment - 1;
256  if (frame_alignment > kPointerSize) {
257    ASSERT(IsPowerOf2(frame_alignment));
258    NearLabel alignment_as_expected;
259    testq(rsp, Immediate(frame_alignment_mask));
260    j(zero, &alignment_as_expected);
261    // Abort if stack is not aligned.
262    int3();
263    bind(&alignment_as_expected);
264  }
265}
266
267
268void MacroAssembler::NegativeZeroTest(Register result,
269                                      Register op,
270                                      Label* then_label) {
271  NearLabel ok;
272  testl(result, result);
273  j(not_zero, &ok);
274  testl(op, op);
275  j(sign, then_label);
276  bind(&ok);
277}
278
279
280void MacroAssembler::Abort(const char* msg) {
281  // We want to pass the msg string like a smi to avoid GC
282  // problems; however, msg is not guaranteed to be aligned
283  // properly. Instead, we pass an aligned pointer that is
284  // a proper v8 smi, but also pass the alignment difference
285  // from the real pointer as a smi.
286  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
287  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
288  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
289  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
290#ifdef DEBUG
291  if (msg != NULL) {
292    RecordComment("Abort message: ");
293    RecordComment(msg);
294  }
295#endif
296  // Disable stub call restrictions to always allow calls to abort.
297  set_allow_stub_calls(true);
298
299  push(rax);
300  movq(kScratchRegister, p0, RelocInfo::NONE);
301  push(kScratchRegister);
302  movq(kScratchRegister,
303       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
304       RelocInfo::NONE);
305  push(kScratchRegister);
306  CallRuntime(Runtime::kAbort, 2);
307  // will not return here
308  int3();
309}
310
311
312void MacroAssembler::CallStub(CodeStub* stub) {
313  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
314  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
315}
316
317
318MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
319  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
320  MaybeObject* result = stub->TryGetCode();
321  if (!result->IsFailure()) {
322    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
323         RelocInfo::CODE_TARGET);
324  }
325  return result;
326}
327
328
329void MacroAssembler::TailCallStub(CodeStub* stub) {
330  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
331  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
332}
333
334
335MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
336  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
337  MaybeObject* result = stub->TryGetCode();
338  if (!result->IsFailure()) {
339    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
340        RelocInfo::CODE_TARGET);
341  }
342  return result;
343}
344
345
346void MacroAssembler::StubReturn(int argc) {
347  ASSERT(argc >= 1 && generating_stub());
348  ret((argc - 1) * kPointerSize);
349}
350
351
352void MacroAssembler::IllegalOperation(int num_arguments) {
353  if (num_arguments > 0) {
354    addq(rsp, Immediate(num_arguments * kPointerSize));
355  }
356  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
357}
358
359
360void MacroAssembler::IndexFromHash(Register hash, Register index) {
361  // The assert checks that the constants for the maximum number of digits
362  // for an array index cached in the hash field and the number of bits
363  // reserved for it do not conflict.
364  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
365         (1 << String::kArrayIndexValueBits));
366  // We want the smi-tagged index in the index register. Even if we
367  // subsequently go to the slow case, converting the hash to a smi is
368  // always valid.
369  // hash: the key's hash field, including its array index value.
370  and_(hash, Immediate(String::kArrayIndexValueMask));
371  shr(hash, Immediate(String::kHashShift));
372  // Here we actually clobber the key, which will be used if we call into the
373  // runtime later. However, as the new key is the numeric value of a string
374  // key, there is no difference in using either key.
375  Integer32ToSmi(index, hash);
376}
377
378
379void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
380  CallRuntime(Runtime::FunctionForId(id), num_arguments);
381}
382
383
384MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
385                                            int num_arguments) {
386  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
387}
388
389
390void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
391  // If the expected number of arguments of the runtime function is
392  // constant, we check that the actual number of arguments matches the
393  // expectation.
394  if (f->nargs >= 0 && f->nargs != num_arguments) {
395    IllegalOperation(num_arguments);
396    return;
397  }
398
399  // TODO(1236192): Most runtime routines don't need the number of
400  // arguments passed in because it is constant. At some point we
401  // should remove this need and make the runtime routine entry code
402  // smarter.
403  Set(rax, num_arguments);
404  movq(rbx, ExternalReference(f));
405  CEntryStub ces(f->result_size);
406  CallStub(&ces);
407}
408
409
410MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
411                                            int num_arguments) {
412  if (f->nargs >= 0 && f->nargs != num_arguments) {
413    IllegalOperation(num_arguments);
414    // Since we did not call the stub, there was no allocation failure.
415    // Return some non-failure object.
416    return Heap::undefined_value();
417  }
418
419  // TODO(1236192): Most runtime routines don't need the number of
420  // arguments passed in because it is constant. At some point we
421  // should remove this need and make the runtime routine entry code
422  // smarter.
423  Set(rax, num_arguments);
424  movq(rbx, ExternalReference(f));
425  CEntryStub ces(f->result_size);
426  return TryCallStub(&ces);
427}
428
429
430void MacroAssembler::CallExternalReference(const ExternalReference& ext,
431                                           int num_arguments) {
432  Set(rax, num_arguments);
433  movq(rbx, ext);
434
435  CEntryStub stub(1);
436  CallStub(&stub);
437}
438
439
440void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
441                                               int num_arguments,
442                                               int result_size) {
443  // ----------- S t a t e -------------
444  //  -- rsp[0] : return address
445  //  -- rsp[8] : argument num_arguments - 1
446  //  ...
447  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
448  // -----------------------------------
449
450  // TODO(1236192): Most runtime routines don't need the number of
451  // arguments passed in because it is constant. At some point we
452  // should remove this need and make the runtime routine entry code
453  // smarter.
454  Set(rax, num_arguments);
455  JumpToExternalReference(ext, result_size);
456}
457
458
459MaybeObject* MacroAssembler::TryTailCallExternalReference(
460    const ExternalReference& ext, int num_arguments, int result_size) {
461  // ----------- S t a t e -------------
462  //  -- rsp[0] : return address
463  //  -- rsp[8] : argument num_arguments - 1
464  //  ...
465  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
466  // -----------------------------------
467
468  // TODO(1236192): Most runtime routines don't need the number of
469  // arguments passed in because it is constant. At some point we
470  // should remove this need and make the runtime routine entry code
471  // smarter.
472  Set(rax, num_arguments);
473  return TryJumpToExternalReference(ext, result_size);
474}
475
476
477void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
478                                     int num_arguments,
479                                     int result_size) {
480  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
481}
482
483
484MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
485                                                int num_arguments,
486                                                int result_size) {
487  return TryTailCallExternalReference(ExternalReference(fid),
488                                      num_arguments,
489                                      result_size);
490}
491
492
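// Computes the distance between two external references as an int. Used by
// TryCallApiFunctionAndReturn below so the handle scope's limit and level
// fields can be addressed as small offsets from one base register that points
// at the 'next' field.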
493static int Offset(ExternalReference ref0, ExternalReference ref1) {
494  int64_t offset = (ref0.address() - ref1.address());
495  // Check that the offset fits into an int.
496  ASSERT(static_cast<int>(offset) == offset);
497  return static_cast<int>(offset);
498}
499
500
501void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
502#ifdef _WIN64
503  // We need to prepare a slot for the result handle on the stack and put
504  // a pointer to it into the 1st argument register.
505  EnterApiExitFrame(arg_stack_space + 1);
506
507  // rcx must be used to pass the pointer to the return value slot.
508  lea(rcx, StackSpaceOperand(arg_stack_space));
509#else
510  EnterApiExitFrame(arg_stack_space);
511#endif
512}
513
514
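// Calls the given API function and performs the surrounding HandleScope
// bookkeeping: the current next/limit/level fields are saved in callee-saved
// registers, the level is bumped before the call and restored afterwards,
// extensions allocated by the callee are deleted if the limit changed, and a
// scheduled exception is promoted instead of returning normally. Any failure
// produced while generating the promotion call is propagated to the caller.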
515MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
516    ApiFunction* function, int stack_space) {
517  Label empty_result;
518  Label prologue;
519  Label promote_scheduled_exception;
520  Label delete_allocated_handles;
521  Label leave_exit_frame;
522  Label write_back;
523
524  ExternalReference next_address =
525      ExternalReference::handle_scope_next_address();
526  const int kNextOffset = 0;
527  const int kLimitOffset = Offset(
528      ExternalReference::handle_scope_limit_address(),
529      next_address);
530  const int kLevelOffset = Offset(
531      ExternalReference::handle_scope_level_address(),
532      next_address);
533  ExternalReference scheduled_exception_address =
534      ExternalReference::scheduled_exception_address();
535
536  // Allocate HandleScope in callee-save registers.
537  Register prev_next_address_reg = r14;
538  Register prev_limit_reg = rbx;
539  Register base_reg = r12;
540  movq(base_reg, next_address);
541  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
542  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
543  addl(Operand(base_reg, kLevelOffset), Immediate(1));
544  // Call the api function!
545  movq(rax,
546       reinterpret_cast<int64_t>(function->address()),
547       RelocInfo::RUNTIME_ENTRY);
548  call(rax);
549
550#ifdef _WIN64
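  // The Win64 calling convention returns the v8::Handle through a pointer to
  // caller-provided memory (the slot set up in PrepareCallApiFunction), so on
  // this path rax holds the address of the handle rather than the handle
  // itself.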
551  // rax keeps a pointer to v8::Handle, unpack it.
552  movq(rax, Operand(rax, 0));
553#endif
554  // Check if the result handle holds 0.
555  testq(rax, rax);
556  j(zero, &empty_result);
557  // It was non-zero.  Dereference to get the result value.
558  movq(rax, Operand(rax, 0));
559  bind(&prologue);
560
561  // No more valid handles (the result handle was the last one). Restore
562  // previous handle scope.
563  subl(Operand(base_reg, kLevelOffset), Immediate(1));
564  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
565  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
566  j(not_equal, &delete_allocated_handles);
567  bind(&leave_exit_frame);
568
569  // Check if the function scheduled an exception.
570  movq(rsi, scheduled_exception_address);
571  Cmp(Operand(rsi, 0), Factory::the_hole_value());
572  j(not_equal, &promote_scheduled_exception);
573
574  LeaveApiExitFrame();
575  ret(stack_space * kPointerSize);
576
577  bind(&promote_scheduled_exception);
578  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
579                                           0, 1);
580  if (result->IsFailure()) {
581    return result;
582  }
583
584  bind(&empty_result);
585  // It was zero; the result is undefined.
586  Move(rax, Factory::undefined_value());
587  jmp(&prologue);
588
589  // HandleScope limit has changed. Delete allocated extensions.
590  bind(&delete_allocated_handles);
591  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
592  movq(prev_limit_reg, rax);
593  movq(rax, ExternalReference::delete_handle_scope_extensions());
594  call(rax);
595  movq(rax, prev_limit_reg);
596  jmp(&leave_exit_frame);
597
598  return result;
599}
600
601
602void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
603                                             int result_size) {
604  // Set the entry point and jump to the C entry runtime stub.
605  movq(rbx, ext);
606  CEntryStub ces(result_size);
607  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
608}
609
610
611MaybeObject* MacroAssembler::TryJumpToExternalReference(
612    const ExternalReference& ext, int result_size) {
613  // Set the entry point and jump to the C entry runtime stub.
614  movq(rbx, ext);
615  CEntryStub ces(result_size);
616  return TryTailCallStub(&ces);
617}
618
619
620void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
621  // Calls are not allowed in some stubs.
622  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
623
624  // Rely on the assertion to check that the number of provided
625  // arguments matches the expected number of arguments. Fake a
626  // parameter count to avoid emitting code to do the check.
627  ParameterCount expected(0);
628  GetBuiltinEntry(rdx, id);
629  InvokeCode(rdx, expected, expected, flag);
630}
631
632
633void MacroAssembler::GetBuiltinFunction(Register target,
634                                        Builtins::JavaScript id) {
635  // Load the builtins object into target register.
636  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
637  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
638  movq(target, FieldOperand(target,
639                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
640}
641
642
643void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
644  ASSERT(!target.is(rdi));
645  // Load the JavaScript builtin function from the builtins object.
646  GetBuiltinFunction(rdi, id);
647  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
648}
649
650
651void MacroAssembler::Set(Register dst, int64_t x) {
652  if (x == 0) {
653    xorl(dst, dst);
654  } else if (is_int32(x)) {
655    movq(dst, Immediate(static_cast<int32_t>(x)));
656  } else if (is_uint32(x)) {
657    movl(dst, Immediate(static_cast<uint32_t>(x)));
658  } else {
659    movq(dst, x, RelocInfo::NONE);
660  }
661}
662
663void MacroAssembler::Set(const Operand& dst, int64_t x) {
664  if (is_int32(x)) {
665    movq(dst, Immediate(static_cast<int32_t>(x)));
666  } else {
667    movq(kScratchRegister, x, RelocInfo::NONE);
668    movq(dst, kScratchRegister);
669  }
670}
671
672// ----------------------------------------------------------------------------
673// Smi tagging, untagging and tag detection.
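// On x64 a smi keeps its 32-bit signed value in the upper half of a 64-bit
// word; the lower 32 bits, including the tag bit (kSmiTag == 0), are all
// zero, and kSmiShift is 32. For example, the integer 5 is encoded as
// 0x0000000500000000, so tagging is a left shift by kSmiShift and untagging
// is a right shift (arithmetic when a signed result is needed).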
674
675Register MacroAssembler::GetSmiConstant(Smi* source) {
676  int value = source->value();
677  if (value == 0) {
678    xorl(kScratchRegister, kScratchRegister);
679    return kScratchRegister;
680  }
681  if (value == 1) {
682    return kSmiConstantRegister;
683  }
684  LoadSmiConstant(kScratchRegister, source);
685  return kScratchRegister;
686}
687
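// kSmiConstantRegister permanently holds Smi::FromInt(1). Small smi constants
// can therefore be built without loading a 64-bit immediate by using lea with
// the register as both base and scaled index; e.g. Smi 5 is
// lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0)).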
688void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
689  if (FLAG_debug_code) {
690    movq(dst,
691         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
692         RelocInfo::NONE);
693    cmpq(dst, kSmiConstantRegister);
694    if (allow_stub_calls()) {
695      Assert(equal, "Uninitialized kSmiConstantRegister");
696    } else {
697      NearLabel ok;
698      j(equal, &ok);
699      int3();
700      bind(&ok);
701    }
702  }
703  if (source->value() == 0) {
704    xorl(dst, dst);
705    return;
706  }
707  int value = source->value();
708  bool negative = value < 0;
709  unsigned int uvalue = negative ? -value : value;
710
711  switch (uvalue) {
712    case 9:
713      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
714      break;
715    case 8:
716      xorl(dst, dst);
717      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
718      break;
719    case 4:
720      xorl(dst, dst);
721      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
722      break;
723    case 5:
724      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
725      break;
726    case 3:
727      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
728      break;
729    case 2:
730      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
731      break;
732    case 1:
733      movq(dst, kSmiConstantRegister);
734      break;
735    case 0:
736      UNREACHABLE();
737      return;
738    default:
739      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
740      return;
741  }
742  if (negative) {
743    neg(dst);
744  }
745}
746
747
748void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
749  ASSERT_EQ(0, kSmiTag);
750  if (!dst.is(src)) {
751    movl(dst, src);
752  }
753  shl(dst, Immediate(kSmiShift));
754}
755
756
757void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
758  if (FLAG_debug_code) {
759    testb(dst, Immediate(0x01));
760    NearLabel ok;
761    j(zero, &ok);
762    if (allow_stub_calls()) {
763      Abort("Integer32ToSmiField writing to non-smi location");
764    } else {
765      int3();
766    }
767    bind(&ok);
768  }
769  ASSERT(kSmiShift % kBitsPerByte == 0);
770  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
771}
772
773
774void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
775                                                Register src,
776                                                int constant) {
777  if (dst.is(src)) {
778    addq(dst, Immediate(constant));
779  } else {
780    lea(dst, Operand(src, constant));
781  }
782  shl(dst, Immediate(kSmiShift));
783}
784
785
786void MacroAssembler::SmiToInteger32(Register dst, Register src) {
787  ASSERT_EQ(0, kSmiTag);
788  if (!dst.is(src)) {
789    movq(dst, src);
790  }
791  shr(dst, Immediate(kSmiShift));
792}
793
794
795void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
796  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
797}
798
799
800void MacroAssembler::SmiToInteger64(Register dst, Register src) {
801  ASSERT_EQ(0, kSmiTag);
802  if (!dst.is(src)) {
803    movq(dst, src);
804  }
805  sar(dst, Immediate(kSmiShift));
806}
807
808
809void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
810  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
811}
812
813
814void MacroAssembler::SmiTest(Register src) {
815  testq(src, src);
816}
817
818
819void MacroAssembler::SmiCompare(Register dst, Register src) {
820  cmpq(dst, src);
821}
822
823
824void MacroAssembler::SmiCompare(Register dst, Smi* src) {
825  ASSERT(!dst.is(kScratchRegister));
826  if (src->value() == 0) {
827    testq(dst, dst);
828  } else {
829    Register constant_reg = GetSmiConstant(src);
830    cmpq(dst, constant_reg);
831  }
832}
833
834
835void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
836  cmpq(dst, src);
837}
838
839
840void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
841  cmpq(dst, src);
842}
843
844
845void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
846  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
847}
848
849
850void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
851  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
852}
853
854
855void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
856                                                           Register src,
857                                                           int power) {
858  ASSERT(power >= 0);
859  ASSERT(power < 64);
860  if (power == 0) {
861    SmiToInteger64(dst, src);
862    return;
863  }
864  if (!dst.is(src)) {
865    movq(dst, src);
866  }
867  if (power < kSmiShift) {
868    sar(dst, Immediate(kSmiShift - power));
869  } else if (power > kSmiShift) {
870    shl(dst, Immediate(power - kSmiShift));
871  }
872}
873
874
875void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
876                                                         Register src,
877                                                         int power) {
878  ASSERT((0 <= power) && (power < 32));
879  if (dst.is(src)) {
880    shr(dst, Immediate(power + kSmiShift));
881  } else {
882    UNIMPLEMENTED();  // Not used.
883  }
884}
885
886
887Condition MacroAssembler::CheckSmi(Register src) {
888  ASSERT_EQ(0, kSmiTag);
889  testb(src, Immediate(kSmiTagMask));
890  return zero;
891}
892
893
894Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
895  ASSERT_EQ(0, kSmiTag);
896  // Rotate the sign and tag bits into the low two bits; both must be zero.
897  movq(kScratchRegister, src);
898  rol(kScratchRegister, Immediate(1));
899  testb(kScratchRegister, Immediate(3));
900  return zero;
901}
902
903
904Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
905  if (first.is(second)) {
906    return CheckSmi(first);
907  }
908  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
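  // A smi has its low two bits clear while a heap object pointer has them set
  // to 01, so the sum of the two values has both low bits clear only if both
  // inputs are smis; leal computes that sum without clobbering the inputs.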
909  leal(kScratchRegister, Operand(first, second, times_1, 0));
910  testb(kScratchRegister, Immediate(0x03));
911  return zero;
912}
913
914
915Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
916                                                  Register second) {
917  if (first.is(second)) {
918    return CheckNonNegativeSmi(first);
919  }
920  movq(kScratchRegister, first);
921  or_(kScratchRegister, second);
922  rol(kScratchRegister, Immediate(1));
923  testl(kScratchRegister, Immediate(3));
924  return zero;
925}
926
927
928Condition MacroAssembler::CheckEitherSmi(Register first,
929                                         Register second,
930                                         Register scratch) {
931  if (first.is(second)) {
932    return CheckSmi(first);
933  }
934  if (scratch.is(second)) {
935    andl(scratch, first);
936  } else {
937    if (!scratch.is(first)) {
938      movl(scratch, first);
939    }
940    andl(scratch, second);
941  }
942  testb(scratch, Immediate(kSmiTagMask));
943  return zero;
944}
945
946
947Condition MacroAssembler::CheckIsMinSmi(Register src) {
948  ASSERT(!src.is(kScratchRegister));
949  // If we overflow by subtracting one, it's the minimal smi value.
950  cmpq(src, kSmiConstantRegister);
951  return overflow;
952}
953
954
955Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
956  // A 32-bit integer value can always be converted to a smi.
957  return always;
958}
959
960
961Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
962  // An unsigned 32-bit integer value is valid as long as the high bit
963  // is not set.
964  testl(src, src);
965  return positive;
966}
967
968
969void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
970  if (constant->value() == 0) {
971    if (!dst.is(src)) {
972      movq(dst, src);
973    }
974    return;
975  } else if (dst.is(src)) {
976    ASSERT(!dst.is(kScratchRegister));
977    switch (constant->value()) {
978      case 1:
979        addq(dst, kSmiConstantRegister);
980        return;
981      case 2:
982        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
983        return;
984      case 4:
985        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
986        return;
987      case 8:
988        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
989        return;
990      default:
991        Register constant_reg = GetSmiConstant(constant);
992        addq(dst, constant_reg);
993        return;
994    }
995  } else {
996    switch (constant->value()) {
997      case 1:
998        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
999        return;
1000      case 2:
1001        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1002        return;
1003      case 4:
1004        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1005        return;
1006      case 8:
1007        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1008        return;
1009      default:
1010        LoadSmiConstant(dst, constant);
1011        addq(dst, src);
1012        return;
1013    }
1014  }
1015}
1016
1017
1018void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
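  // The smi value lives in the upper 32 bits of the field, so add the
  // untagged constant to that half only (kSmiShift / kBitsPerByte == 4).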
1019  if (constant->value() != 0) {
1020    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
1021  }
1022}
1023
1024
1025void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1026  if (constant->value() == 0) {
1027    if (!dst.is(src)) {
1028      movq(dst, src);
1029    }
1030  } else if (dst.is(src)) {
1031    ASSERT(!dst.is(kScratchRegister));
1032    Register constant_reg = GetSmiConstant(constant);
1033    subq(dst, constant_reg);
1034  } else {
1035    if (constant->value() == Smi::kMinValue) {
1036      LoadSmiConstant(dst, constant);
1037      // Adding and subtracting the min-value gives the same result; they only
1038      // differ in the overflow flag, which we don't check here.
1039      addq(dst, src);
1040    } else {
1041      // Subtract by adding the negation.
1042      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1043      addq(dst, src);
1044    }
1045  }
1046}
1047
1048
1049void MacroAssembler::SmiAdd(Register dst,
1050                            Register src1,
1051                            Register src2) {
1052  // No overflow checking. Use only when it's known that
1053  // overflowing is impossible.
1054  ASSERT(!dst.is(src2));
1055  if (dst.is(src1)) {
1056    addq(dst, src2);
1057  } else {
1058    movq(dst, src1);
1059    addq(dst, src2);
1060  }
1061  Assert(no_overflow, "Smi addition overflow");
1062}
1063
1064
1065void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1066  // No overflow checking. Use only when it's known that
1067  // overflowing is impossible (e.g., subtracting two positive smis).
1068  ASSERT(!dst.is(src2));
1069  if (dst.is(src1)) {
1070    subq(dst, src2);
1071  } else {
1072    movq(dst, src1);
1073    subq(dst, src2);
1074  }
1075  Assert(no_overflow, "Smi subtraction overflow");
1076}
1077
1078
1079void MacroAssembler::SmiSub(Register dst,
1080                            Register src1,
1081                            const Operand& src2) {
1082  // No overflow checking. Use only when it's known that
1083  // overflowing is impossible (e.g., subtracting two positive smis).
1084  if (dst.is(src1)) {
1085    subq(dst, src2);
1086  } else {
1087    movq(dst, src1);
1088    subq(dst, src2);
1089  }
1090  Assert(no_overflow, "Smi subtraction overflow");
1091}
1092
1093
1094void MacroAssembler::SmiNot(Register dst, Register src) {
1095  ASSERT(!dst.is(kScratchRegister));
1096  ASSERT(!src.is(kScratchRegister));
1097  // Set tag and padding bits before negating, so that they are zero afterwards.
1098  movl(kScratchRegister, Immediate(~0));
1099  if (dst.is(src)) {
1100    xor_(dst, kScratchRegister);
1101  } else {
1102    lea(dst, Operand(src, kScratchRegister, times_1, 0));
1103  }
1104  not_(dst);
1105}
1106
1107
1108void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
1109  ASSERT(!dst.is(src2));
1110  if (!dst.is(src1)) {
1111    movq(dst, src1);
1112  }
1113  and_(dst, src2);
1114}
1115
1116
1117void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1118  if (constant->value() == 0) {
1119    xor_(dst, dst);
1120  } else if (dst.is(src)) {
1121    ASSERT(!dst.is(kScratchRegister));
1122    Register constant_reg = GetSmiConstant(constant);
1123    and_(dst, constant_reg);
1124  } else {
1125    LoadSmiConstant(dst, constant);
1126    and_(dst, src);
1127  }
1128}
1129
1130
1131void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1132  if (!dst.is(src1)) {
1133    movq(dst, src1);
1134  }
1135  or_(dst, src2);
1136}
1137
1138
1139void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1140  if (dst.is(src)) {
1141    ASSERT(!dst.is(kScratchRegister));
1142    Register constant_reg = GetSmiConstant(constant);
1143    or_(dst, constant_reg);
1144  } else {
1145    LoadSmiConstant(dst, constant);
1146    or_(dst, src);
1147  }
1148}
1149
1150
1151void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1152  if (!dst.is(src1)) {
1153    movq(dst, src1);
1154  }
1155  xor_(dst, src2);
1156}
1157
1158
1159void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1160  if (dst.is(src)) {
1161    ASSERT(!dst.is(kScratchRegister));
1162    Register constant_reg = GetSmiConstant(constant);
1163    xor_(dst, constant_reg);
1164  } else {
1165    LoadSmiConstant(dst, constant);
1166    xor_(dst, src);
1167  }
1168}
1169
1170
1171void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1172                                                     Register src,
1173                                                     int shift_value) {
1174  ASSERT(is_uint5(shift_value));
1175  if (shift_value > 0) {
1176    if (dst.is(src)) {
1177      sar(dst, Immediate(shift_value + kSmiShift));
1178      shl(dst, Immediate(kSmiShift));
1179    } else {
1180      UNIMPLEMENTED();  // Not used.
1181    }
1182  }
1183}
1184
1185
1186void MacroAssembler::SmiShiftLeftConstant(Register dst,
1187                                          Register src,
1188                                          int shift_value) {
1189  if (!dst.is(src)) {
1190    movq(dst, src);
1191  }
1192  if (shift_value > 0) {
1193    shl(dst, Immediate(shift_value));
1194  }
1195}
1196
1197
1198void MacroAssembler::SmiShiftLeft(Register dst,
1199                                  Register src1,
1200                                  Register src2) {
1201  ASSERT(!dst.is(rcx));
1202  NearLabel result_ok;
1203  // Untag shift amount.
1204  if (!dst.is(src1)) {
1205    movq(dst, src1);
1206  }
1207  SmiToInteger32(rcx, src2);
1208  // The shift amount is given by the lower 5 bits, not six as for 64-bit shl.
1209  and_(rcx, Immediate(0x1f));
1210  shl_cl(dst);
1211}
1212
1213
1214void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1215                                             Register src1,
1216                                             Register src2) {
1217  ASSERT(!dst.is(kScratchRegister));
1218  ASSERT(!src1.is(kScratchRegister));
1219  ASSERT(!src2.is(kScratchRegister));
1220  ASSERT(!dst.is(rcx));
1221  if (src1.is(rcx)) {
1222    movq(kScratchRegister, src1);
1223  } else if (src2.is(rcx)) {
1224    movq(kScratchRegister, src2);
1225  }
1226  if (!dst.is(src1)) {
1227    movq(dst, src1);
1228  }
1229  SmiToInteger32(rcx, src2);
1230  orl(rcx, Immediate(kSmiShift));
1231  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
1232  shl(dst, Immediate(kSmiShift));
1233  if (src1.is(rcx)) {
1234    movq(src1, kScratchRegister);
1235  } else if (src2.is(rcx)) {
1236    movq(src2, kScratchRegister);
1237  }
1238}
1239
1240
1241SmiIndex MacroAssembler::SmiToIndex(Register dst,
1242                                    Register src,
1243                                    int shift) {
1244  ASSERT(is_uint6(shift));
1245  // There is a possible optimization if shift is in the range 60-63, but that
1246  // will (and must) never happen.
1247  if (!dst.is(src)) {
1248    movq(dst, src);
1249  }
1250  if (shift < kSmiShift) {
1251    sar(dst, Immediate(kSmiShift - shift));
1252  } else {
1253    shl(dst, Immediate(shift - kSmiShift));
1254  }
1255  return SmiIndex(dst, times_1);
1256}
1257
1258SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1259                                            Register src,
1260                                            int shift) {
1261  // Register src holds a positive smi.
1262  ASSERT(is_uint6(shift));
1263  if (!dst.is(src)) {
1264    movq(dst, src);
1265  }
1266  neg(dst);
1267  if (shift < kSmiShift) {
1268    sar(dst, Immediate(kSmiShift - shift));
1269  } else {
1270    shl(dst, Immediate(shift - kSmiShift));
1271  }
1272  return SmiIndex(dst, times_1);
1273}
1274
1275
1276void MacroAssembler::Move(Register dst, Register src) {
1277  if (!dst.is(src)) {
1278    movq(dst, src);
1279  }
1280}
1281
1282
1283
1284
1285void MacroAssembler::Move(Register dst, Handle<Object> source) {
1286  ASSERT(!source->IsFailure());
1287  if (source->IsSmi()) {
1288    Move(dst, Smi::cast(*source));
1289  } else {
1290    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1291  }
1292}
1293
1294
1295void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
1296  ASSERT(!source->IsFailure());
1297  if (source->IsSmi()) {
1298    Move(dst, Smi::cast(*source));
1299  } else {
1300    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1301    movq(dst, kScratchRegister);
1302  }
1303}
1304
1305
1306void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1307  if (source->IsSmi()) {
1308    SmiCompare(dst, Smi::cast(*source));
1309  } else {
1310    Move(kScratchRegister, source);
1311    cmpq(dst, kScratchRegister);
1312  }
1313}
1314
1315
1316void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1317  if (source->IsSmi()) {
1318    SmiCompare(dst, Smi::cast(*source));
1319  } else {
1320    ASSERT(source->IsHeapObject());
1321    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1322    cmpq(dst, kScratchRegister);
1323  }
1324}
1325
1326
1327void MacroAssembler::Push(Handle<Object> source) {
1328  if (source->IsSmi()) {
1329    Push(Smi::cast(*source));
1330  } else {
1331    ASSERT(source->IsHeapObject());
1332    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1333    push(kScratchRegister);
1334  }
1335}
1336
1337
1338void MacroAssembler::Push(Smi* source) {
1339  intptr_t smi = reinterpret_cast<intptr_t>(source);
1340  if (is_int32(smi)) {
1341    push(Immediate(static_cast<int32_t>(smi)));
1342  } else {
1343    Register constant = GetSmiConstant(source);
1344    push(constant);
1345  }
1346}
1347
1348
1349void MacroAssembler::Drop(int stack_elements) {
1350  if (stack_elements > 0) {
1351    addq(rsp, Immediate(stack_elements * kPointerSize));
1352  }
1353}
1354
1355
1356void MacroAssembler::Test(const Operand& src, Smi* source) {
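  // Only the upper half of the field holds the smi's value, so test the
  // 32 bits at offset kIntSize against the untagged value.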
1357  testl(Operand(src, kIntSize), Immediate(source->value()));
1358}
1359
1360
1361void MacroAssembler::Jump(ExternalReference ext) {
1362  movq(kScratchRegister, ext);
1363  jmp(kScratchRegister);
1364}
1365
1366
1367void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1368  movq(kScratchRegister, destination, rmode);
1369  jmp(kScratchRegister);
1370}
1371
1372
1373void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1374  // TODO(X64): Inline this
1375  jmp(code_object, rmode);
1376}
1377
1378
1379void MacroAssembler::Call(ExternalReference ext) {
1380  movq(kScratchRegister, ext);
1381  call(kScratchRegister);
1382}
1383
1384
1385void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1386  movq(kScratchRegister, destination, rmode);
1387  call(kScratchRegister);
1388}
1389
1390
1391void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1392  ASSERT(RelocInfo::IsCodeTarget(rmode));
1393  call(code_object, rmode);
1394}
1395
1396
1397void MacroAssembler::PushTryHandler(CodeLocation try_location,
1398                                    HandlerType type) {
1399  // Adjust this code if not the case.
1400  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1401
1402  // The pc (return address) is already on TOS.  This code pushes state,
1403  // frame pointer and current handler.  Check that they are expected
1404  // next on the stack, in that order.
1405  ASSERT_EQ(StackHandlerConstants::kStateOffset,
1406            StackHandlerConstants::kPCOffset - kPointerSize);
1407  ASSERT_EQ(StackHandlerConstants::kFPOffset,
1408            StackHandlerConstants::kStateOffset - kPointerSize);
1409  ASSERT_EQ(StackHandlerConstants::kNextOffset,
1410            StackHandlerConstants::kFPOffset - kPointerSize);
1411
1412  if (try_location == IN_JAVASCRIPT) {
1413    if (type == TRY_CATCH_HANDLER) {
1414      push(Immediate(StackHandler::TRY_CATCH));
1415    } else {
1416      push(Immediate(StackHandler::TRY_FINALLY));
1417    }
1418    push(rbp);
1419  } else {
1420    ASSERT(try_location == IN_JS_ENTRY);
1421    // The frame pointer does not point to a JS frame, so we save NULL
1422    // for rbp. We expect the code throwing an exception to check rbp
1423    // before dereferencing it to restore the context.
1424    push(Immediate(StackHandler::ENTRY));
1425    push(Immediate(0));  // NULL frame pointer.
1426  }
1427  // Save the current handler.
1428  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1429  push(Operand(kScratchRegister, 0));
1430  // Link this handler.
1431  movq(Operand(kScratchRegister, 0), rsp);
1432}
1433
1434
1435void MacroAssembler::PopTryHandler() {
1436  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1437  // Unlink this handler.
1438  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1439  pop(Operand(kScratchRegister, 0));
1440  // Remove the remaining fields.
1441  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1442}
1443
1444
1445void MacroAssembler::Ret() {
1446  ret(0);
1447}
1448
1449
1450void MacroAssembler::FCmp() {
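  // fucomip compares ST(0) with ST(1), sets EFLAGS and pops ST(0); the
  // following fstp(0) pops the remaining operand, leaving the FPU stack empty.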
1451  fucomip();
1452  fstp(0);
1453}
1454
1455
1456void MacroAssembler::CmpObjectType(Register heap_object,
1457                                   InstanceType type,
1458                                   Register map) {
1459  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1460  CmpInstanceType(map, type);
1461}
1462
1463
1464void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1465  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1466       Immediate(static_cast<int8_t>(type)));
1467}
1468
1469
1470void MacroAssembler::CheckMap(Register obj,
1471                              Handle<Map> map,
1472                              Label* fail,
1473                              bool is_heap_object) {
1474  if (!is_heap_object) {
1475    JumpIfSmi(obj, fail);
1476  }
1477  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1478  j(not_equal, fail);
1479}
1480
1481
1482void MacroAssembler::AbortIfNotNumber(Register object) {
1483  NearLabel ok;
1484  Condition is_smi = CheckSmi(object);
1485  j(is_smi, &ok);
1486  Cmp(FieldOperand(object, HeapObject::kMapOffset),
1487      Factory::heap_number_map());
1488  Assert(equal, "Operand not a number");
1489  bind(&ok);
1490}
1491
1492
1493void MacroAssembler::AbortIfSmi(Register object) {
1494  NearLabel ok;
1495  Condition is_smi = CheckSmi(object);
1496  Assert(NegateCondition(is_smi), "Operand is a smi");
1497}
1498
1499
1500void MacroAssembler::AbortIfNotSmi(Register object) {
1501  NearLabel ok;
1502  Condition is_smi = CheckSmi(object);
1503  Assert(is_smi, "Operand is not a smi");
1504}
1505
1506
1507void MacroAssembler::AbortIfNotString(Register object) {
1508  testb(object, Immediate(kSmiTagMask));
1509  Assert(not_equal, "Operand is not a string");
1510  push(object);
1511  movq(object, FieldOperand(object, HeapObject::kMapOffset));
1512  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
1513  pop(object);
1514  Assert(below, "Operand is not a string");
1515}
1516
1517
1518void MacroAssembler::AbortIfNotRootValue(Register src,
1519                                         Heap::RootListIndex root_value_index,
1520                                         const char* message) {
1521  ASSERT(!src.is(kScratchRegister));
1522  LoadRoot(kScratchRegister, root_value_index);
1523  cmpq(src, kScratchRegister);
1524  Check(equal, message);
1525}
1526
1527
1528
1529Condition MacroAssembler::IsObjectStringType(Register heap_object,
1530                                             Register map,
1531                                             Register instance_type) {
1532  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1533  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1534  ASSERT(kNotStringTag != 0);
1535  testb(instance_type, Immediate(kIsNotStringMask));
1536  return zero;
1537}
1538
1539
1540void MacroAssembler::TryGetFunctionPrototype(Register function,
1541                                             Register result,
1542                                             Label* miss) {
1543  // Check that the receiver isn't a smi.
1544  testl(function, Immediate(kSmiTagMask));
1545  j(zero, miss);
1546
1547  // Check that the function really is a function.
1548  CmpObjectType(function, JS_FUNCTION_TYPE, result);
1549  j(not_equal, miss);
1550
1551  // Make sure that the function has an instance prototype.
1552  NearLabel non_instance;
1553  testb(FieldOperand(result, Map::kBitFieldOffset),
1554        Immediate(1 << Map::kHasNonInstancePrototype));
1555  j(not_zero, &non_instance);
1556
1557  // Get the prototype or initial map from the function.
1558  movq(result,
1559       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1560
1561  // If the prototype or initial map is the hole, don't return it and
1562  // simply miss the cache instead. This will allow us to allocate a
1563  // prototype object on-demand in the runtime system.
1564  CompareRoot(result, Heap::kTheHoleValueRootIndex);
1565  j(equal, miss);
1566
1567  // If the function does not have an initial map, we're done.
1568  NearLabel done;
1569  CmpObjectType(result, MAP_TYPE, kScratchRegister);
1570  j(not_equal, &done);
1571
1572  // Get the prototype from the initial map.
1573  movq(result, FieldOperand(result, Map::kPrototypeOffset));
1574  jmp(&done);
1575
1576  // Non-instance prototype: Fetch prototype from constructor field
1577  // in initial map.
1578  bind(&non_instance);
1579  movq(result, FieldOperand(result, Map::kConstructorOffset));
1580
1581  // All done.
1582  bind(&done);
1583}
1584
1585
1586void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1587  if (FLAG_native_code_counters && counter->Enabled()) {
1588    movq(kScratchRegister, ExternalReference(counter));
1589    movl(Operand(kScratchRegister, 0), Immediate(value));
1590  }
1591}
1592
1593
1594void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1595  ASSERT(value > 0);
1596  if (FLAG_native_code_counters && counter->Enabled()) {
1597    movq(kScratchRegister, ExternalReference(counter));
1598    Operand operand(kScratchRegister, 0);
1599    if (value == 1) {
1600      incl(operand);
1601    } else {
1602      addl(operand, Immediate(value));
1603    }
1604  }
1605}
1606
1607
1608void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1609  ASSERT(value > 0);
1610  if (FLAG_native_code_counters && counter->Enabled()) {
1611    movq(kScratchRegister, ExternalReference(counter));
1612    Operand operand(kScratchRegister, 0);
1613    if (value == 1) {
1614      decl(operand);
1615    } else {
1616      subl(operand, Immediate(value));
1617    }
1618  }
1619}
1620
1621
1622#ifdef ENABLE_DEBUGGER_SUPPORT
1623void MacroAssembler::DebugBreak() {
1624  ASSERT(allow_stub_calls());
1625  xor_(rax, rax);  // no arguments
1626  movq(rbx, ExternalReference(Runtime::kDebugBreak));
1627  CEntryStub ces(1);
1628  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
1629}
1630#endif  // ENABLE_DEBUGGER_SUPPORT
1631
1632
1633void MacroAssembler::InvokeCode(Register code,
1634                                const ParameterCount& expected,
1635                                const ParameterCount& actual,
1636                                InvokeFlag flag) {
1637  NearLabel done;
1638  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1639  if (flag == CALL_FUNCTION) {
1640    call(code);
1641  } else {
1642    ASSERT(flag == JUMP_FUNCTION);
1643    jmp(code);
1644  }
1645  bind(&done);
1646}
1647
1648
1649void MacroAssembler::InvokeCode(Handle<Code> code,
1650                                const ParameterCount& expected,
1651                                const ParameterCount& actual,
1652                                RelocInfo::Mode rmode,
1653                                InvokeFlag flag) {
1654  NearLabel done;
1655  Register dummy = rax;
1656  InvokePrologue(expected, actual, code, dummy, &done, flag);
1657  if (flag == CALL_FUNCTION) {
1658    Call(code, rmode);
1659  } else {
1660    ASSERT(flag == JUMP_FUNCTION);
1661    Jump(code, rmode);
1662  }
1663  bind(&done);
1664}
1665
1666
1667void MacroAssembler::InvokeFunction(Register function,
1668                                    const ParameterCount& actual,
1669                                    InvokeFlag flag) {
1670  ASSERT(function.is(rdi));
1671  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1672  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1673  movsxlq(rbx,
1674          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1675  // Load the code entry point into rdx. It points just past the Code object
1676  // header, at the start of the executable code.
1677  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1678
1679  ParameterCount expected(rbx);
1680  InvokeCode(rdx, expected, actual, flag);
1681}
1682
1683
1684void MacroAssembler::InvokeFunction(JSFunction* function,
1685                                    const ParameterCount& actual,
1686                                    InvokeFlag flag) {
1687  ASSERT(function->is_compiled());
1688  // Get the function and setup the context.
1689  Move(rdi, Handle<JSFunction>(function));
1690  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1691
1692  // Invoke the cached code.
1693  Handle<Code> code(function->code());
1694  ParameterCount expected(function->shared()->formal_parameter_count());
1695  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
1696}
1697
1698
1699void MacroAssembler::EnterFrame(StackFrame::Type type) {
1700  push(rbp);
1701  movq(rbp, rsp);
1702  push(rsi);  // Context.
1703  Push(Smi::FromInt(type));
1704  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1705  push(kScratchRegister);
1706  if (FLAG_debug_code) {
1707    movq(kScratchRegister,
1708         Factory::undefined_value(),
1709         RelocInfo::EMBEDDED_OBJECT);
1710    cmpq(Operand(rsp, 0), kScratchRegister);
1711    Check(not_equal, "code object not properly patched");
1712  }
1713}
1714
1715
1716void MacroAssembler::LeaveFrame(StackFrame::Type type) {
1717  if (FLAG_debug_code) {
1718    Move(kScratchRegister, Smi::FromInt(type));
1719    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
1720    Check(equal, "stack frame types must match");
1721  }
1722  movq(rsp, rbp);
1723  pop(rbp);
1724}
1725
1726
1727void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
1728  // Setup the frame structure on the stack.
1729  // All constants are relative to the frame pointer of the exit frame.
1730  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
1731  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
1732  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
1733  push(rbp);
1734  movq(rbp, rsp);
1735
1736  // Reserve room for entry stack pointer and push the code object.
1737  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
1738  push(Immediate(0));  // Saved entry sp, patched before call.
1739  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1740  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
1741
1742  // Save the frame pointer and the context in top.
1743  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1744  ExternalReference context_address(Top::k_context_address);
1745  if (save_rax) {
1746    movq(r14, rax);  // Back up rax before we use it.
1747  }
1748
1749  movq(rax, rbp);
1750  store_rax(c_entry_fp_address);
1751  movq(rax, rsi);
1752  store_rax(context_address);
1753}
1754
1755
1756void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
1757#ifdef _WIN64
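  // The Windows 64-bit ABI requires the caller to reserve shadow space for
  // the four register arguments.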
1758  const int kShadowSpace = 4;
1759  arg_stack_space += kShadowSpace;
1760#endif
1761  if (arg_stack_space > 0) {
1762    subq(rsp, Immediate(arg_stack_space * kPointerSize));
1763  }
1764
1765  // Get the required frame alignment for the OS.
1766  static const int kFrameAlignment = OS::ActivationFrameAlignment();
1767  if (kFrameAlignment > 0) {
1768    ASSERT(IsPowerOf2(kFrameAlignment));
1769    movq(kScratchRegister, Immediate(-kFrameAlignment));
1770    and_(rsp, kScratchRegister);
1771  }
1772
1773  // Patch the saved entry sp.
1774  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
1775}
1776
1777
1778void MacroAssembler::EnterExitFrame(int arg_stack_space) {
1779  EnterExitFramePrologue(true);
1780
1781  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
1782  // so it must be retained across the C-call.
1783  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
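  // r14 holds the value saved from rax in EnterExitFramePrologue; rax is
  // assumed to hold the argument count, so r14 scales the caller-SP offset
  // used to locate argv.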
1784  lea(r12, Operand(rbp, r14, times_pointer_size, offset));
1785
1786  EnterExitFrameEpilogue(arg_stack_space);
1787}
1788
1789
1790void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
1791  EnterExitFramePrologue(false);
1792  EnterExitFrameEpilogue(arg_stack_space);
1793}
1794
1795
1796void MacroAssembler::LeaveExitFrame() {
1797  // Registers:
1798  // r12 : argv
1799
1800  // Get the return address from the stack and restore the frame pointer.
1801  movq(rcx, Operand(rbp, 1 * kPointerSize));
1802  movq(rbp, Operand(rbp, 0 * kPointerSize));
1803
1804  // Pop everything up to and including the arguments and the receiver
1805  // from the caller stack.
1806  lea(rsp, Operand(r12, 1 * kPointerSize));
1807
1808  // Push the return address to get ready to return.
1809  push(rcx);
1810
1811  LeaveExitFrameEpilogue();
1812}
1813
1814
1815void MacroAssembler::LeaveApiExitFrame() {
1816  movq(rsp, rbp);
1817  pop(rbp);
1818
1819  LeaveExitFrameEpilogue();
1820}
1821
1822
1823void MacroAssembler::LeaveExitFrameEpilogue() {
1824  // Restore current context from top and clear it in debug mode.
1825  ExternalReference context_address(Top::k_context_address);
1826  movq(kScratchRegister, context_address);
1827  movq(rsi, Operand(kScratchRegister, 0));
1828#ifdef DEBUG
1829  movq(Operand(kScratchRegister, 0), Immediate(0));
1830#endif
1831
1832  // Clear the top frame.
1833  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1834  movq(kScratchRegister, c_entry_fp_address);
1835  movq(Operand(kScratchRegister, 0), Immediate(0));
1836}
1837
1838
1839void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1840                                            Register scratch,
1841                                            Label* miss) {
1842  Label same_contexts;
1843
1844  ASSERT(!holder_reg.is(scratch));
1845  ASSERT(!scratch.is(kScratchRegister));
1846  // Load current lexical context from the stack frame.
1847  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
1848
1849  // When generating debug code, make sure the lexical context is set.
1850  if (FLAG_debug_code) {
1851    cmpq(scratch, Immediate(0));
1852    Check(not_equal, "we should not have an empty lexical context");
1853  }
1854  // Load the global context of the current context.
1855  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1856  movq(scratch, FieldOperand(scratch, offset));
1857  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
1858
1859  // Check that the context is a global context.
1860  if (FLAG_debug_code) {
1861    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
1862        Factory::global_context_map());
1863    Check(equal, "JSGlobalObject::global_context should be a global context.");
1864  }
1865
1866  // Check if both contexts are the same.
1867  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1868  j(equal, &same_contexts);
1869
1870  // Compare security tokens.
1871  // Check that the security token in the calling global object is
1872  // compatible with the security token in the receiving global
1873  // object.
1874
1875  // Check that the holder's context is a global context.
1876  if (FLAG_debug_code) {
1877    // Preserve original value of holder_reg.
1878    push(holder_reg);
1879    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1880    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
1881    Check(not_equal, "JSGlobalProxy::context() should not be null.");
1882
1883    // Read the first word (the map) and compare it to the global context map.
1884    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
1885    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
1886    Check(equal, "JSGlobalObject::global_context should be a global context.");
1887    pop(holder_reg);
1888  }
1889
1890  movq(kScratchRegister,
1891       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1892  int token_offset =
1893      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
1894  movq(scratch, FieldOperand(scratch, token_offset));
1895  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
1896  j(not_equal, miss);
1897
1898  bind(&same_contexts);
1899}
1900
1901
1902void MacroAssembler::LoadAllocationTopHelper(Register result,
1903                                             Register scratch,
1904                                             AllocationFlags flags) {
1905  ExternalReference new_space_allocation_top =
1906      ExternalReference::new_space_allocation_top_address();
1907
1908  // Just return if allocation top is already known.
1909  if ((flags & RESULT_CONTAINS_TOP) != 0) {
1910    // No use of scratch if allocation top is provided.
1911    ASSERT(!scratch.is_valid());
1912#ifdef DEBUG
1913    // Assert that result actually contains top on entry.
1914    movq(kScratchRegister, new_space_allocation_top);
1915    cmpq(result, Operand(kScratchRegister, 0));
1916    Check(equal, "Unexpected allocation top");
1917#endif
1918    return;
1919  }
1920
1921  // Move address of new object to result. Use scratch register if available,
1922  // and keep address in scratch until call to UpdateAllocationTopHelper.
1923  if (scratch.is_valid()) {
1924    movq(scratch, new_space_allocation_top);
1925    movq(result, Operand(scratch, 0));
1926  } else if (result.is(rax)) {
1927    load_rax(new_space_allocation_top);
1928  } else {
1929    movq(kScratchRegister, new_space_allocation_top);
1930    movq(result, Operand(kScratchRegister, 0));
1931  }
1932}
1933
1934
1935void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1936                                               Register scratch) {
1937  if (FLAG_debug_code) {
1938    testq(result_end, Immediate(kObjectAlignmentMask));
1939    Check(zero, "Unaligned allocation in new space");
1940  }
1941
1942  ExternalReference new_space_allocation_top =
1943      ExternalReference::new_space_allocation_top_address();
1944
1945  // Update new top.
1946  if (result_end.is(rax)) {
1947    // rax can be stored directly to a memory location.
1948    store_rax(new_space_allocation_top);
1949  } else {
1950    // Register required - use scratch provided if available.
1951    if (scratch.is_valid()) {
1952      movq(Operand(scratch, 0), result_end);
1953    } else {
1954      movq(kScratchRegister, new_space_allocation_top);
1955      movq(Operand(kScratchRegister, 0), result_end);
1956    }
1957  }
1958}
1959
1960
1961void MacroAssembler::AllocateInNewSpace(int object_size,
1962                                        Register result,
1963                                        Register result_end,
1964                                        Register scratch,
1965                                        Label* gc_required,
1966                                        AllocationFlags flags) {
1967  if (!FLAG_inline_new) {
1968    if (FLAG_debug_code) {
1969      // Trash the registers to simulate an allocation failure.
1970      movl(result, Immediate(0x7091));
1971      if (result_end.is_valid()) {
1972        movl(result_end, Immediate(0x7191));
1973      }
1974      if (scratch.is_valid()) {
1975        movl(scratch, Immediate(0x7291));
1976      }
1977    }
1978    jmp(gc_required);
1979    return;
1980  }
1981  ASSERT(!result.is(result_end));
1982
1983  // Load address of new object into result.
1984  LoadAllocationTopHelper(result, scratch, flags);
1985
1986  // Calculate new top and bail out if new space is exhausted.
1987  ExternalReference new_space_allocation_limit =
1988      ExternalReference::new_space_allocation_limit_address();
1989
1990  Register top_reg = result_end.is_valid() ? result_end : result;
1991
1992  if (top_reg.is(result)) {
1993    addq(top_reg, Immediate(object_size));
1994  } else {
1995    lea(top_reg, Operand(result, object_size));
1996  }
1997  movq(kScratchRegister, new_space_allocation_limit);
1998  cmpq(top_reg, Operand(kScratchRegister, 0));
1999  j(above, gc_required);
2000
2001  // Update allocation top.
2002  UpdateAllocationTopHelper(top_reg, scratch);
2003
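  // If result doubled as top_reg, it now holds the new top; move it back to
  // the start of the allocated object.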
2004  if (top_reg.is(result)) {
2005    if ((flags & TAG_OBJECT) != 0) {
2006      subq(result, Immediate(object_size - kHeapObjectTag));
2007    } else {
2008      subq(result, Immediate(object_size));
2009    }
2010  } else if ((flags & TAG_OBJECT) != 0) {
2011    // Tag the result if requested.
2012    addq(result, Immediate(kHeapObjectTag));
2013  }
2014}
2015
2016
2017void MacroAssembler::AllocateInNewSpace(int header_size,
2018                                        ScaleFactor element_size,
2019                                        Register element_count,
2020                                        Register result,
2021                                        Register result_end,
2022                                        Register scratch,
2023                                        Label* gc_required,
2024                                        AllocationFlags flags) {
2025  if (!FLAG_inline_new) {
2026    if (FLAG_debug_code) {
2027      // Trash the registers to simulate an allocation failure.
2028      movl(result, Immediate(0x7091));
2029      movl(result_end, Immediate(0x7191));
2030      if (scratch.is_valid()) {
2031        movl(scratch, Immediate(0x7291));
2032      }
2033      // Register element_count is not modified by the function.
2034    }
2035    jmp(gc_required);
2036    return;
2037  }
2038  ASSERT(!result.is(result_end));
2039
2040  // Load address of new object into result.
2041  LoadAllocationTopHelper(result, scratch, flags);
2042
2043  // Calculate new top and bail out if new space is exhausted.
2044  ExternalReference new_space_allocation_limit =
2045      ExternalReference::new_space_allocation_limit_address();
2046  lea(result_end, Operand(result, element_count, element_size, header_size));
2047  movq(kScratchRegister, new_space_allocation_limit);
2048  cmpq(result_end, Operand(kScratchRegister, 0));
2049  j(above, gc_required);
2050
2051  // Update allocation top.
2052  UpdateAllocationTopHelper(result_end, scratch);
2053
2054  // Tag the result if requested.
2055  if ((flags & TAG_OBJECT) != 0) {
2056    addq(result, Immediate(kHeapObjectTag));
2057  }
2058}
2059
2060
2061void MacroAssembler::AllocateInNewSpace(Register object_size,
2062                                        Register result,
2063                                        Register result_end,
2064                                        Register scratch,
2065                                        Label* gc_required,
2066                                        AllocationFlags flags) {
2067  if (!FLAG_inline_new) {
2068    if (FLAG_debug_code) {
2069      // Trash the registers to simulate an allocation failure.
2070      movl(result, Immediate(0x7091));
2071      movl(result_end, Immediate(0x7191));
2072      if (scratch.is_valid()) {
2073        movl(scratch, Immediate(0x7291));
2074      }
2075      // object_size is left unchanged by this function.
2076    }
2077    jmp(gc_required);
2078    return;
2079  }
2080  ASSERT(!result.is(result_end));
2081
2082  // Load address of new object into result.
2083  LoadAllocationTopHelper(result, scratch, flags);
2084
2085  // Calculate new top and bail out if new space is exhausted.
2086  ExternalReference new_space_allocation_limit =
2087      ExternalReference::new_space_allocation_limit_address();
2088  if (!object_size.is(result_end)) {
2089    movq(result_end, object_size);
2090  }
2091  addq(result_end, result);
2092  movq(kScratchRegister, new_space_allocation_limit);
2093  cmpq(result_end, Operand(kScratchRegister, 0));
2094  j(above, gc_required);
2095
2096  // Update allocation top.
2097  UpdateAllocationTopHelper(result_end, scratch);
2098
2099  // Tag the result if requested.
2100  if ((flags & TAG_OBJECT) != 0) {
2101    addq(result, Immediate(kHeapObjectTag));
2102  }
2103}
2104
2105
2106void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2107  ExternalReference new_space_allocation_top =
2108      ExternalReference::new_space_allocation_top_address();
2109
2110  // Make sure the object has no tag before resetting top.
2111  and_(object, Immediate(~kHeapObjectTagMask));
2112  movq(kScratchRegister, new_space_allocation_top);
2113#ifdef DEBUG
2114  cmpq(object, Operand(kScratchRegister, 0));
2115  Check(below, "Undo allocation of non-allocated memory");
2116#endif
2117  movq(Operand(kScratchRegister, 0), object);
2118}
2119
2120
2121void MacroAssembler::AllocateHeapNumber(Register result,
2122                                        Register scratch,
2123                                        Label* gc_required) {
2124  // Allocate heap number in new space.
2125  AllocateInNewSpace(HeapNumber::kSize,
2126                     result,
2127                     scratch,
2128                     no_reg,
2129                     gc_required,
2130                     TAG_OBJECT);
2131
2132  // Set the map.
2133  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2134  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2135}
2136
2137
2138void MacroAssembler::AllocateTwoByteString(Register result,
2139                                           Register length,
2140                                           Register scratch1,
2141                                           Register scratch2,
2142                                           Register scratch3,
2143                                           Label* gc_required) {
2144  // Calculate the number of bytes needed for the characters in the string
2145  // while observing object alignment.
2146  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2147                               kObjectAlignmentMask;
2148  ASSERT(kShortSize == 2);
2149  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
2150  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2151                kHeaderAlignment));
2152  and_(scratch1, Immediate(~kObjectAlignmentMask));
2153  if (kHeaderAlignment > 0) {
2154    subq(scratch1, Immediate(kHeaderAlignment));
2155  }
2156
2157  // Allocate two byte string in new space.
2158  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2159                     times_1,
2160                     scratch1,
2161                     result,
2162                     scratch2,
2163                     scratch3,
2164                     gc_required,
2165                     TAG_OBJECT);
2166
2167  // Set the map, length and hash field.
2168  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2169  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2170  Integer32ToSmi(scratch1, length);
2171  movq(FieldOperand(result, String::kLengthOffset), scratch1);
2172  movq(FieldOperand(result, String::kHashFieldOffset),
2173       Immediate(String::kEmptyHashField));
2174}
2175
2176
2177void MacroAssembler::AllocateAsciiString(Register result,
2178                                         Register length,
2179                                         Register scratch1,
2180                                         Register scratch2,
2181                                         Register scratch3,
2182                                         Label* gc_required) {
2183  // Calculate the number of bytes needed for the characters in the string
2184  // while observing object alignment.
2185  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2186                               kObjectAlignmentMask;
2187  movl(scratch1, length);
2188  ASSERT(kCharSize == 1);
2189  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
2190  and_(scratch1, Immediate(~kObjectAlignmentMask));
2191  if (kHeaderAlignment > 0) {
2192    subq(scratch1, Immediate(kHeaderAlignment));
2193  }
2194
2195  // Allocate ascii string in new space.
2196  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2197                     times_1,
2198                     scratch1,
2199                     result,
2200                     scratch2,
2201                     scratch3,
2202                     gc_required,
2203                     TAG_OBJECT);
2204
2205  // Set the map, length and hash field.
2206  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2207  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2208  Integer32ToSmi(scratch1, length);
2209  movq(FieldOperand(result, String::kLengthOffset), scratch1);
2210  movq(FieldOperand(result, String::kHashFieldOffset),
2211       Immediate(String::kEmptyHashField));
2212}
2213
2214
2215void MacroAssembler::AllocateConsString(Register result,
2216                                        Register scratch1,
2217                                        Register scratch2,
2218                                        Label* gc_required) {
2219  // Allocate a cons string object in new space.
2220  AllocateInNewSpace(ConsString::kSize,
2221                     result,
2222                     scratch1,
2223                     scratch2,
2224                     gc_required,
2225                     TAG_OBJECT);
2226
2227  // Set the map. The other fields are left uninitialized.
2228  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2229  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2230}
2231
2232
2233void MacroAssembler::AllocateAsciiConsString(Register result,
2234                                             Register scratch1,
2235                                             Register scratch2,
2236                                             Label* gc_required) {
2237  // Allocate an ASCII cons string object in new space.
2238  AllocateInNewSpace(ConsString::kSize,
2239                     result,
2240                     scratch1,
2241                     scratch2,
2242                     gc_required,
2243                     TAG_OBJECT);
2244
2245  // Set the map. The other fields are left uninitialized.
2246  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2247  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2248}
2249
2250
2251void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2252  if (context_chain_length > 0) {
2253    // Move up the chain of contexts to the context containing the slot.
2254    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2255    // Load the function context (which is the incoming, outer context).
2256    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2257    for (int i = 1; i < context_chain_length; i++) {
2258      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2259      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2260    }
2261    // The context may be an intermediate context, not a function context.
2262    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2263  } else {  // context is the current function context.
2264    // The context may be an intermediate context, not a function context.
2265    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2266  }
2267}
2268
2269
2270int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2271  // On Windows 64 stack slots are reserved by the caller for all arguments
2272  // including the ones passed in registers, and space is always allocated for
2273  // the four register arguments even if the function takes fewer than four
2274  // arguments.
2275  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2276  // and the caller does not reserve stack slots for them.
2277  ASSERT(num_arguments >= 0);
2278#ifdef _WIN64
2279  static const int kMinimumStackSlots = 4;
2280  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2281  return num_arguments;
2282#else
2283  static const int kRegisterPassedArguments = 6;
2284  if (num_arguments < kRegisterPassedArguments) return 0;
2285  return num_arguments - kRegisterPassedArguments;
2286#endif
2287}
2288
2289
2290void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2291  int frame_alignment = OS::ActivationFrameAlignment();
2292  ASSERT(frame_alignment != 0);
2293  ASSERT(num_arguments >= 0);
2294  // Align the stack and allocate space for the arguments and the old rsp.
2295  movq(kScratchRegister, rsp);
2296  ASSERT(IsPowerOf2(frame_alignment));
2297  int argument_slots_on_stack =
2298      ArgumentStackSlotsForCFunctionCall(num_arguments);
2299  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2300  and_(rsp, Immediate(-frame_alignment));
2301  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2302}
2303
2304
2305void MacroAssembler::CallCFunction(ExternalReference function,
2306                                   int num_arguments) {
2307  movq(rax, function);
2308  CallCFunction(rax, num_arguments);
2309}
2310
2311
2312void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2313  // Check stack alignment.
2314  if (FLAG_debug_code) {
2315    CheckStackAlignment();
2316  }
2317
2318  call(function);
2319  ASSERT(OS::ActivationFrameAlignment() != 0);
2320  ASSERT(num_arguments >= 0);
2321  int argument_slots_on_stack =
2322      ArgumentStackSlotsForCFunctionCall(num_arguments);
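  // Restore the stack pointer that PrepareCallCFunction stashed above the
  // argument slots.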
2323  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2324}
2325
2326
2327CodePatcher::CodePatcher(byte* address, int size)
2328    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2329  // Create a new macro assembler pointing to the address of the code to patch.
2330// The size is adjusted with kGap in order for the assembler to generate size
2331  // bytes of instructions without failing with buffer size constraints.
2332  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2333}
2334
2335
2336CodePatcher::~CodePatcher() {
2337  // Indicate that code has changed.
2338  CPU::FlushICache(address_, size_);
2339
2340  // Check that the code was patched as expected.
2341  ASSERT(masm_.pc_ == address_ + size_);
2342  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2343}
2344
2345} }  // namespace v8::internal
2346
2347#endif  // V8_TARGET_ARCH_X64
2348