// macro-assembler-x64.cc, revision 086aeeaae12517475c22695a200be45495516549
1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_X64)
31
32#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "assembler-x64.h"
35#include "macro-assembler-x64.h"
36#include "serialize.h"
37#include "debug.h"
38#include "heap.h"
39
40namespace v8 {
41namespace internal {
42
43MacroAssembler::MacroAssembler(void* buffer, int size)
44    : Assembler(buffer, size),
45      generating_stub_(false),
46      allow_stub_calls_(true),
47      code_object_(Heap::undefined_value()) {
48}
49
50
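// Root-list accessors: the root array sits at a fixed address kept in
// kRootRegister, so a root can be loaded, stored, pushed or compared with a
// single [kRootRegister + index * kPointerSize] operand, as the helpers
// below show.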
51void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
52  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
53}
54
55
56void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
57  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
58}
59
60
61void MacroAssembler::PushRoot(Heap::RootListIndex index) {
62  push(Operand(kRootRegister, index << kPointerSizeLog2));
63}
64
65
66void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
67  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
68}
69
70
71void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
72  LoadRoot(kScratchRegister, index);
73  cmpq(with, kScratchRegister);
74}
75
76
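// Marks the page region containing 'addr' as dirty. The page start is
// computed into 'object' and the region number into 'addr'; 'scratch' is only
// used for the debug-mode check that the object is not in new space, since
// dirty marks cannot be set for new-space pages.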
77void MacroAssembler::RecordWriteHelper(Register object,
78                                       Register addr,
79                                       Register scratch) {
80  if (FLAG_debug_code) {
81    // Check that the object is not in new space.
82    NearLabel not_in_new_space;
83    InNewSpace(object, scratch, not_equal, &not_in_new_space);
84    Abort("new-space object passed to RecordWriteHelper");
85    bind(&not_in_new_space);
86  }
87
88  // Compute the page start address from the heap object pointer, and reuse
89  // the 'object' register for it.
90  and_(object, Immediate(~Page::kPageAlignmentMask));
91
  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
94  shrl(addr, Immediate(Page::kRegionSizeLog2));
95  andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
96
97  // Set dirty mark for region.
98  bts(Operand(object, Page::kDirtyFlagOffset), addr);
99}
100
101
102void MacroAssembler::RecordWrite(Register object,
103                                 int offset,
104                                 Register value,
105                                 Register index) {
106  // The compiled code assumes that record write doesn't change the
107  // context register, so we check that none of the clobbered
108  // registers are rsi.
109  ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
110
111  // First, check if a write barrier is even needed. The tests below
112  // catch stores of Smis and stores into young gen.
113  Label done;
114  JumpIfSmi(value, &done);
115
116  RecordWriteNonSmi(object, offset, value, index);
117  bind(&done);
118
119  // Clobber all input registers when running with the debug-code flag
120  // turned on to provoke errors. This clobbering repeats the
121  // clobbering done inside RecordWriteNonSmi but it's necessary to
122  // avoid having the fast case for smis leave the registers
123  // unchanged.
124  if (FLAG_debug_code) {
125    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
126    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
127    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
128  }
129}
130
131
132void MacroAssembler::RecordWrite(Register object,
133                                 Register address,
134                                 Register value) {
135  // The compiled code assumes that record write doesn't change the
136  // context register, so we check that none of the clobbered
  // registers are rsi.
138  ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
139
140  // First, check if a write barrier is even needed. The tests below
141  // catch stores of Smis and stores into young gen.
142  Label done;
143  JumpIfSmi(value, &done);
144
145  InNewSpace(object, value, equal, &done);
146
147  RecordWriteHelper(object, address, value);
148
149  bind(&done);
150
151  // Clobber all input registers when running with the debug-code flag
152  // turned on to provoke errors.
153  if (FLAG_debug_code) {
154    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
155    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
156    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
157  }
158}
159
160
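// Out-of-line part of the write barrier for non-smi values. A non-zero
// 'offset' addresses a field write at object + offset; offset == 0 means an
// element store whose slot is computed from the untagged int32 'index' in the
// same way as KeyedStoreIC::GenerateGeneric. New-space objects are skipped
// because their pages carry no region dirty marks.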
161void MacroAssembler::RecordWriteNonSmi(Register object,
162                                       int offset,
163                                       Register scratch,
164                                       Register index) {
165  Label done;
166
167  if (FLAG_debug_code) {
168    NearLabel okay;
169    JumpIfNotSmi(object, &okay);
170    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
171    bind(&okay);
172
173    if (offset == 0) {
174      // index must be int32.
175      Register tmp = index.is(rax) ? rbx : rax;
176      push(tmp);
177      movl(tmp, index);
178      cmpq(tmp, index);
179      Check(equal, "Index register for RecordWrite must be untagged int32.");
180      pop(tmp);
181    }
182  }
183
184  // Test that the object address is not in the new space. We cannot
185  // update page dirty marks for new space pages.
186  InNewSpace(object, scratch, equal, &done);
187
188  // The offset is relative to a tagged or untagged HeapObject pointer,
189  // so either offset or offset + kHeapObjectTag must be a
190  // multiple of kPointerSize.
191  ASSERT(IsAligned(offset, kPointerSize) ||
192         IsAligned(offset + kHeapObjectTag, kPointerSize));
193
194  Register dst = index;
195  if (offset != 0) {
196    lea(dst, Operand(object, offset));
197  } else {
198    // array access: calculate the destination address in the same manner as
199    // KeyedStoreIC::GenerateGeneric.
200    lea(dst, FieldOperand(object,
201                          index,
202                          times_pointer_size,
203                          FixedArray::kHeaderSize));
204  }
205  RecordWriteHelper(object, dst, scratch);
206
207  bind(&done);
208
209  // Clobber all input registers when running with the debug-code flag
210  // turned on to provoke errors.
211  if (FLAG_debug_code) {
212    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
213    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
214    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
215  }
216}
217
218void MacroAssembler::Assert(Condition cc, const char* msg) {
219  if (FLAG_debug_code) Check(cc, msg);
220}
221
222
223void MacroAssembler::AssertFastElements(Register elements) {
224  if (FLAG_debug_code) {
225    NearLabel ok;
226    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
227                Heap::kFixedArrayMapRootIndex);
228    j(equal, &ok);
229    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
230                Heap::kFixedCOWArrayMapRootIndex);
231    j(equal, &ok);
232    Abort("JSObject with fast elements map has slow elements");
233    bind(&ok);
234  }
235}
236
237
238void MacroAssembler::Check(Condition cc, const char* msg) {
239  NearLabel L;
240  j(cc, &L);
241  Abort(msg);
242  // will not return here
243  bind(&L);
244}
245
246
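// Verifies that rsp meets the platform's activation frame alignment
// (commonly 16 bytes on x64 ABIs) and traps with int3 if it does not. No code
// is emitted when the required alignment does not exceed one pointer.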
247void MacroAssembler::CheckStackAlignment() {
248  int frame_alignment = OS::ActivationFrameAlignment();
249  int frame_alignment_mask = frame_alignment - 1;
250  if (frame_alignment > kPointerSize) {
251    ASSERT(IsPowerOf2(frame_alignment));
252    NearLabel alignment_as_expected;
253    testq(rsp, Immediate(frame_alignment_mask));
254    j(zero, &alignment_as_expected);
255    // Abort if stack is not aligned.
256    int3();
257    bind(&alignment_as_expected);
258  }
259}
260
261
262void MacroAssembler::NegativeZeroTest(Register result,
263                                      Register op,
264                                      Label* then_label) {
265  NearLabel ok;
266  testl(result, result);
267  j(not_zero, &ok);
268  testl(op, op);
269  j(sign, then_label);
270  bind(&ok);
271}
272
273
274void MacroAssembler::Abort(const char* msg) {
275  // We want to pass the msg string like a smi to avoid GC
276  // problems, however msg is not guaranteed to be aligned
277  // properly. Instead, we pass an aligned pointer that is
278  // a proper v8 smi, but also pass the alignment difference
279  // from the real pointer as a smi.
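  // Example (assuming the usual kSmiTag == 0 and kSmiTagMask == 1): for a
  // msg pointer ending in ...03, p0 ends in ...02 and the difference 1 is
  // passed separately as Smi::FromInt(1), so the runtime can reconstruct the
  // original char*.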
280  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
281  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
282  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
283  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
284#ifdef DEBUG
285  if (msg != NULL) {
286    RecordComment("Abort message: ");
287    RecordComment(msg);
288  }
289#endif
290  // Disable stub call restrictions to always allow calls to abort.
291  AllowStubCallsScope allow_scope(this, true);
292
293  push(rax);
294  movq(kScratchRegister, p0, RelocInfo::NONE);
295  push(kScratchRegister);
296  movq(kScratchRegister,
297       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
298       RelocInfo::NONE);
299  push(kScratchRegister);
300  CallRuntime(Runtime::kAbort, 2);
301  // will not return here
302  int3();
303}
304
305
306void MacroAssembler::CallStub(CodeStub* stub) {
307  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
308  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
309}
310
311
312MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
313  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
314  MaybeObject* result = stub->TryGetCode();
315  if (!result->IsFailure()) {
316    call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
317         RelocInfo::CODE_TARGET);
318  }
319  return result;
320}
321
322
323void MacroAssembler::TailCallStub(CodeStub* stub) {
324  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
325  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
326}
327
328
329MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
330  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
331  MaybeObject* result = stub->TryGetCode();
332  if (!result->IsFailure()) {
333    jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
334        RelocInfo::CODE_TARGET);
335  }
336  return result;
337}
338
339
340void MacroAssembler::StubReturn(int argc) {
341  ASSERT(argc >= 1 && generating_stub());
342  ret((argc - 1) * kPointerSize);
343}
344
345
346void MacroAssembler::IllegalOperation(int num_arguments) {
347  if (num_arguments > 0) {
348    addq(rsp, Immediate(num_arguments * kPointerSize));
349  }
350  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
351}
352
353
354void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // of an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
358  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
359         (1 << String::kArrayIndexValueBits));
360  // We want the smi-tagged index in key. Even if we subsequently go to
361  // the slow case, converting the key to a smi is always valid.
362  // key: string key
363  // hash: key's hash field, including its array index value.
364  and_(hash, Immediate(String::kArrayIndexValueMask));
365  shr(hash, Immediate(String::kHashShift));
366  // Here we actually clobber the key which will be used if calling into
367  // runtime later. However as the new key is the numeric value of a string key
368  // there is no difference in using either key.
369  Integer32ToSmi(index, hash);
370}
371
372
373void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
374  CallRuntime(Runtime::FunctionForId(id), num_arguments);
375}
376
377
378MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
379                                            int num_arguments) {
380  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
381}
382
383
384void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
385  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
387  // expectation.
388  if (f->nargs >= 0 && f->nargs != num_arguments) {
389    IllegalOperation(num_arguments);
390    return;
391  }
392
393  // TODO(1236192): Most runtime routines don't need the number of
394  // arguments passed in because it is constant. At some point we
395  // should remove this need and make the runtime routine entry code
396  // smarter.
397  Set(rax, num_arguments);
398  movq(rbx, ExternalReference(f));
399  CEntryStub ces(f->result_size);
400  CallStub(&ces);
401}
402
403
404MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
405                                            int num_arguments) {
406  if (f->nargs >= 0 && f->nargs != num_arguments) {
407    IllegalOperation(num_arguments);
408    // Since we did not call the stub, there was no allocation failure.
409    // Return some non-failure object.
410    return Heap::undefined_value();
411  }
412
413  // TODO(1236192): Most runtime routines don't need the number of
414  // arguments passed in because it is constant. At some point we
415  // should remove this need and make the runtime routine entry code
416  // smarter.
417  Set(rax, num_arguments);
418  movq(rbx, ExternalReference(f));
419  CEntryStub ces(f->result_size);
420  return TryCallStub(&ces);
421}
422
423
424void MacroAssembler::CallExternalReference(const ExternalReference& ext,
425                                           int num_arguments) {
426  Set(rax, num_arguments);
427  movq(rbx, ext);
428
429  CEntryStub stub(1);
430  CallStub(&stub);
431}
432
433
434void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
435                                               int num_arguments,
436                                               int result_size) {
437  // ----------- S t a t e -------------
438  //  -- rsp[0] : return address
439  //  -- rsp[8] : argument num_arguments - 1
440  //  ...
441  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
442  // -----------------------------------
443
444  // TODO(1236192): Most runtime routines don't need the number of
445  // arguments passed in because it is constant. At some point we
446  // should remove this need and make the runtime routine entry code
447  // smarter.
448  Set(rax, num_arguments);
449  JumpToExternalReference(ext, result_size);
450}
451
452
453MaybeObject* MacroAssembler::TryTailCallExternalReference(
454    const ExternalReference& ext, int num_arguments, int result_size) {
455  // ----------- S t a t e -------------
456  //  -- rsp[0] : return address
457  //  -- rsp[8] : argument num_arguments - 1
458  //  ...
459  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
460  // -----------------------------------
461
462  // TODO(1236192): Most runtime routines don't need the number of
463  // arguments passed in because it is constant. At some point we
464  // should remove this need and make the runtime routine entry code
465  // smarter.
466  Set(rax, num_arguments);
467  return TryJumpToExternalReference(ext, result_size);
468}
469
470
471void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
472                                     int num_arguments,
473                                     int result_size) {
474  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
475}
476
477
478MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
479                                                int num_arguments,
480                                                int result_size) {
481  return TryTailCallExternalReference(ExternalReference(fid),
482                                      num_arguments,
483                                      result_size);
484}
485
486
487static int Offset(ExternalReference ref0, ExternalReference ref1) {
488  int64_t offset = (ref0.address() - ref1.address());
  // Check that the difference fits into an int.
490  ASSERT(static_cast<int>(offset) == offset);
491  return static_cast<int>(offset);
492}
493
494
495void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
496#ifdef _WIN64
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the first argument register.
499  EnterApiExitFrame(arg_stack_space + 1);
500
501  // rcx must be used to pass the pointer to the return value slot.
502  lea(rcx, StackSpaceOperand(arg_stack_space));
503#else
504  EnterApiExitFrame(arg_stack_space);
505#endif
506}
507
508
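// Calls the given API function, managing the HandleScope bookkeeping around
// the call: the previous next/limit values are saved in callee-saved
// registers and the level is bumped, the returned v8::Handle is unwrapped
// (an empty handle yields undefined), the scope is restored afterwards,
// extensions are deleted if the limit moved, and a scheduled exception is
// promoted by tail-calling the runtime.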
509MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
510    ApiFunction* function, int stack_space) {
511  Label empty_result;
512  Label prologue;
513  Label promote_scheduled_exception;
514  Label delete_allocated_handles;
515  Label leave_exit_frame;
516  Label write_back;
517
518  ExternalReference next_address =
519      ExternalReference::handle_scope_next_address();
520  const int kNextOffset = 0;
521  const int kLimitOffset = Offset(
522      ExternalReference::handle_scope_limit_address(),
523      next_address);
524  const int kLevelOffset = Offset(
525      ExternalReference::handle_scope_level_address(),
526      next_address);
527  ExternalReference scheduled_exception_address =
528      ExternalReference::scheduled_exception_address();
529
530  // Allocate HandleScope in callee-save registers.
531  Register prev_next_address_reg = r14;
532  Register prev_limit_reg = rbx;
533  Register base_reg = r12;
534  movq(base_reg, next_address);
535  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
536  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
537  addl(Operand(base_reg, kLevelOffset), Immediate(1));
538  // Call the api function!
539  movq(rax,
540       reinterpret_cast<int64_t>(function->address()),
541       RelocInfo::RUNTIME_ENTRY);
542  call(rax);
543
544#ifdef _WIN64
  // rax holds a pointer to the v8::Handle; unpack it.
546  movq(rax, Operand(rax, 0));
547#endif
548  // Check if the result handle holds 0.
549  testq(rax, rax);
550  j(zero, &empty_result);
551  // It was non-zero.  Dereference to get the result value.
552  movq(rax, Operand(rax, 0));
553  bind(&prologue);
554
555  // No more valid handles (the result handle was the last one). Restore
556  // previous handle scope.
557  subl(Operand(base_reg, kLevelOffset), Immediate(1));
558  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
559  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
560  j(not_equal, &delete_allocated_handles);
561  bind(&leave_exit_frame);
562
563  // Check if the function scheduled an exception.
564  movq(rsi, scheduled_exception_address);
565  Cmp(Operand(rsi, 0), Factory::the_hole_value());
566  j(not_equal, &promote_scheduled_exception);
567
568  LeaveApiExitFrame();
569  ret(stack_space * kPointerSize);
570
571  bind(&promote_scheduled_exception);
572  MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
573                                           0, 1);
574  if (result->IsFailure()) {
575    return result;
576  }
577
578  bind(&empty_result);
579  // It was zero; the result is undefined.
580  Move(rax, Factory::undefined_value());
581  jmp(&prologue);
582
583  // HandleScope limit has changed. Delete allocated extensions.
584  bind(&delete_allocated_handles);
585  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
586  movq(prev_limit_reg, rax);
587  movq(rax, ExternalReference::delete_handle_scope_extensions());
588  call(rax);
589  movq(rax, prev_limit_reg);
590  jmp(&leave_exit_frame);
591
592  return result;
593}
594
595
596void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
597                                             int result_size) {
598  // Set the entry point and jump to the C entry runtime stub.
599  movq(rbx, ext);
600  CEntryStub ces(result_size);
601  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
602}
603
604
605MaybeObject* MacroAssembler::TryJumpToExternalReference(
606    const ExternalReference& ext, int result_size) {
607  // Set the entry point and jump to the C entry runtime stub.
608  movq(rbx, ext);
609  CEntryStub ces(result_size);
610  return TryTailCallStub(&ces);
611}
612
613
614void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
615  // Calls are not allowed in some stubs.
616  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
617
618  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
620  // parameter count to avoid emitting code to do the check.
621  ParameterCount expected(0);
622  GetBuiltinEntry(rdx, id);
623  InvokeCode(rdx, expected, expected, flag);
624}
625
626
627void MacroAssembler::GetBuiltinFunction(Register target,
628                                        Builtins::JavaScript id) {
629  // Load the builtins object into target register.
630  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
631  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
632  movq(target, FieldOperand(target,
633                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
634}
635
636
637void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
638  ASSERT(!target.is(rdi));
639  // Load the JavaScript builtin function from the builtins object.
640  GetBuiltinFunction(rdi, id);
641  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
642}
643
644
645void MacroAssembler::Set(Register dst, int64_t x) {
646  if (x == 0) {
647    xorl(dst, dst);
648  } else if (is_int32(x)) {
649    movq(dst, Immediate(static_cast<int32_t>(x)));
650  } else if (is_uint32(x)) {
651    movl(dst, Immediate(static_cast<uint32_t>(x)));
652  } else {
653    movq(dst, x, RelocInfo::NONE);
654  }
655}
656
657void MacroAssembler::Set(const Operand& dst, int64_t x) {
658  if (is_int32(x)) {
659    movq(dst, Immediate(static_cast<int32_t>(x)));
660  } else {
661    movq(kScratchRegister, x, RelocInfo::NONE);
662    movq(dst, kScratchRegister);
663  }
664}
665
666// ----------------------------------------------------------------------------
667// Smi tagging, untagging and tag detection.
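// On this x64 port a smi keeps its 32-bit payload in the upper half of the
// word and the lower 32 bits zero (kSmiShift is 32, kSmiTag is 0). The
// *Field variants below therefore operate directly on the upper 32-bit half
// of a tagged slot, at offset kSmiShift / kBitsPerByte.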
668
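// Returns a register holding the smi 'source' without clobbering anything
// except kScratchRegister: 0 is materialized in kScratchRegister, 1 is taken
// from kSmiConstantRegister (assumed to hold the smi 1), and other values are
// loaded into kScratchRegister.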
669Register MacroAssembler::GetSmiConstant(Smi* source) {
670  int value = source->value();
671  if (value == 0) {
672    xorl(kScratchRegister, kScratchRegister);
673    return kScratchRegister;
674  }
675  if (value == 1) {
676    return kSmiConstantRegister;
677  }
678  LoadSmiConstant(kScratchRegister, source);
679  return kScratchRegister;
680}
681
682void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
683  if (FLAG_debug_code) {
684    movq(dst,
685         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
686         RelocInfo::NONE);
687    cmpq(dst, kSmiConstantRegister);
688    if (allow_stub_calls()) {
689      Assert(equal, "Uninitialized kSmiConstantRegister");
690    } else {
691      NearLabel ok;
692      j(equal, &ok);
693      int3();
694      bind(&ok);
695    }
696  }
697  if (source->value() == 0) {
698    xorl(dst, dst);
699    return;
700  }
701  int value = source->value();
702  bool negative = value < 0;
703  unsigned int uvalue = negative ? -value : value;
704
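  // Small absolute values are synthesized from kSmiConstantRegister (the
  // smi 1) with a single lea, e.g. smi(5) = smi(1) + 4 * smi(1); negative
  // values are negated afterwards.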
705  switch (uvalue) {
706    case 9:
707      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
708      break;
709    case 8:
710      xorl(dst, dst);
711      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
712      break;
713    case 4:
714      xorl(dst, dst);
715      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
716      break;
717    case 5:
718      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
719      break;
720    case 3:
721      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
722      break;
723    case 2:
724      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
725      break;
726    case 1:
727      movq(dst, kSmiConstantRegister);
728      break;
729    case 0:
730      UNREACHABLE();
731      return;
732    default:
733      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
734      return;
735  }
736  if (negative) {
737    neg(dst);
738  }
739}
740
741
742void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
743  ASSERT_EQ(0, kSmiTag);
744  if (!dst.is(src)) {
745    movl(dst, src);
746  }
747  shl(dst, Immediate(kSmiShift));
748}
749
750
751void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
752  if (FLAG_debug_code) {
753    testb(dst, Immediate(0x01));
754    NearLabel ok;
755    j(zero, &ok);
756    if (allow_stub_calls()) {
757      Abort("Integer32ToSmiField writing to non-smi location");
758    } else {
759      int3();
760    }
761    bind(&ok);
762  }
763  ASSERT(kSmiShift % kBitsPerByte == 0);
764  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
765}
766
767
768void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
769                                                Register src,
770                                                int constant) {
771  if (dst.is(src)) {
772    addq(dst, Immediate(constant));
773  } else {
774    lea(dst, Operand(src, constant));
775  }
776  shl(dst, Immediate(kSmiShift));
777}
778
779
780void MacroAssembler::SmiToInteger32(Register dst, Register src) {
781  ASSERT_EQ(0, kSmiTag);
782  if (!dst.is(src)) {
783    movq(dst, src);
784  }
785  shr(dst, Immediate(kSmiShift));
786}
787
788
789void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
790  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
791}
792
793
794void MacroAssembler::SmiToInteger64(Register dst, Register src) {
795  ASSERT_EQ(0, kSmiTag);
796  if (!dst.is(src)) {
797    movq(dst, src);
798  }
799  sar(dst, Immediate(kSmiShift));
800}
801
802
803void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
804  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
805}
806
807
808void MacroAssembler::SmiTest(Register src) {
809  testq(src, src);
810}
811
812
813void MacroAssembler::SmiCompare(Register dst, Register src) {
814  cmpq(dst, src);
815}
816
817
818void MacroAssembler::SmiCompare(Register dst, Smi* src) {
819  ASSERT(!dst.is(kScratchRegister));
820  if (src->value() == 0) {
821    testq(dst, dst);
822  } else {
823    Register constant_reg = GetSmiConstant(src);
824    cmpq(dst, constant_reg);
825  }
826}
827
828
829void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
830  cmpq(dst, src);
831}
832
833
834void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
835  cmpq(dst, src);
836}
837
838
839void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
840  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
841}
842
843
844void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
845  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
846}
847
848
849void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
850                                                           Register src,
851                                                           int power) {
852  ASSERT(power >= 0);
853  ASSERT(power < 64);
854  if (power == 0) {
855    SmiToInteger64(dst, src);
856    return;
857  }
858  if (!dst.is(src)) {
859    movq(dst, src);
860  }
861  if (power < kSmiShift) {
862    sar(dst, Immediate(kSmiShift - power));
863  } else if (power > kSmiShift) {
864    shl(dst, Immediate(power - kSmiShift));
865  }
866}
867
868
869void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
870                                                         Register src,
871                                                         int power) {
872  ASSERT((0 <= power) && (power < 32));
873  if (dst.is(src)) {
874    shr(dst, Immediate(power + kSmiShift));
875  } else {
876    UNIMPLEMENTED();  // Not used.
877  }
878}
879
880
881Condition MacroAssembler::CheckSmi(Register src) {
882  ASSERT_EQ(0, kSmiTag);
883  testb(src, Immediate(kSmiTagMask));
884  return zero;
885}
886
887
888Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
889  ASSERT_EQ(0, kSmiTag);
890  // Make mask 0x8000000000000001 and test that both bits are zero.
891  movq(kScratchRegister, src);
892  rol(kScratchRegister, Immediate(1));
893  testb(kScratchRegister, Immediate(3));
894  return zero;
895}
896
897
898Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
899  if (first.is(second)) {
900    return CheckSmi(first);
901  }
902  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
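  // If both values are smis their tag bits are clear, so the low two bits of
  // the 32-bit sum are zero; a heap-object tag in either operand leaves bit 0
  // or bit 1 of the sum set.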
903  leal(kScratchRegister, Operand(first, second, times_1, 0));
904  testb(kScratchRegister, Immediate(0x03));
905  return zero;
906}
907
908
909Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
910                                                  Register second) {
911  if (first.is(second)) {
912    return CheckNonNegativeSmi(first);
913  }
914  movq(kScratchRegister, first);
915  or_(kScratchRegister, second);
916  rol(kScratchRegister, Immediate(1));
917  testl(kScratchRegister, Immediate(3));
918  return zero;
919}
920
921
922Condition MacroAssembler::CheckEitherSmi(Register first,
923                                         Register second,
924                                         Register scratch) {
925  if (first.is(second)) {
926    return CheckSmi(first);
927  }
928  if (scratch.is(second)) {
929    andl(scratch, first);
930  } else {
931    if (!scratch.is(first)) {
932      movl(scratch, first);
933    }
934    andl(scratch, second);
935  }
936  testb(scratch, Immediate(kSmiTagMask));
937  return zero;
938}
939
940
941Condition MacroAssembler::CheckIsMinSmi(Register src) {
942  ASSERT(!src.is(kScratchRegister));
943  // If we overflow by subtracting one, it's the minimal smi value.
944  cmpq(src, kSmiConstantRegister);
945  return overflow;
946}
947
948
949Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
950  // A 32-bit integer value can always be converted to a smi.
951  return always;
952}
953
954
955Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
956  // An unsigned 32-bit integer value is valid as long as the high bit
957  // is not set.
958  testl(src, src);
959  return positive;
960}
961
962
963void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
964  if (constant->value() == 0) {
965    if (!dst.is(src)) {
966      movq(dst, src);
967    }
968    return;
969  } else if (dst.is(src)) {
970    ASSERT(!dst.is(kScratchRegister));
971    switch (constant->value()) {
972      case 1:
973        addq(dst, kSmiConstantRegister);
974        return;
975      case 2:
976        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
977        return;
978      case 4:
979        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
980        return;
981      case 8:
982        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
983        return;
984      default:
985        Register constant_reg = GetSmiConstant(constant);
986        addq(dst, constant_reg);
987        return;
988    }
989  } else {
990    switch (constant->value()) {
991      case 1:
992        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
993        return;
994      case 2:
995        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
996        return;
997      case 4:
998        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
999        return;
1000      case 8:
1001        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1002        return;
1003      default:
1004        LoadSmiConstant(dst, constant);
1005        addq(dst, src);
1006        return;
1007    }
1008  }
1009}
1010
1011
1012void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1013  if (constant->value() != 0) {
1014    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
1015  }
1016}
1017
1018
1019void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1020  if (constant->value() == 0) {
1021    if (!dst.is(src)) {
1022      movq(dst, src);
1023    }
1024  } else if (dst.is(src)) {
1025    ASSERT(!dst.is(kScratchRegister));
1026    Register constant_reg = GetSmiConstant(constant);
1027    subq(dst, constant_reg);
1028  } else {
1029    if (constant->value() == Smi::kMinValue) {
1030      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; they
      // only differ in the overflow flag, which we don't check here.
1033      addq(dst, src);
1034    } else {
1035      // Subtract by adding the negation.
1036      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1037      addq(dst, src);
1038    }
1039  }
1040}
1041
1042
1043void MacroAssembler::SmiAdd(Register dst,
1044                            Register src1,
1045                            Register src2) {
1046  // No overflow checking. Use only when it's known that
1047  // overflowing is impossible.
1048  ASSERT(!dst.is(src2));
1049  if (dst.is(src1)) {
1050    addq(dst, src2);
1051  } else {
1052    movq(dst, src1);
1053    addq(dst, src2);
1054  }
1055  Assert(no_overflow, "Smi addition overflow");
1056}
1057
1058
1059void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1060  // No overflow checking. Use only when it's known that
1061  // overflowing is impossible (e.g., subtracting two positive smis).
1062  ASSERT(!dst.is(src2));
1063  if (dst.is(src1)) {
1064    subq(dst, src2);
1065  } else {
1066    movq(dst, src1);
1067    subq(dst, src2);
1068  }
1069  Assert(no_overflow, "Smi subtraction overflow");
1070}
1071
1072
1073void MacroAssembler::SmiSub(Register dst,
1074                            Register src1,
1075                            const Operand& src2) {
1076  // No overflow checking. Use only when it's known that
1077  // overflowing is impossible (e.g., subtracting two positive smis).
1078  if (dst.is(src1)) {
1079    subq(dst, src2);
1080  } else {
1081    movq(dst, src1);
1082    subq(dst, src2);
1083  }
1084  Assert(no_overflow, "Smi subtraction overflow");
1085}
1086
1087
1088void MacroAssembler::SmiNot(Register dst, Register src) {
1089  ASSERT(!dst.is(kScratchRegister));
1090  ASSERT(!src.is(kScratchRegister));
1091  // Set tag and padding bits before negating, so that they are zero afterwards.
1092  movl(kScratchRegister, Immediate(~0));
1093  if (dst.is(src)) {
1094    xor_(dst, kScratchRegister);
1095  } else {
1096    lea(dst, Operand(src, kScratchRegister, times_1, 0));
1097  }
1098  not_(dst);
1099}
1100
1101
1102void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
1103  ASSERT(!dst.is(src2));
1104  if (!dst.is(src1)) {
1105    movq(dst, src1);
1106  }
1107  and_(dst, src2);
1108}
1109
1110
1111void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1112  if (constant->value() == 0) {
1113    Set(dst, 0);
1114  } else if (dst.is(src)) {
1115    ASSERT(!dst.is(kScratchRegister));
1116    Register constant_reg = GetSmiConstant(constant);
1117    and_(dst, constant_reg);
1118  } else {
1119    LoadSmiConstant(dst, constant);
1120    and_(dst, src);
1121  }
1122}
1123
1124
1125void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1126  if (!dst.is(src1)) {
1127    movq(dst, src1);
1128  }
1129  or_(dst, src2);
1130}
1131
1132
1133void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1134  if (dst.is(src)) {
1135    ASSERT(!dst.is(kScratchRegister));
1136    Register constant_reg = GetSmiConstant(constant);
1137    or_(dst, constant_reg);
1138  } else {
1139    LoadSmiConstant(dst, constant);
1140    or_(dst, src);
1141  }
1142}
1143
1144
1145void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1146  if (!dst.is(src1)) {
1147    movq(dst, src1);
1148  }
1149  xor_(dst, src2);
1150}
1151
1152
1153void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1154  if (dst.is(src)) {
1155    ASSERT(!dst.is(kScratchRegister));
1156    Register constant_reg = GetSmiConstant(constant);
1157    xor_(dst, constant_reg);
1158  } else {
1159    LoadSmiConstant(dst, constant);
1160    xor_(dst, src);
1161  }
1162}
1163
1164
1165void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1166                                                     Register src,
1167                                                     int shift_value) {
1168  ASSERT(is_uint5(shift_value));
1169  if (shift_value > 0) {
1170    if (dst.is(src)) {
1171      sar(dst, Immediate(shift_value + kSmiShift));
1172      shl(dst, Immediate(kSmiShift));
1173    } else {
1174      UNIMPLEMENTED();  // Not used.
1175    }
1176  }
1177}
1178
1179
1180void MacroAssembler::SmiShiftLeftConstant(Register dst,
1181                                          Register src,
1182                                          int shift_value) {
1183  if (!dst.is(src)) {
1184    movq(dst, src);
1185  }
1186  if (shift_value > 0) {
1187    shl(dst, Immediate(shift_value));
1188  }
1189}
1190
1191
1192void MacroAssembler::SmiShiftLeft(Register dst,
1193                                  Register src1,
1194                                  Register src2) {
1195  ASSERT(!dst.is(rcx));
1197  // Untag shift amount.
1198  if (!dst.is(src1)) {
1199    movq(dst, src1);
1200  }
1201  SmiToInteger32(rcx, src2);
  // The shift amount is masked to the lower 5 bits, not six as the 64-bit
  // shl opcode would use.
1203  and_(rcx, Immediate(0x1f));
1204  shl_cl(dst);
1205}
1206
1207
1208void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1209                                             Register src1,
1210                                             Register src2) {
1211  ASSERT(!dst.is(kScratchRegister));
1212  ASSERT(!src1.is(kScratchRegister));
1213  ASSERT(!src2.is(kScratchRegister));
1214  ASSERT(!dst.is(rcx));
1215  if (src1.is(rcx)) {
1216    movq(kScratchRegister, src1);
1217  } else if (src2.is(rcx)) {
1218    movq(kScratchRegister, src2);
1219  }
1220  if (!dst.is(src1)) {
1221    movq(dst, src1);
1222  }
1223  SmiToInteger32(rcx, src2);
1224  orl(rcx, Immediate(kSmiShift));
1225  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
1226  shl(dst, Immediate(kSmiShift));
1227  if (src1.is(rcx)) {
1228    movq(src1, kScratchRegister);
1229  } else if (src2.is(rcx)) {
1230    movq(src2, kScratchRegister);
1231  }
1232}
1233
1234
1235SmiIndex MacroAssembler::SmiToIndex(Register dst,
1236                                    Register src,
1237                                    int shift) {
1238  ASSERT(is_uint6(shift));
1239  // There is a possible optimization if shift is in the range 60-63, but that
1240  // will (and must) never happen.
1241  if (!dst.is(src)) {
1242    movq(dst, src);
1243  }
1244  if (shift < kSmiShift) {
1245    sar(dst, Immediate(kSmiShift - shift));
1246  } else {
1247    shl(dst, Immediate(shift - kSmiShift));
1248  }
1249  return SmiIndex(dst, times_1);
1250}
1251
1252SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1253                                            Register src,
1254                                            int shift) {
1255  // Register src holds a positive smi.
1256  ASSERT(is_uint6(shift));
1257  if (!dst.is(src)) {
1258    movq(dst, src);
1259  }
1260  neg(dst);
1261  if (shift < kSmiShift) {
1262    sar(dst, Immediate(kSmiShift - shift));
1263  } else {
1264    shl(dst, Immediate(shift - kSmiShift));
1265  }
1266  return SmiIndex(dst, times_1);
1267}
1268
1269
1270void MacroAssembler::Move(Register dst, Register src) {
1271  if (!dst.is(src)) {
1272    movq(dst, src);
1273  }
1274}
1275
1276
1277
1278
1279void MacroAssembler::Move(Register dst, Handle<Object> source) {
1280  ASSERT(!source->IsFailure());
1281  if (source->IsSmi()) {
1282    Move(dst, Smi::cast(*source));
1283  } else {
1284    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1285  }
1286}
1287
1288
1289void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
1290  ASSERT(!source->IsFailure());
1291  if (source->IsSmi()) {
1292    Move(dst, Smi::cast(*source));
1293  } else {
1294    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1295    movq(dst, kScratchRegister);
1296  }
1297}
1298
1299
1300void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1301  if (source->IsSmi()) {
1302    SmiCompare(dst, Smi::cast(*source));
1303  } else {
1304    Move(kScratchRegister, source);
1305    cmpq(dst, kScratchRegister);
1306  }
1307}
1308
1309
1310void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1311  if (source->IsSmi()) {
1312    SmiCompare(dst, Smi::cast(*source));
1313  } else {
1314    ASSERT(source->IsHeapObject());
1315    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1316    cmpq(dst, kScratchRegister);
1317  }
1318}
1319
1320
1321void MacroAssembler::Push(Handle<Object> source) {
1322  if (source->IsSmi()) {
1323    Push(Smi::cast(*source));
1324  } else {
1325    ASSERT(source->IsHeapObject());
1326    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1327    push(kScratchRegister);
1328  }
1329}
1330
1331
1332void MacroAssembler::Push(Smi* source) {
1333  intptr_t smi = reinterpret_cast<intptr_t>(source);
1334  if (is_int32(smi)) {
1335    push(Immediate(static_cast<int32_t>(smi)));
1336  } else {
1337    Register constant = GetSmiConstant(source);
1338    push(constant);
1339  }
1340}
1341
1342
1343void MacroAssembler::Drop(int stack_elements) {
1344  if (stack_elements > 0) {
1345    addq(rsp, Immediate(stack_elements * kPointerSize));
1346  }
1347}
1348
1349
1350void MacroAssembler::Test(const Operand& src, Smi* source) {
1351  testl(Operand(src, kIntSize), Immediate(source->value()));
1352}
1353
1354
1355void MacroAssembler::Jump(ExternalReference ext) {
1356  movq(kScratchRegister, ext);
1357  jmp(kScratchRegister);
1358}
1359
1360
1361void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1362  movq(kScratchRegister, destination, rmode);
1363  jmp(kScratchRegister);
1364}
1365
1366
1367void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1368  // TODO(X64): Inline this
1369  jmp(code_object, rmode);
1370}
1371
1372
1373void MacroAssembler::Call(ExternalReference ext) {
1374  movq(kScratchRegister, ext);
1375  call(kScratchRegister);
1376}
1377
1378
1379void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1380  movq(kScratchRegister, destination, rmode);
1381  call(kScratchRegister);
1382}
1383
1384
1385void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1386  ASSERT(RelocInfo::IsCodeTarget(rmode));
1387  call(code_object, rmode);
1388}
1389
1390
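// Pushes a stack handler. The return address is already on the stack; the
// handler state, the saved frame pointer (NULL for JS entry frames) and the
// link to the previous handler follow, matching the StackHandlerConstants
// offsets asserted below, and Top::k_handler_address is updated to point at
// the new handler.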
1391void MacroAssembler::PushTryHandler(CodeLocation try_location,
1392                                    HandlerType type) {
1393  // Adjust this code if not the case.
1394  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1395
1396  // The pc (return address) is already on TOS.  This code pushes state,
1397  // frame pointer and current handler.  Check that they are expected
1398  // next on the stack, in that order.
1399  ASSERT_EQ(StackHandlerConstants::kStateOffset,
1400            StackHandlerConstants::kPCOffset - kPointerSize);
1401  ASSERT_EQ(StackHandlerConstants::kFPOffset,
1402            StackHandlerConstants::kStateOffset - kPointerSize);
1403  ASSERT_EQ(StackHandlerConstants::kNextOffset,
1404            StackHandlerConstants::kFPOffset - kPointerSize);
1405
1406  if (try_location == IN_JAVASCRIPT) {
1407    if (type == TRY_CATCH_HANDLER) {
1408      push(Immediate(StackHandler::TRY_CATCH));
1409    } else {
1410      push(Immediate(StackHandler::TRY_FINALLY));
1411    }
1412    push(rbp);
1413  } else {
1414    ASSERT(try_location == IN_JS_ENTRY);
1415    // The frame pointer does not point to a JS frame so we save NULL
1416    // for rbp. We expect the code throwing an exception to check rbp
1417    // before dereferencing it to restore the context.
1418    push(Immediate(StackHandler::ENTRY));
1419    push(Immediate(0));  // NULL frame pointer.
1420  }
1421  // Save the current handler.
1422  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1423  push(Operand(kScratchRegister, 0));
1424  // Link this handler.
1425  movq(Operand(kScratchRegister, 0), rsp);
1426}
1427
1428
1429void MacroAssembler::PopTryHandler() {
1430  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1431  // Unlink this handler.
1432  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1433  pop(Operand(kScratchRegister, 0));
1434  // Remove the remaining fields.
1435  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1436}
1437
1438
1439void MacroAssembler::Ret() {
1440  ret(0);
1441}
1442
1443
1444void MacroAssembler::FCmp() {
1445  fucomip();
1446  fstp(0);
1447}
1448
1449
1450void MacroAssembler::CmpObjectType(Register heap_object,
1451                                   InstanceType type,
1452                                   Register map) {
1453  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1454  CmpInstanceType(map, type);
1455}
1456
1457
1458void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1459  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1460       Immediate(static_cast<int8_t>(type)));
1461}
1462
1463
1464void MacroAssembler::CheckMap(Register obj,
1465                              Handle<Map> map,
1466                              Label* fail,
1467                              bool is_heap_object) {
1468  if (!is_heap_object) {
1469    JumpIfSmi(obj, fail);
1470  }
1471  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1472  j(not_equal, fail);
1473}
1474
1475
1476void MacroAssembler::AbortIfNotNumber(Register object) {
1477  NearLabel ok;
1478  Condition is_smi = CheckSmi(object);
1479  j(is_smi, &ok);
1480  Cmp(FieldOperand(object, HeapObject::kMapOffset),
1481      Factory::heap_number_map());
1482  Assert(equal, "Operand not a number");
1483  bind(&ok);
1484}
1485
1486
1487void MacroAssembler::AbortIfSmi(Register object) {
1489  Condition is_smi = CheckSmi(object);
1490  Assert(NegateCondition(is_smi), "Operand is a smi");
1491}
1492
1493
1494void MacroAssembler::AbortIfNotSmi(Register object) {
1496  Condition is_smi = CheckSmi(object);
1497  Assert(is_smi, "Operand is not a smi");
1498}
1499
1500
1501void MacroAssembler::AbortIfNotRootValue(Register src,
1502                                         Heap::RootListIndex root_value_index,
1503                                         const char* message) {
1504  ASSERT(!src.is(kScratchRegister));
1505  LoadRoot(kScratchRegister, root_value_index);
1506  cmpq(src, kScratchRegister);
1507  Check(equal, message);
1508}
1509
1510
1511
1512Condition MacroAssembler::IsObjectStringType(Register heap_object,
1513                                             Register map,
1514                                             Register instance_type) {
1515  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1516  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1517  ASSERT(kNotStringTag != 0);
1518  testb(instance_type, Immediate(kIsNotStringMask));
1519  return zero;
1520}
1521
1522
1523void MacroAssembler::TryGetFunctionPrototype(Register function,
1524                                             Register result,
1525                                             Label* miss) {
  // Check that the function isn't a smi.
1527  testl(function, Immediate(kSmiTagMask));
1528  j(zero, miss);
1529
1530  // Check that the function really is a function.
1531  CmpObjectType(function, JS_FUNCTION_TYPE, result);
1532  j(not_equal, miss);
1533
1534  // Make sure that the function has an instance prototype.
1535  NearLabel non_instance;
1536  testb(FieldOperand(result, Map::kBitFieldOffset),
1537        Immediate(1 << Map::kHasNonInstancePrototype));
1538  j(not_zero, &non_instance);
1539
1540  // Get the prototype or initial map from the function.
1541  movq(result,
1542       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1543
1544  // If the prototype or initial map is the hole, don't return it and
1545  // simply miss the cache instead. This will allow us to allocate a
1546  // prototype object on-demand in the runtime system.
1547  CompareRoot(result, Heap::kTheHoleValueRootIndex);
1548  j(equal, miss);
1549
1550  // If the function does not have an initial map, we're done.
1551  NearLabel done;
1552  CmpObjectType(result, MAP_TYPE, kScratchRegister);
1553  j(not_equal, &done);
1554
1555  // Get the prototype from the initial map.
1556  movq(result, FieldOperand(result, Map::kPrototypeOffset));
1557  jmp(&done);
1558
1559  // Non-instance prototype: Fetch prototype from constructor field
1560  // in initial map.
1561  bind(&non_instance);
1562  movq(result, FieldOperand(result, Map::kConstructorOffset));
1563
1564  // All done.
1565  bind(&done);
1566}
1567
1568
1569void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1570  if (FLAG_native_code_counters && counter->Enabled()) {
1571    movq(kScratchRegister, ExternalReference(counter));
1572    movl(Operand(kScratchRegister, 0), Immediate(value));
1573  }
1574}
1575
1576
1577void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1578  ASSERT(value > 0);
1579  if (FLAG_native_code_counters && counter->Enabled()) {
1580    movq(kScratchRegister, ExternalReference(counter));
1581    Operand operand(kScratchRegister, 0);
1582    if (value == 1) {
1583      incl(operand);
1584    } else {
1585      addl(operand, Immediate(value));
1586    }
1587  }
1588}
1589
1590
1591void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1592  ASSERT(value > 0);
1593  if (FLAG_native_code_counters && counter->Enabled()) {
1594    movq(kScratchRegister, ExternalReference(counter));
1595    Operand operand(kScratchRegister, 0);
1596    if (value == 1) {
1597      decl(operand);
1598    } else {
1599      subl(operand, Immediate(value));
1600    }
1601  }
1602}
1603
1604
1605#ifdef ENABLE_DEBUGGER_SUPPORT
1606void MacroAssembler::DebugBreak() {
1607  ASSERT(allow_stub_calls());
1608  Set(rax, 0);  // No arguments.
1609  movq(rbx, ExternalReference(Runtime::kDebugBreak));
1610  CEntryStub ces(1);
1611  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
1612}
1613#endif  // ENABLE_DEBUGGER_SUPPORT
1614
1615
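// A note on the calling convention: InvokePrologue (not shown in this
// listing) is expected to fall through when the actual argument count matches
// the expected one and otherwise to go through argument adaptation; the code
// is then called or tail-jumped according to 'flag'.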
1616void MacroAssembler::InvokeCode(Register code,
1617                                const ParameterCount& expected,
1618                                const ParameterCount& actual,
1619                                InvokeFlag flag) {
1620  NearLabel done;
1621  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1622  if (flag == CALL_FUNCTION) {
1623    call(code);
1624  } else {
1625    ASSERT(flag == JUMP_FUNCTION);
1626    jmp(code);
1627  }
1628  bind(&done);
1629}
1630
1631
1632void MacroAssembler::InvokeCode(Handle<Code> code,
1633                                const ParameterCount& expected,
1634                                const ParameterCount& actual,
1635                                RelocInfo::Mode rmode,
1636                                InvokeFlag flag) {
1637  NearLabel done;
1638  Register dummy = rax;
1639  InvokePrologue(expected, actual, code, dummy, &done, flag);
1640  if (flag == CALL_FUNCTION) {
1641    Call(code, rmode);
1642  } else {
1643    ASSERT(flag == JUMP_FUNCTION);
1644    Jump(code, rmode);
1645  }
1646  bind(&done);
1647}
1648
1649
1650void MacroAssembler::InvokeFunction(Register function,
1651                                    const ParameterCount& actual,
1652                                    InvokeFlag flag) {
1653  ASSERT(function.is(rdi));
1654  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1655  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1656  movsxlq(rbx,
1657          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1658  // Advances rdx to the end of the Code object header, to the start of
1659  // the executable code.
1660  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1661
1662  ParameterCount expected(rbx);
1663  InvokeCode(rdx, expected, actual, flag);
1664}
1665
1666
1667void MacroAssembler::InvokeFunction(JSFunction* function,
1668                                    const ParameterCount& actual,
1669                                    InvokeFlag flag) {
1670  ASSERT(function->is_compiled());
1671  // Get the function and setup the context.
1672  Move(rdi, Handle<JSFunction>(function));
1673  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1674
1675  // Invoke the cached code.
1676  Handle<Code> code(function->code());
1677  ParameterCount expected(function->shared()->formal_parameter_count());
1678  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
1679}
1680
1681
1682void MacroAssembler::EnterFrame(StackFrame::Type type) {
1683  push(rbp);
1684  movq(rbp, rsp);
1685  push(rsi);  // Context.
1686  Push(Smi::FromInt(type));
1687  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1688  push(kScratchRegister);
1689  if (FLAG_debug_code) {
1690    movq(kScratchRegister,
1691         Factory::undefined_value(),
1692         RelocInfo::EMBEDDED_OBJECT);
1693    cmpq(Operand(rsp, 0), kScratchRegister);
1694    Check(not_equal, "code object not properly patched");
1695  }
1696}
1697
1698
1699void MacroAssembler::LeaveFrame(StackFrame::Type type) {
1700  if (FLAG_debug_code) {
1701    Move(kScratchRegister, Smi::FromInt(type));
1702    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
1703    Check(equal, "stack frame types must match");
1704  }
1705  movq(rsp, rbp);
1706  pop(rbp);
1707}
1708
1709
1710void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
1712  // All constants are relative to the frame pointer of the exit frame.
1713  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
1714  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
1715  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
1716  push(rbp);
1717  movq(rbp, rsp);
1718
1719  // Reserve room for entry stack pointer and push the code object.
1720  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
1721  push(Immediate(0));  // Saved entry sp, patched before call.
1722  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
1724
1725  // Save the frame pointer and the context in top.
1726  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1727  ExternalReference context_address(Top::k_context_address);
1728  if (save_rax) {
1729    movq(r14, rax);  // Back up rax before we use it.
1730  }
1731
1732  movq(rax, rbp);
1733  store_rax(c_entry_fp_address);
1734  movq(rax, rsi);
1735  store_rax(context_address);
1736}
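// A rough sketch of the exit frame built so far, following the offsets
// asserted above (8-byte pointers):
//   rbp + 16: caller SP
//   rbp + 8 : caller PC (return address)
//   rbp + 0 : saved caller rbp
//   rbp - 8 : saved entry sp slot, still 0 until the epilogue patches it
//   rbp - 16: code object
// In addition, r14 holds the incoming rax when save_rax is true, and the
// frame pointer and context have been recorded in Top.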
1737
1738
1739void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
1740#ifdef _WIN64
1741  const int kShadowSpace = 4;
1742  arg_stack_space += kShadowSpace;
1743#endif
1744  if (arg_stack_space > 0) {
1745    subq(rsp, Immediate(arg_stack_space * kPointerSize));
1746  }
1747
1748  // Get the required frame alignment for the OS.
1749  static const int kFrameAlignment = OS::ActivationFrameAlignment();
1750  if (kFrameAlignment > 0) {
1751    ASSERT(IsPowerOf2(kFrameAlignment));
1752    movq(kScratchRegister, Immediate(-kFrameAlignment));
1753    and_(rsp, kScratchRegister);
1754  }
1755
1756  // Patch the saved entry sp.
1757  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
1758}
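// Worked example of the alignment step above: with kFrameAlignment == 16,
// -kFrameAlignment is ~15, so "and rsp, -16" rounds rsp down to the next
// 16-byte boundary (e.g. 0x...7f38 becomes 0x...7f30). The aligned value
// is then recorded in the saved entry sp slot at rbp - 8.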
1759
1760
1761void MacroAssembler::EnterExitFrame(int arg_stack_space) {
1762  EnterExitFramePrologue(true);
1763
1764  // Set up argv in callee-saved register r12. It is reused in LeaveExitFrame,
1765  // so it must be retained across the C-call.
1766  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
1767  lea(r12, Operand(rbp, r14, times_pointer_size, offset));
1768
1769  EnterExitFrameEpilogue(arg_stack_space);
1770}
1771
1772
1773void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
1774  EnterExitFramePrologue(false);
1775  EnterExitFrameEpilogue(arg_stack_space);
1776}
1777
1778
1779void MacroAssembler::LeaveExitFrame() {
1780  // Registers:
1781  // r12 : argv
1782
1783  // Get the return address from the stack and restore the frame pointer.
1784  movq(rcx, Operand(rbp, 1 * kPointerSize));
1785  movq(rbp, Operand(rbp, 0 * kPointerSize));
1786
1787  // Pop everything up to and including the arguments and the receiver
1788  // from the caller stack.
1789  lea(rsp, Operand(r12, 1 * kPointerSize));
1790
1791  // Push the return address to get ready to return.
1792  push(rcx);
1793
1794  LeaveExitFrameEpilogue();
1795}
1796
1797
1798void MacroAssembler::LeaveApiExitFrame() {
1799  movq(rsp, rbp);
1800  pop(rbp);
1801
1802  LeaveExitFrameEpilogue();
1803}
1804
1805
1806void MacroAssembler::LeaveExitFrameEpilogue() {
1807  // Restore current context from top and clear it in debug mode.
1808  ExternalReference context_address(Top::k_context_address);
1809  movq(kScratchRegister, context_address);
1810  movq(rsi, Operand(kScratchRegister, 0));
1811#ifdef DEBUG
1812  movq(Operand(kScratchRegister, 0), Immediate(0));
1813#endif
1814
1815  // Clear the top frame.
1816  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1817  movq(kScratchRegister, c_entry_fp_address);
1818  movq(Operand(kScratchRegister, 0), Immediate(0));
1819}
1820
1821
1822void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1823                                            Register scratch,
1824                                            Label* miss) {
1825  Label same_contexts;
1826
1827  ASSERT(!holder_reg.is(scratch));
1828  ASSERT(!scratch.is(kScratchRegister));
1829  // Load current lexical context from the stack frame.
1830  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
1831
1832  // When generating debug code, make sure the lexical context is set.
1833  if (FLAG_debug_code) {
1834    cmpq(scratch, Immediate(0));
1835    Check(not_equal, "we should not have an empty lexical context");
1836  }
1837  // Load the global context of the current context.
1838  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1839  movq(scratch, FieldOperand(scratch, offset));
1840  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
1841
1842  // Check the context is a global context.
1843  if (FLAG_debug_code) {
1844    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
1845        Factory::global_context_map());
1846    Check(equal, "JSGlobalObject::global_context should be a global context.");
1847  }
1848
1849  // Check if both contexts are the same.
1850  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1851  j(equal, &same_contexts);
1852
1853  // Compare security tokens.
1854  // Check that the security token in the calling global object is
1855  // compatible with the security token in the receiving global
1856  // object.
1857
1858  // Check the context is a global context.
1859  if (FLAG_debug_code) {
1860    // Preserve original value of holder_reg.
1861    push(holder_reg);
1862    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1863    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
1864    Check(not_equal, "JSGlobalProxy::context() should not be null.");
1865
1866    // Read the first word (the map) and compare it to global_context_map().
1867    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
1868    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
1869    Check(equal, "JSGlobalObject::global_context should be a global context.");
1870    pop(holder_reg);
1871  }
1872
1873  movq(kScratchRegister,
1874       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1875  int token_offset =
1876      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
1877  movq(scratch, FieldOperand(scratch, token_offset));
1878  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
1879  j(not_equal, miss);
1880
1881  bind(&same_contexts);
1882}
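// In pseudocode, the access check above amounts to (names are informal):
//   caller_context = caller_global_object.global_context
//   if (caller_context == holder.context) return;           // same origin
//   if (caller_context[SECURITY_TOKEN_INDEX] !=
//       holder.context[SECURITY_TOKEN_INDEX]) goto miss;    // token mismatch
// Both token loads use the token_offset computed just above.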
1883
1884
1885void MacroAssembler::LoadAllocationTopHelper(Register result,
1886                                             Register scratch,
1887                                             AllocationFlags flags) {
1888  ExternalReference new_space_allocation_top =
1889      ExternalReference::new_space_allocation_top_address();
1890
1891  // Just return if allocation top is already known.
1892  if ((flags & RESULT_CONTAINS_TOP) != 0) {
1893    // No use of scratch if allocation top is provided.
1894    ASSERT(!scratch.is_valid());
1895#ifdef DEBUG
1896    // Assert that result actually contains top on entry.
1897    movq(kScratchRegister, new_space_allocation_top);
1898    cmpq(result, Operand(kScratchRegister, 0));
1899    Check(equal, "Unexpected allocation top");
1900#endif
1901    return;
1902  }
1903
1904  // Move address of new object to result. Use scratch register if available,
1905  // and keep address in scratch until call to UpdateAllocationTopHelper.
1906  if (scratch.is_valid()) {
1907    movq(scratch, new_space_allocation_top);
1908    movq(result, Operand(scratch, 0));
1909  } else if (result.is(rax)) {
1910    load_rax(new_space_allocation_top);
1911  } else {
1912    movq(kScratchRegister, new_space_allocation_top);
1913    movq(result, Operand(kScratchRegister, 0));
1914  }
1915}
1916
1917
1918void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1919                                               Register scratch) {
1920  if (FLAG_debug_code) {
1921    testq(result_end, Immediate(kObjectAlignmentMask));
1922    Check(zero, "Unaligned allocation in new space");
1923  }
1924
1925  ExternalReference new_space_allocation_top =
1926      ExternalReference::new_space_allocation_top_address();
1927
1928  // Update new top.
1929  if (result_end.is(rax)) {
1930    // rax can be stored directly to a memory location.
1931    store_rax(new_space_allocation_top);
1932  } else {
1933    // Register required - use scratch provided if available.
1934    if (scratch.is_valid()) {
1935      movq(Operand(scratch, 0), result_end);
1936    } else {
1937      movq(kScratchRegister, new_space_allocation_top);
1938      movq(Operand(kScratchRegister, 0), result_end);
1939    }
1940  }
1941}
1942
1943
1944void MacroAssembler::AllocateInNewSpace(int object_size,
1945                                        Register result,
1946                                        Register result_end,
1947                                        Register scratch,
1948                                        Label* gc_required,
1949                                        AllocationFlags flags) {
1950  if (!FLAG_inline_new) {
1951    if (FLAG_debug_code) {
1952      // Trash the registers to simulate an allocation failure.
1953      movl(result, Immediate(0x7091));
1954      if (result_end.is_valid()) {
1955        movl(result_end, Immediate(0x7191));
1956      }
1957      if (scratch.is_valid()) {
1958        movl(scratch, Immediate(0x7291));
1959      }
1960    }
1961    jmp(gc_required);
1962    return;
1963  }
1964  ASSERT(!result.is(result_end));
1965
1966  // Load address of new object into result.
1967  LoadAllocationTopHelper(result, scratch, flags);
1968
1969  // Calculate new top and bail out if new space is exhausted.
1970  ExternalReference new_space_allocation_limit =
1971      ExternalReference::new_space_allocation_limit_address();
1972
1973  Register top_reg = result_end.is_valid() ? result_end : result;
1974
1975  if (top_reg.is(result)) {
1976    addq(top_reg, Immediate(object_size));
1977  } else {
1978    lea(top_reg, Operand(result, object_size));
1979  }
1980  movq(kScratchRegister, new_space_allocation_limit);
1981  cmpq(top_reg, Operand(kScratchRegister, 0));
1982  j(above, gc_required);
1983
1984  // Update allocation top.
1985  UpdateAllocationTopHelper(top_reg, scratch);
1986
1987  if (top_reg.is(result)) {
1988    if ((flags & TAG_OBJECT) != 0) {
1989      subq(result, Immediate(object_size - kHeapObjectTag));
1990    } else {
1991      subq(result, Immediate(object_size));
1992    }
1993  } else if ((flags & TAG_OBJECT) != 0) {
1994    // Tag the result if requested.
1995    addq(result, Immediate(kHeapObjectTag));
1996  }
1997}
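// The fast path above is a bump-pointer allocation; informally:
//   result  = new_space_top;                  // LoadAllocationTopHelper
//   new_top = result + object_size;
//   if (new_top > new_space_limit) goto gc_required;
//   new_space_top = new_top;                  // UpdateAllocationTopHelper
//   if (flags & TAG_OBJECT) result += kHeapObjectTag;
// When no result_end register is available, result itself is bumped and
// afterwards rewound by object_size, hence the subq variants at the end.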
1998
1999
2000void MacroAssembler::AllocateInNewSpace(int header_size,
2001                                        ScaleFactor element_size,
2002                                        Register element_count,
2003                                        Register result,
2004                                        Register result_end,
2005                                        Register scratch,
2006                                        Label* gc_required,
2007                                        AllocationFlags flags) {
2008  if (!FLAG_inline_new) {
2009    if (FLAG_debug_code) {
2010      // Trash the registers to simulate an allocation failure.
2011      movl(result, Immediate(0x7091));
2012      movl(result_end, Immediate(0x7191));
2013      if (scratch.is_valid()) {
2014        movl(scratch, Immediate(0x7291));
2015      }
2016      // Register element_count is not modified by the function.
2017    }
2018    jmp(gc_required);
2019    return;
2020  }
2021  ASSERT(!result.is(result_end));
2022
2023  // Load address of new object into result.
2024  LoadAllocationTopHelper(result, scratch, flags);
2025
2026  // Calculate new top and bail out if new space is exhausted.
2027  ExternalReference new_space_allocation_limit =
2028      ExternalReference::new_space_allocation_limit_address();
2029  lea(result_end, Operand(result, element_count, element_size, header_size));
2030  movq(kScratchRegister, new_space_allocation_limit);
2031  cmpq(result_end, Operand(kScratchRegister, 0));
2032  j(above, gc_required);
2033
2034  // Update allocation top.
2035  UpdateAllocationTopHelper(result_end, scratch);
2036
2037  // Tag the result if requested.
2038  if ((flags & TAG_OBJECT) != 0) {
2039    addq(result, Immediate(kHeapObjectTag));
2040  }
2041}
2042
2043
2044void MacroAssembler::AllocateInNewSpace(Register object_size,
2045                                        Register result,
2046                                        Register result_end,
2047                                        Register scratch,
2048                                        Label* gc_required,
2049                                        AllocationFlags flags) {
2050  if (!FLAG_inline_new) {
2051    if (FLAG_debug_code) {
2052      // Trash the registers to simulate an allocation failure.
2053      movl(result, Immediate(0x7091));
2054      movl(result_end, Immediate(0x7191));
2055      if (scratch.is_valid()) {
2056        movl(scratch, Immediate(0x7291));
2057      }
2058      // object_size is left unchanged by this function.
2059    }
2060    jmp(gc_required);
2061    return;
2062  }
2063  ASSERT(!result.is(result_end));
2064
2065  // Load address of new object into result.
2066  LoadAllocationTopHelper(result, scratch, flags);
2067
2068  // Calculate new top and bail out if new space is exhausted.
2069  ExternalReference new_space_allocation_limit =
2070      ExternalReference::new_space_allocation_limit_address();
2071  if (!object_size.is(result_end)) {
2072    movq(result_end, object_size);
2073  }
2074  addq(result_end, result);
2075  movq(kScratchRegister, new_space_allocation_limit);
2076  cmpq(result_end, Operand(kScratchRegister, 0));
2077  j(above, gc_required);
2078
2079  // Update allocation top.
2080  UpdateAllocationTopHelper(result_end, scratch);
2081
2082  // Tag the result if requested.
2083  if ((flags & TAG_OBJECT) != 0) {
2084    addq(result, Immediate(kHeapObjectTag));
2085  }
2086}
2087
2088
2089void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2090  ExternalReference new_space_allocation_top =
2091      ExternalReference::new_space_allocation_top_address();
2092
2093  // Make sure the object has no tag before resetting top.
2094  and_(object, Immediate(~kHeapObjectTagMask));
2095  movq(kScratchRegister, new_space_allocation_top);
2096#ifdef DEBUG
2097  cmpq(object, Operand(kScratchRegister, 0));
2098  Check(below, "Undo allocation of non allocated memory");
2099#endif
2100  movq(Operand(kScratchRegister, 0), object);
2101}
2102
2103
2104void MacroAssembler::AllocateHeapNumber(Register result,
2105                                        Register scratch,
2106                                        Label* gc_required) {
2107  // Allocate heap number in new space.
2108  AllocateInNewSpace(HeapNumber::kSize,
2109                     result,
2110                     scratch,
2111                     no_reg,
2112                     gc_required,
2113                     TAG_OBJECT);
2114
2115  // Set the map.
2116  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2117  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2118}
2119
2120
2121void MacroAssembler::AllocateTwoByteString(Register result,
2122                                           Register length,
2123                                           Register scratch1,
2124                                           Register scratch2,
2125                                           Register scratch3,
2126                                           Label* gc_required) {
2127  // Calculate the number of bytes needed for the characters in the string while
2128  // observing object alignment.
2129  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2130                               kObjectAlignmentMask;
2131  ASSERT(kShortSize == 2);
2132  // scratch1 = length * 2 + kObjectAlignmentMask + kHeaderAlignment.
2133  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2134                kHeaderAlignment));
2135  and_(scratch1, Immediate(~kObjectAlignmentMask));
2136  if (kHeaderAlignment > 0) {
2137    subq(scratch1, Immediate(kHeaderAlignment));
2138  }
2139
2140  // Allocate a two-byte string in new space.
2141  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2142                     times_1,
2143                     scratch1,
2144                     result,
2145                     scratch2,
2146                     scratch3,
2147                     gc_required,
2148                     TAG_OBJECT);
2149
2150  // Set the map, length and hash field.
2151  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2152  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2153  Integer32ToSmi(scratch1, length);
2154  movq(FieldOperand(result, String::kLengthOffset), scratch1);
2155  movq(FieldOperand(result, String::kHashFieldOffset),
2156       Immediate(String::kEmptyHashField));
2157}
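// Worked example of the size computation above, assuming 8-byte object
// alignment (kObjectAlignmentMask == 7) and a header size that is already
// aligned (kHeaderAlignment == 0): for length == 5,
//   scratch1 = (5 * 2 + 7) & ~7 = 16
// so 16 bytes are reserved for 10 bytes of character data, and the total
// allocation is SeqTwoByteString::kHeaderSize + 16.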
2158
2159
2160void MacroAssembler::AllocateAsciiString(Register result,
2161                                         Register length,
2162                                         Register scratch1,
2163                                         Register scratch2,
2164                                         Register scratch3,
2165                                         Label* gc_required) {
2166  // Calculate the number of bytes needed for the characters in the string while
2167  // observing object alignment.
2168  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2169                               kObjectAlignmentMask;
2170  movl(scratch1, length);
2171  ASSERT(kCharSize == 1);
2172  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
2173  and_(scratch1, Immediate(~kObjectAlignmentMask));
2174  if (kHeaderAlignment > 0) {
2175    subq(scratch1, Immediate(kHeaderAlignment));
2176  }
2177
2178  // Allocate an ascii string in new space.
2179  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2180                     times_1,
2181                     scratch1,
2182                     result,
2183                     scratch2,
2184                     scratch3,
2185                     gc_required,
2186                     TAG_OBJECT);
2187
2188  // Set the map, length and hash field.
2189  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2190  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2191  Integer32ToSmi(scratch1, length);
2192  movq(FieldOperand(result, String::kLengthOffset), scratch1);
2193  movq(FieldOperand(result, String::kHashFieldOffset),
2194       Immediate(String::kEmptyHashField));
2195}
2196
2197
2198void MacroAssembler::AllocateConsString(Register result,
2199                                        Register scratch1,
2200                                        Register scratch2,
2201                                        Label* gc_required) {
2202  // Allocate a cons string object in new space.
2203  AllocateInNewSpace(ConsString::kSize,
2204                     result,
2205                     scratch1,
2206                     scratch2,
2207                     gc_required,
2208                     TAG_OBJECT);
2209
2210  // Set the map. The other fields are left uninitialized.
2211  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2212  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2213}
2214
2215
2216void MacroAssembler::AllocateAsciiConsString(Register result,
2217                                             Register scratch1,
2218                                             Register scratch2,
2219                                             Label* gc_required) {
2220  // Allocate an ascii cons string object in new space.
2221  AllocateInNewSpace(ConsString::kSize,
2222                     result,
2223                     scratch1,
2224                     scratch2,
2225                     gc_required,
2226                     TAG_OBJECT);
2227
2228  // Set the map. The other fields are left uninitialized.
2229  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2230  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2231}
2232
2233
2234void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2235  if (context_chain_length > 0) {
2236    // Move up the chain of contexts to the context containing the slot.
2237    movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2238    // Load the function context (which is the incoming, outer context).
2239    movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2240    for (int i = 1; i < context_chain_length; i++) {
2241      movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2242      movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2243    }
2244    // The context may be an intermediate context, not a function context.
2245    movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2246  } else {  // context is the current function context.
2247    // The context may be an intermediate context, not a function context.
2248    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2249  }
2250}
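// In pseudocode, the chain walk above is roughly:
//   ctx = rsi;
//   repeat context_chain_length times:
//     ctx = ctx[CLOSURE_INDEX].context;   // hop to the enclosing function
//   dst = ctx[FCONTEXT_INDEX];            // strip any intermediate context
// so a chain length of zero simply loads FCONTEXT_INDEX from the current
// context.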
2251
2252
2253void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2254  // Load the global or builtins object from the current context.
2255  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2256  // Load the global context from the global or builtins object.
2257  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
2258  // Load the function from the global context.
2259  movq(function, Operand(function, Context::SlotOffset(index)));
2260}
2261
2262
2263void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2264                                                  Register map) {
2265  // Load the initial map.  The global functions all have initial maps.
2266  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2267  if (FLAG_debug_code) {
2268    Label ok, fail;
2269    CheckMap(map, Factory::meta_map(), &fail, false);
2270    jmp(&ok);
2271    bind(&fail);
2272    Abort("Global functions must have initial map");
2273    bind(&ok);
2274  }
2275}
2276
2277
2278int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2279  // On Windows 64 stack slots are reserved by the caller for all arguments
2280  // including the ones passed in registers, and space is always allocated for
2281  // the four register arguments even if the function takes fewer than four
2282  // arguments.
2283  // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2284  // and the caller does not reserve stack slots for them.
2285  ASSERT(num_arguments >= 0);
2286#ifdef _WIN64
2287  static const int kMinimumStackSlots = 4;
2288  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2289  return num_arguments;
2290#else
2291  static const int kRegisterPassedArguments = 6;
2292  if (num_arguments < kRegisterPassedArguments) return 0;
2293  return num_arguments - kRegisterPassedArguments;
2294#endif
2295}
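// Worked examples of the slot counts computed above:
//   num_arguments == 3: Windows reserves 4 slots (the shadow area covers
//     the four register arguments); Linux/Mac reserve 0 slots, since all
//     three arguments travel in registers.
//   num_arguments == 7: Windows reserves 7 slots; Linux/Mac reserve 1 slot
//     for the argument that does not fit in the six argument registers.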
2296
2297
2298void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2299  int frame_alignment = OS::ActivationFrameAlignment();
2300  ASSERT(frame_alignment != 0);
2301  ASSERT(num_arguments >= 0);
2302  // Align the stack and reserve space for the arguments plus a slot for the old rsp.
2303  movq(kScratchRegister, rsp);
2304  ASSERT(IsPowerOf2(frame_alignment));
2305  int argument_slots_on_stack =
2306      ArgumentStackSlotsForCFunctionCall(num_arguments);
2307  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2308  and_(rsp, Immediate(-frame_alignment));
2309  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2310}
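// After PrepareCallCFunction the stack looks roughly like this:
//   [rsp + argument_slots_on_stack * kPointerSize] : original rsp
//   [rsp + 0 ..]                                   : argument slots, if any
// with rsp itself aligned to frame_alignment. CallCFunction below restores
// the original rsp by loading it back from that slot after the call.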
2311
2312
2313void MacroAssembler::CallCFunction(ExternalReference function,
2314                                   int num_arguments) {
2315  movq(rax, function);
2316  CallCFunction(rax, num_arguments);
2317}
2318
2319
2320void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2321  // Check stack alignment.
2322  if (FLAG_debug_code) {
2323    CheckStackAlignment();
2324  }
2325
2326  call(function);
2327  ASSERT(OS::ActivationFrameAlignment() != 0);
2328  ASSERT(num_arguments >= 0);
2329  int argument_slots_on_stack =
2330      ArgumentStackSlotsForCFunctionCall(num_arguments);
2331  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2332}
2333
2334
2335CodePatcher::CodePatcher(byte* address, int size)
2336    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2337  // Create a new macro assembler pointing to the address of the code to patch.
2338  // The size is adjusted with kGap in order for the assembler to generate size
2339  // bytes of instructions without failing with buffer size constraints.
2340  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2341}
2342
2343
2344CodePatcher::~CodePatcher() {
2345  // Indicate that code has changed.
2346  CPU::FlushICache(address_, size_);
2347
2348  // Check that the code was patched as expected.
2349  ASSERT(masm_.pc_ == address_ + size_);
2350  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2351}
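// Typical use, assuming the masm() accessor that the header declares for
// CodePatcher:
//   CodePatcher patcher(pc, 1);
//   patcher.masm()->int3();  // overwrite a single byte at pc
// The destructor then flushes the instruction cache for the patched range
// and asserts that exactly `size` bytes were emitted.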
2352
2353} }  // namespace v8::internal
2354
2355#endif  // V8_TARGET_ARCH_X64
2356