macro-assembler-ia32.cc revision 3fb3ca8c7ca439d408449a395897395c0faae8d1
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set the dirty mark for the region.
  // Bit tests with a memory operand should be avoided on Intel processors,
  // as they usually have long latency and multiple uops. We load the bit
  // base operand into a register first and store it back after the bit is
  // set.
  mov(scratch, Operand(object, Page::kDirtyFlagOffset));
  bts(Operand(scratch), addr);
  mov(Operand(object, Page::kDirtyFlagOffset), scratch);
}
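
// What the helper above computes, as a rough C-style sketch (illustrative
// only; the field name dirty_flags is descriptive, the constants are the
// Page constants used above):
//
//   page   = object & ~Page::kPageAlignmentMask;
//   region = (addr & Page::kPageAlignmentMask) >> Page::kRegionSizeLog2;
//   page->dirty_flags |= 1 << region;   // bts, via a register round-trip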


void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  ExternalReference zero_ref = ExternalReference::address_of_zero();
  movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  ExternalReference half_ref = ExternalReference::address_of_one_half();
  movdbl(scratch_reg, Operand::StaticVariable(half_ref));
  addsd(scratch_reg, input_reg);
  cvttsd2si(result_reg, Operand(scratch_reg));
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}
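
// Rough C sketch of the clamp above (illustrative only): NaN and negative
// inputs take the early exit with 0, in-range values round by adding 0.5
// and truncating, and anything that no longer fits in a byte saturates
// to 255.
//
//   int result = 0;
//   if (input >= 0.0) {                       // false for NaN as well
//     result = (int)(input + 0.5);            // addsd + cvttsd2si
//     if (result & 0xFFFFFF00) result = 255;  // did not fit in a byte
//   }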


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
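
// The saturation trick above, sketched in C (illustrative only): once the
// value is known to be outside [0, 255], the setcc/dec_b pair picks the
// right boundary value without a branch.
//
//   uint8_t is_negative = (reg < 0);   // setcc(negative, reg)
//   reg = (uint8_t)(is_negative - 1);  // 1 - 1 = 0; 0 - 1 wraps to 255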


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance branch_near) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address.  We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch),
         Immediate(ExternalReference::new_space_mask(isolate())));
    cmp(Operand(scratch),
        Immediate(ExternalReference::new_space_start(isolate())));
    j(cc, branch, branch_near);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start(isolate()).address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, isolate()->heap()->NewSpaceMask());
    j(cc, branch, branch_near);
  }
}
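
// Both paths above test the same predicate: new space is an aligned,
// power-of-two sized region, so membership reduces to one masked compare.
// Sketch (illustrative only):
//
//   in_new_space = ((object - new_space_start) & new_space_mask) == 0;
//
// With cc == equal the branch is taken when the object is in new space;
// with cc == not_equal, when it is not.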


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  JumpIfSmi(value, &done, Label::kNear);

  InNewSpace(object, value, equal, &done, Label::kNear);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  JumpIfSmi(value, &done, Label::kNear);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (x.rmode_ != RelocInfo::NONE) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
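
// SafeSet and SafePush above are a small JIT-spraying mitigation: a large
// untrusted immediate never appears verbatim in the code stream. Sketch
// (illustrative only):
//
//   emit: dst = value ^ cookie;  // emitted bits are not attacker-chosen
//   emit: dst ^= cookie;         // run-time xor recovers the real value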


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(JSObject::FAST_ELEMENTS == 0);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
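
// The sub/cmp/j(above) sequence above is the usual unsigned range-check
// idiom: one comparison covers both bounds. Sketch (illustrative only):
//
//   // type in [FIRST, LAST]
//   //   <=>  (uint32_t)(type - FIRST) <= (uint32_t)(LAST - FIRST)
//   if ((uint32_t)(instance_type - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE) >
//       (uint32_t)(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
//                  FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)) {
//     goto fail;
//   }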


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  JumpIfSmi(object, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  ExternalReference context_address(Isolate::k_context_address,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(Operand(esp), Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(Operand(esp), Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, Operand(ebp));
  pop(ebp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame, so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                 isolate())));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                isolate())),
      esp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                isolate())));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop the sp to the top of the handler.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Restore the next handler and frame pointer; discard the handler state.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(handler_address));
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  pop(ebp);
  pop(edx);  // Remove state.

  // Before returning we restore the context from the frame pointer if
  // not NULL.  The frame pointer is NULL in the exception handler of
  // a JS entry frame.
  Set(esi, Immediate(0));  // Tentatively set context pointer to NULL.
  Label skip;
  cmp(ebp, 0);
  j(equal, &skip, Label::kNear);
  mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  bind(&skip);

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  ret(0);
}
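
// For reference, the stack handler layout that Throw (above) and
// ThrowUncatchable (below) unwind, reconstructed from the STATIC_ASSERTs;
// the state slot offset is inferred from kSize and the other offsets:
//
//   esp + 0 * kPointerSize : next handler  (kNextOffset == 0)
//   esp + 1 * kPointerSize : saved ebp     (kFPOffset)
//   esp + 2 * kPointerSize : state         (kStateOffset, inferred)
//   esp + 3 * kPointerSize : return pc     (kPCOffset)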


void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop sp to the top stack handler.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done, Label::kNear);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  mov(esp, Operand(esp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to the next handler past the current
  // ENTRY handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(handler_address));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::k_external_caught_exception_address,
        isolate());
    mov(eax, false);
    mov(Operand::StaticVariable(external_caught), eax);

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                        isolate());
    mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    mov(Operand::StaticVariable(pending_exception), eax);
  }

  // Clear the context pointer.
  Set(esi, Immediate(0));

  // Restore fp from the handler and discard the handler state.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  pop(ebp);
  pop(edx);  // State.

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  ret(0);
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load the current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens; save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary.
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  // Compute the hash code from the untagged key.  This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mov(r1, r0);
  not_(r0);
  shl(r1, 15);
  add(r0, Operand(r1));
  // hash = hash ^ (hash >> 12);
  mov(r1, r0);
  shr(r1, 12);
  xor_(r0, Operand(r1));
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(r1, r0);
  shr(r1, 4);
  xor_(r0, Operand(r1));
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(r1, r0);
  shr(r1, 16);
  xor_(r0, Operand(r1));
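
  // For reference, the same hash written out in C (this just restates the
  // per-step comments above; illustrative only):
  //
  //   uint32_t hash = key;
  //   hash = ~hash + (hash << 15);
  //   hash ^= hash >> 12;
  //   hash += hash << 2;
  //   hash ^= hash >> 4;
  //   hash *= 2057;
  //   hash ^= hash >> 16;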

  // Compute the capacity mask.
  mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(Operand(r2), Immediate(NumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, Operand(r1));

    // Scale the index by multiplying by the entry size.
    ASSERT(NumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          NumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      NumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
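
// The unrolled probing above is the standard NumberDictionary lookup,
// sketched in C (illustrative only; probe_offset stands for
// NumberDictionary::GetProbeOffset):
//
//   for (int i = 0; i < kProbes; i++) {
//     index = (hash + probe_offset(i)) & capacity_mask;  // masked index
//     entry = index * NumberDictionary::kEntrySize;      // kEntrySize == 3
//     if (elements[entry] == key) goto done;             // hit
//   }
//   goto miss;  // in the code above only the last probe jumps to miss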


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(Operand(top_reg), Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
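
// All three AllocateInNewSpace overloads implement the same bump-pointer
// fast path over new space, sketched in C (illustrative only):
//
//   new_top = allocation_top + size;
//   if (overflow || new_top > allocation_limit) goto gc_required;
//   allocation_top = new_top;
//   result = new_top - size;   // plus kHeapObjectTag if TAG_OBJECT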


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count * element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, Operand(result));
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non-allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to clear the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(Operand(length), Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(Operand(scratch), Immediate(0x3));
  add(destination, Operand(scratch));
  jmp(&done);

  bind(&short_string);
  test(length, Operand(length));
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
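
// Shape of the fast path above, sketched in C (illustrative only): the
// last, possibly ragged, four bytes are copied up front, so the rep_movs
// word copy never has to deal with a tail.
//
//   *(uint32_t*)(dst + len - 4) = *(uint32_t*)(src + len - 4);
//   rep_movs(dst, src, len / 4);   // copies len / 4 whole words
//   dst += len & 3;                // skip the bytes already copied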


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);
  test(op, Operand(op));
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label);
  bind(&ok);
}
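
// Both NegativeZeroTest variants guard the same corner case: an integer
// result of 0 whose operand signs show the exact result was -0, which a
// smi cannot represent. Sketch of the two-operand form (illustrative
// only):
//
//   if (result == 0 && (op1 | op2) < 0) goto then_label;  // -0, go slow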


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
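
// Sketch of the extraction above (illustrative only): the cached array
// index lives in the hash field above kHashShift, and shifting it down by
// kHashShift - kSmiTagSize leaves it smi-tagged (kSmiTag == 0):
//
//   index = (hash & String::kArrayIndexValueMask) >>
//           (String::kHashShift - kSmiTagSize);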
1380
1381
1382void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1383  CallRuntime(Runtime::FunctionForId(id), num_arguments);
1384}
1385
1386
1387void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1388  const Runtime::Function* function = Runtime::FunctionForId(id);
1389  Set(eax, Immediate(function->nargs));
1390  mov(ebx, Immediate(ExternalReference(function, isolate())));
1391  CEntryStub ces(1);
1392  ces.SaveDoubles();
1393  CallStub(&ces);
1394}
1395
1396
1397MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1398                                            int num_arguments) {
1399  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1400}
1401
1402
1403void MacroAssembler::CallRuntime(const Runtime::Function* f,
1404                                 int num_arguments) {
1405  // If the expected number of arguments of the runtime function is
1406  // constant, we check that the actual number of arguments match the
1407  // expectation.
1408  if (f->nargs >= 0 && f->nargs != num_arguments) {
1409    IllegalOperation(num_arguments);
1410    return;
1411  }
1412
1413  // TODO(1236192): Most runtime routines don't need the number of
1414  // arguments passed in because it is constant. At some point we
1415  // should remove this need and make the runtime routine entry code
1416  // smarter.
1417  Set(eax, Immediate(num_arguments));
1418  mov(ebx, Immediate(ExternalReference(f, isolate())));
1419  CEntryStub ces(1);
1420  CallStub(&ces);
1421}
1422
1423
1424MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
1425                                            int num_arguments) {
1426  if (f->nargs >= 0 && f->nargs != num_arguments) {
1427    IllegalOperation(num_arguments);
1428    // Since we did not call the stub, there was no allocation failure.
1429    // Return some non-failure object.
1430    return isolate()->heap()->undefined_value();
1431  }
1432
1433  // TODO(1236192): Most runtime routines don't need the number of
1434  // arguments passed in because it is constant. At some point we
1435  // should remove this need and make the runtime routine entry code
1436  // smarter.
1437  Set(eax, Immediate(num_arguments));
1438  mov(ebx, Immediate(ExternalReference(f, isolate())));
1439  CEntryStub ces(1);
1440  return TryCallStub(&ces);
1441}
1442
1443
1444void MacroAssembler::CallExternalReference(ExternalReference ref,
1445                                           int num_arguments) {
1446  mov(eax, Immediate(num_arguments));
1447  mov(ebx, Immediate(ref));
1448
1449  CEntryStub stub(1);
1450  CallStub(&stub);
1451}
1452
1453
1454void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1455                                               int num_arguments,
1456                                               int result_size) {
1457  // TODO(1236192): Most runtime routines don't need the number of
1458  // arguments passed in because it is constant. At some point we
1459  // should remove this need and make the runtime routine entry code
1460  // smarter.
1461  Set(eax, Immediate(num_arguments));
1462  JumpToExternalReference(ext);
1463}
1464
1465
1466MaybeObject* MacroAssembler::TryTailCallExternalReference(
1467    const ExternalReference& ext, int num_arguments, int result_size) {
1468  // TODO(1236192): Most runtime routines don't need the number of
1469  // arguments passed in because it is constant. At some point we
1470  // should remove this need and make the runtime routine entry code
1471  // smarter.
1472  Set(eax, Immediate(num_arguments));
1473  return TryJumpToExternalReference(ext);
1474}
1475
1476
1477void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1478                                     int num_arguments,
1479                                     int result_size) {
1480  TailCallExternalReference(ExternalReference(fid, isolate()),
1481                            num_arguments,
1482                            result_size);
1483}
1484
1485
1486MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
1487                                                int num_arguments,
1488                                                int result_size) {
1489  return TryTailCallExternalReference(
1490      ExternalReference(fid, isolate()), num_arguments, result_size);
1491}
1492
1493
1494// If true, a Handle<T> returned by value from a function with cdecl calling
1495// convention will be returned directly as a value of location_ field in a
1496// register eax.
1497// If false, it is returned as a pointer to a preallocated by caller memory
1498// region. Pointer to this region should be passed to a function as an
1499// implicit first argument.
1500#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
1501static const bool kReturnHandlesDirectly = true;
1502#else
1503static const bool kReturnHandlesDirectly = false;
1504#endif
1505
1506
1507Operand ApiParameterOperand(int index) {
1508  return Operand(
1509      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
1510}
1511
1512
1513void MacroAssembler::PrepareCallApiFunction(int argc) {
1514  if (kReturnHandlesDirectly) {
1515    EnterApiExitFrame(argc);
1516    // When handles are returned directly we don't have to allocate extra
1517    // space for and pass an out parameter.
    if (emit_debug_code()) {
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: the return value and a pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output slot
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}
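
// Usage sketch (hypothetical caller, not from this file): stub code pairs
// PrepareCallApiFunction with ApiParameterOperand to fill the slots, e.g.
//
//   __ PrepareCallApiFunction(kApiArgc);   // kApiArgc is illustrative.
//   __ mov(ApiParameterOperand(0), ebx);   // Argument 0.
//   __ TryCallApiFunctionAndReturn(&fun, kApiStackSpace);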


MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved pointer to the output slot into
    // callee-save register esi.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
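
// For orientation (a hedged sketch, not V8's exact declaration): the three
// external references above point into per-isolate handle scope data,
// conceptually
//
//   struct HandleScopeData {
//     Object** next;   // handle_scope_next_address: first free slot.
//     Object** limit;  // handle_scope_limit_address: end of current block.
//     int level;       // handle_scope_level_address: nesting depth.
//   };
//
// The code above saves next and limit, bumps level around the call, restores
// them afterwards, and, if limit moved, deletes the allocated extensions.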


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  return TryTailCallStub(&ces);
}


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention, which requires the call kind to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte.
    mov_b(Operand(dst), 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, Operand(dst));
  }
}
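
// Worked example (illustrative): with kSmiTag == 0 and kSmiTagSize == 1, the
// immediate 1 << kSmiTagSize == 2 is the bit pattern of Smi::FromInt(1), so
// ecx ends up holding smi 1 (modulo the untouched upper bytes on the mov_b
// path) for CALL_AS_FUNCTION and smi 0 otherwise.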


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      jmp(done, done_near);
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
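
// Register contract implied by the asserts above: on the mismatch path eax
// holds the actual argument count, ebx the expected count, edx the code to
// invoke, and ecx the call kind, which is what the ArgumentsAdaptorTrampoline
// expects to find.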


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, Label::kNear, call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(ecx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code, rmode));
    SetCallKind(ecx, call_kind);
    call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
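
// Usage sketch (hypothetical caller, not from this file):
//
//   __ mov(edi, Operand(esp, kPointerSize));  // Function to call.
//   ParameterCount actual(2);                 // Two arguments on the stack.
//   __ InvokeFunction(edi, actual, CALL_FUNCTION,
//                     NullCallWrapper(), CALL_AS_METHOD);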


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
               expected, actual, flag, call_wrapper, call_kind);
  } else {
    Handle<Code> code(function->code());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
               flag, call_wrapper, call_kind);
  }
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target,
                                     Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}
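
// Worked example (illustrative): LoadContext(ecx, 2) emits two loads that hop
// from esi through PREVIOUS_INDEX twice, leaving the grandparent context in
// ecx, while LoadContext(ecx, 0) simply copies esi so that a later store
// cannot clobber the context register.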


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
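
// Worked example (illustrative): with kNumSafepointRegisters == 8 on ia32,
// eax (encoding 0) is pushed first and thus sits furthest from esp, giving
// slot index 8 - 0 - 1 == 7, while edi (encoding 7) lands at index 0.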


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(Operand(esp), Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
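
// Note (hedged): the is_uint16 test mirrors the x86 "ret imm16" encoding,
// whose immediate is only 16 bits wide. A call such as Ret(0x10000, scratch)
// therefore pops the return address, drops the bytes explicitly, pushes the
// return address back, and returns with ret(0).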


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be properly
  // aligned. Instead, we pass an aligned pointer that is a
  // proper v8 smi, and also pass the alignment difference from
  // the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
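
// Worked example (illustrative): with kSmiTagMask == 1 and kSmiTag == 0, a
// msg pointer p1 == 0x08049f01 gives p0 == 0x08049f00 (a valid smi bit
// pattern) and p1 - p0 == 1, so the runtime can rebuild the exact char* as
// p0 + (p1 - p0) from the two pushed smis.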


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
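
// Worked example (illustrative): for power == 3, scratch receives
// 3 + HeapNumber::kExponentBias == 1026; shifting it left by
// kMantissaBits (52) yields the IEEE 754 bit pattern of 2^3 == 8.0:
// biased exponent in bits 62..52, zero sign and mantissa.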


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
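
// Worked example (illustrative): the lea computes scratch1 + scratch2 * 8,
// placing the two masked instance types at disjoint bit positions (the
// ASSERT above guarantees no overlap), so a single cmp checks both strings:
// two flat ascii strings produce exactly
// kFlatAsciiStringTag | (kFlatAsciiStringTag << 3).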


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at the alignment boundary and make room for
    // num_arguments words and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
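
// Usage sketch (hypothetical caller, not from this file; some_function is
// an illustrative ExternalReference to a C function taking two word-sized
// arguments):
//
//   __ PrepareCallCFunction(2, eax);              // eax as scratch.
//   __ mov(Operand(esp, 0 * kPointerSize), edx);  // First argument.
//   __ mov(Operand(esp, 1 * kPointerSize), ebx);  // Second argument.
//   __ CallCFunction(some_function, 2);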


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
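
// Usage sketch (illustrative): patching a single byte of existing code.
//
//   CodePatcher patcher(address, 1);  // 'address' points at patchable code.
//   patcher.masm()->int3();           // Emits exactly the one reserved byte.
//                                     // Destructor flushes the icache and
//                                     // asserts the size matched.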


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32
