macro-assembler-ia32.cc revision 5913587db4c6bab03d97bfe44b06289fd6d7270d
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute the number of the region covering addr. See the
  // Page::GetRegionNumberForAddress method for details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
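
// Worked example of the region arithmetic above (illustrative values only;
// the real constants are defined in spaces.h -- here we assume 8KB pages,
// kPageAlignmentMask == 0x1FFF, and kRegionSizeLog2 == 8, i.e. 256-byte
// regions):
//   addr        = 0x000294F8       (address of the updated slot)
//   page start  = addr & ~0x1FFF   = 0x00028000
//   page offset = addr &  0x1FFF   = 0x14F8
//   region      = 0x14F8 >> 8      = 20
// The bts instruction then sets bit 20 of the dirty-marks word stored at
// page_start + Page::kDirtyFlagOffset.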


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address.  We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}
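
// Sketch of the non-serializer path above, assuming (illustratively) a new
// space of 2MB based at 0x08000000, so Heap::NewSpaceMask() masks off the
// low 21 bits:
//   object in new space:  (0x080ABC00 - 0x08000000) & ~0x1FFFFF == 0
//   object elsewhere:     (0x0A123400 - 0x08000000) & ~0x1FFFFF != 0
// Branching on 'equal' therefore targets new-space objects and 'not_equal'
// everything else, matching the cc argument contract.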


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc) {
  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
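
// Alignment sketch: if OS::ActivationFrameAlignment() returns 16 (as on
// platforms that require 16-byte stack alignment at call sites; others may
// report 0) and argc == 4, esp first drops by 16 bytes for the argument
// slots and is then rounded down with and_(esp, -16), e.g.
// 0x7FF1238 & 0xFFFFFFF0 == 0x7FF1230, so the callee sees an aligned esp.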


void MacroAssembler::EnterExitFrame() {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(2);
}


void MacroAssembler::EnterApiExitFrame(int stack_space,
                                       int argc) {
  EnterExitFramePrologue();

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(argc);
}


void MacroAssembler::LeaveExitFrame() {
  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore the current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame, so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load the current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens; save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if the allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if the allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move the address of the new object to result. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update the new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load the address of the new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate the new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
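
// Tagging arithmetic for the top_reg.is(result) case above: after the add,
// result holds new_top == old_top + object_size, so subtracting
// object_size - kHeapObjectTag leaves old_top + kHeapObjectTag, i.e. the
// freshly allocated object with its heap-object tag already applied.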


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load the address of the new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate the new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load the address of the new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate the new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate the two-byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate the ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate the ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the ascii cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}


void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
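
// Why these checks exist: in JavaScript, -2 * 0 must evaluate to -0, but
// the integer product in 'result' is plain 0. When result is zero we
// inspect the sign bit of the operand(s) -- e.g. -2 | 0 is negative -- and
// jump to 'then_label' so a slow path can materialize -0 as a heap number.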


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in 'index'.  kArrayIndexValueMask has zeros
  // in the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
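
// Layout sketch for IndexFromHash (illustrative; the real bit layout is
// defined in objects.h): with kSmiTag == 0, kSmiTagSize == 1 and, say,
// String::kHashShift == 2, a cached array index sits in the hash field as
//   hash = iiii...ii00   (index bits above the low kHashShift flag bits).
// Masking with kArrayIndexValueMask clears the flag bits, and shifting
// right by kHashShift - kSmiTagSize == 1 leaves index << 1, which with a
// zero smi tag is exactly the smi-tagged index.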


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


// If true, a Handle<T> passed by value is passed and returned by
// using the location_ field directly.  If false, it is passed and
// returned as a pointer to a handle.
#ifdef USING_BSD_ABI
static const bool kPassHandlesDirectly = true;
#else
static const bool kPassHandlesDirectly = false;
#endif


Operand ApiParameterOperand(int index) {
  return Operand(esp, (index + (kPassHandlesDirectly ? 0 : 1)) * kPointerSize);
}
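
// Example: with kPassHandlesDirectly == false, slot 0 of the reserved area
// holds the pointer to the output cell, so ApiParameterOperand(0) resolves
// to Operand(esp, 1 * kPointerSize) -- the first real API argument -- while
// with kPassHandlesDirectly == true it is simply Operand(esp, 0).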


void MacroAssembler::PrepareCallApiFunction(int stack_space, int argc) {
  if (kPassHandlesDirectly) {
    EnterApiExitFrame(stack_space, argc);
    // When handles are passed directly we don't have to allocate extra
    // space for, and pass, an out parameter.
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(stack_space, argc + 2);
  }
}


void MacroAssembler::CallApiFunctionAndReturn(ApiFunction* function, int argc) {
  if (!kPassHandlesDirectly) {
    // The argument slots are filled as follows:
    //
    //   n + 1: output cell
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects,
    // so the out cell has to be popped explicitly after returning
    // from the function. The out cell contains a Handle.
    lea(eax, Operand(esp, (argc + 1) * kPointerSize));  // Pointer to out cell.
    mov(Operand(esp, 0 * kPointerSize), eax);  // Output.
    mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0));  // Out cell.
  }

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kPassHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle, not_taken);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // the previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles, not_taken);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(Factory::the_hole_value()));
  j(not_equal, &promote_scheduled_exception, not_taken);
  LeaveExitFrame();
  ret(0);
  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making
        // it look like we have a match between the expected and
        // actual number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
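
// Register contract on the mismatch path above: the
// ArgumentsAdaptorTrampoline is entered with eax == the actual argument
// count, ebx == the expected count, and edx == the code entry to invoke
// once the arguments have been adapted, matching the moves performed
// above. In the dont-adapt-arguments case the adaptor is skipped entirely.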
1304
1305
1306void MacroAssembler::InvokeCode(const Operand& code,
1307                                const ParameterCount& expected,
1308                                const ParameterCount& actual,
1309                                InvokeFlag flag) {
1310  Label done;
1311  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1312  if (flag == CALL_FUNCTION) {
1313    call(code);
1314  } else {
1315    ASSERT(flag == JUMP_FUNCTION);
1316    jmp(code);
1317  }
1318  bind(&done);
1319}
1320
1321
1322void MacroAssembler::InvokeCode(Handle<Code> code,
1323                                const ParameterCount& expected,
1324                                const ParameterCount& actual,
1325                                RelocInfo::Mode rmode,
1326                                InvokeFlag flag) {
1327  Label done;
1328  Operand dummy(eax);
1329  InvokePrologue(expected, actual, code, dummy, &done, flag);
1330  if (flag == CALL_FUNCTION) {
1331    call(code, rmode);
1332  } else {
1333    ASSERT(flag == JUMP_FUNCTION);
1334    jmp(code, rmode);
1335  }
1336  bind(&done);
1337}
1338
1339
1340void MacroAssembler::InvokeFunction(Register fun,
1341                                    const ParameterCount& actual,
1342                                    InvokeFlag flag) {
1343  ASSERT(fun.is(edi));
1344  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1345  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1346  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
1347  SmiUntag(ebx);
1348
1349  ParameterCount expected(ebx);
1350  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
1351             expected, actual, flag);
1352}
1353
1354
1355void MacroAssembler::InvokeFunction(JSFunction* function,
1356                                    const ParameterCount& actual,
1357                                    InvokeFlag flag) {
1358  ASSERT(function->is_compiled());
1359  // Get the function and setup the context.
1360  mov(edi, Immediate(Handle<JSFunction>(function)));
1361  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1362  // Invoke the cached code.
1363  Handle<Code> code(function->code());
1364  ParameterCount expected(function->shared()->formal_parameter_count());
1365  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
1366}
1367
1368
1369void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
1370  // Calls are not allowed in some stubs.
1371  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
1372
1373  // Rely on the assertion to check that the number of provided
1374  // arguments match the expected number of arguments. Fake a
1375  // parameter count to avoid emitting code to do the check.
1376  ParameterCount expected(0);
1377  GetBuiltinFunction(edi, id);
1378  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
1379           expected, expected, flag);
1380}
1381
1382void MacroAssembler::GetBuiltinFunction(Register target,
1383                                        Builtins::JavaScript id) {
1384  // Load the JavaScript builtin function from the builtins object.
1385  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1386  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
1387  mov(target, FieldOperand(target,
1388                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
1389}
1390
1391void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
1392  ASSERT(!target.is(edi));
1393  // Load the JavaScript builtin function from the builtins object.
1394  GetBuiltinFunction(edi, id);
1395  // Load the code entry point from the function into the target register.
1396  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
1397}
1398
1399
1400void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
1401  if (context_chain_length > 0) {
1402    // Move up the chain of contexts to the context containing the slot.
1403    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
1404    // Load the function context (which is the incoming, outer context).
1405    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
1406    for (int i = 1; i < context_chain_length; i++) {
1407      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
1408      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
1409    }
1410    // The context may be an intermediate context, not a function context.
1411    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
1412  } else {  // Slot is in the current function context.
1413    // The context may be an intermediate context, not a function context.
1414    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
1415  }
1416}
1417
1418
1419void MacroAssembler::LoadGlobalFunction(int index, Register function) {
1420  // Load the global or builtins object from the current context.
1421  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1422  // Load the global context from the global or builtins object.
1423  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
1424  // Load the function from the global context.
1425  mov(function, Operand(function, Context::SlotOffset(index)));
1426}
1427
1428
1429void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
1430                                                  Register map) {
1431  // Load the initial map.  The global functions all have initial maps.
1432  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1433  if (FLAG_debug_code) {
1434    Label ok, fail;
1435    CheckMap(map, Factory::meta_map(), &fail, false);
1436    jmp(&ok);
1437    bind(&fail);
1438    Abort("Global functions must have initial map");
1439    bind(&ok);
1440  }
1441}
1442
1443
1444void MacroAssembler::Ret() {
1445  ret(0);
1446}
1447
1448
1449void MacroAssembler::Drop(int stack_elements) {
1450  if (stack_elements > 0) {
1451    add(Operand(esp), Immediate(stack_elements * kPointerSize));
1452  }
1453}
1454
1455
1456void MacroAssembler::Move(Register dst, Register src) {
1457  if (!dst.is(src)) {
1458    mov(dst, src);
1459  }
1460}
1461
1462
1463void MacroAssembler::Move(Register dst, Handle<Object> value) {
1464  mov(dst, value);
1465}
1466
1467
1468void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1469  if (FLAG_native_code_counters && counter->Enabled()) {
1470    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
1471  }
1472}
1473
1474
1475void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1476  ASSERT(value > 0);
1477  if (FLAG_native_code_counters && counter->Enabled()) {
1478    Operand operand = Operand::StaticVariable(ExternalReference(counter));
1479    if (value == 1) {
1480      inc(operand);
1481    } else {
1482      add(operand, Immediate(value));
1483    }
1484  }
1485}
1486
1487
1488void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1489  ASSERT(value > 0);
1490  if (FLAG_native_code_counters && counter->Enabled()) {
1491    Operand operand = Operand::StaticVariable(ExternalReference(counter));
1492    if (value == 1) {
1493      dec(operand);
1494    } else {
1495      sub(operand, Immediate(value));
1496    }
1497  }
1498}
1499
1500
1501void MacroAssembler::IncrementCounter(Condition cc,
1502                                      StatsCounter* counter,
1503                                      int value) {
1504  ASSERT(value > 0);
1505  if (FLAG_native_code_counters && counter->Enabled()) {
1506    Label skip;
1507    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Control does not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
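  // For a power-of-two alignment, esp is aligned exactly when
  // (esp & frame_alignment_mask) == 0, e.g. a mask of 15 for 16-byte
  // alignment.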
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be properly
  // aligned. Instead, we pass an aligned pointer that is a
  // proper v8 smi, plus the alignment difference from the
  // real pointer as a smi.
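  // For example (illustrative addresses only): if msg lives at 0x08049f01,
  // then p0 == 0x08049f00 is a valid smi and the difference 1 is passed
  // separately as Smi::FromInt(1), so the runtime can reconstruct
  // p0 + 1 == msg without the GC ever seeing the unaligned pointer.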
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control does not return here.
  int3();
}


void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}


void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
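    // cvttsd2si produces 0x80000000 (the x86 "integer indefinite" value)
    // when the double is NaN or outside int32 range, so that bit pattern
    // here signals a failed conversion.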
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      pop(dst);
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
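  // Materialize the double 2^power directly from its IEEE 754 bit pattern:
  // a biased exponent of (power + kExponentBias) shifted into the exponent
  // field, with a zero mantissa. For example, power == 3 gives a biased
  // exponent of 3 + 1023 = 1026 (0x402); shifted left by the 52 mantissa
  // bits this is the bit pattern 0x4020000000000000, i.e. 8.0.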
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
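  // A sequential ascii string must have the not-a-string bit clear, the
  // sequential representation tag, and the ascii encoding tag, so mask out
  // exactly those three fields and compare against the combined expected
  // tag in a single check.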
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither object is a smi.
  ASSERT_EQ(0, kSmiTag);
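  // With kSmiTag == 0 and kHeapObjectTag == 1, ANDing the two tagged values
  // keeps the low bit set only if both operands are heap objects; a cleared
  // bit means at least one of them is a smi.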
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Combine bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
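  // The ASSERT above guarantees that the masked bits and the same bits
  // shifted left by 3 do not overlap, so scratch1 + scratch2 * 8 equals
  // scratch1 | (scratch2 << 3): the lea packs both instance types into one
  // register for a single comparison.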
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments,
                                          Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Align the stack and make room for num_arguments words and the
    // original value of esp.
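    // Resulting layout after the instructions below, with n == num_arguments:
    //   esp[0] .. esp[(n - 1) * kPointerSize] : C argument slots
    //   esp[n * kPointerSize]                 : saved original esp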
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
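// Illustrative pairing of the two helpers above (the callee and argument
// values are hypothetical):
//   PrepareCallCFunction(2, scratch);
//   mov(Operand(esp, 0 * kPointerSize), Immediate(1));
//   mov(Operand(esp, 1 * kPointerSize), Immediate(2));
//   CallCFunction(ref, 2);  // ref: an ExternalReference to a two-argument
//                           // C function.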


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
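// Illustrative use of CodePatcher (address and instructions hypothetical):
//   CodePatcher patcher(address, 2);  // Prepare to patch two bytes.
//   patcher.masm()->int3();           // Emit exactly 'size' bytes...
//   patcher.masm()->int3();           // ...of replacement code.
// Leaving the scope flushes the instruction cache for the patched range.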


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32
