1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#if V8_TARGET_ARCH_X64
8
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/compiler.h"
12#include "src/debug.h"
13#include "src/full-codegen.h"
14#include "src/isolate-inl.h"
15#include "src/parser.h"
16#include "src/scopes.h"
17#include "src/stub-cache.h"
18
19namespace v8 {
20namespace internal {
21
22#define __ ACCESS_MASM(masm_)
23
24
25class JumpPatchSite BASE_EMBEDDED {
26 public:
27  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
28#ifdef DEBUG
29    info_emitted_ = false;
30#endif
31  }
32
33  ~JumpPatchSite() {
34    ASSERT(patch_site_.is_bound() == info_emitted_);
35  }
36
37  void EmitJumpIfNotSmi(Register reg,
38                        Label* target,
39                        Label::Distance near_jump = Label::kFar) {
40    __ testb(reg, Immediate(kSmiTagMask));
41    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
42  }
43
44  void EmitJumpIfSmi(Register reg,
45                     Label* target,
46                     Label::Distance near_jump = Label::kFar) {
47    __ testb(reg, Immediate(kSmiTagMask));
48    EmitJump(carry, target, near_jump);  // Never taken before patched.
49  }
50
51  void EmitPatchInfo() {
52    if (patch_site_.is_bound()) {
53      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
54      ASSERT(is_uint8(delta_to_patch_site));
55      __ testl(rax, Immediate(delta_to_patch_site));
56#ifdef DEBUG
57      info_emitted_ = true;
58#endif
59    } else {
60      __ nop();  // Signals no inlined code.
61    }
62  }
63
64 private:
65  // jc will be patched with jz, jnc will become jnz.
66  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
67    ASSERT(!patch_site_.is_bound() && !info_emitted_);
68    ASSERT(cc == carry || cc == not_carry);
69    __ bind(&patch_site_);
70    __ j(cc, target, near_jump);
71  }
72
73  MacroAssembler* masm_;
74  Label patch_site_;
75#ifdef DEBUG
76  bool info_emitted_;
77#endif
78};
79
80
81// Generate code for a JS function.  On entry to the function the receiver
82// and arguments have been pushed on the stack left to right, with the
83// return address on top of them.  The actual argument count matches the
84// formal parameter count expected by the function.
85//
86// The live registers are:
87//   o rdi: the JS function object being called (i.e. ourselves)
88//   o rsi: our context
89//   o rbp: our caller's frame pointer
90//   o rsp: stack pointer (pointing to return address)
91//
92// The function builds a JS frame.  Please see JavaScriptFrameConstants in
93// frames-x64.h for its layout.
94void FullCodeGenerator::Generate() {
95  CompilationInfo* info = info_;
96  handler_table_ =
97      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
98
99  profiling_counter_ = isolate()->factory()->NewCell(
100      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
101  SetFunctionPosition(function());
102  Comment cmnt(masm_, "[ function compiled by full code generator");
103
104  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
105
106#ifdef DEBUG
107  if (strlen(FLAG_stop_at) > 0 &&
108      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
109    __ int3();
110  }
111#endif
112
113  // Sloppy mode functions and builtins need to replace the receiver with the
114  // global proxy when called as functions (without an explicit receiver
115  // object).
116  if (info->strict_mode() == SLOPPY && !info->is_native()) {
117    Label ok;
118    // +1 for return address.
119    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
120    __ movp(rcx, args.GetReceiverOperand());
121
122    __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
123    __ j(not_equal, &ok, Label::kNear);
124
125    __ movp(rcx, GlobalObjectOperand());
126    __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
127
128    __ movp(args.GetReceiverOperand(), rcx);
129
130    __ bind(&ok);
131  }
132
133  // Open a frame scope to indicate that there is a frame on the stack.  The
134  // MANUAL indicates that the scope shouldn't actually generate code to set up
135  // the frame (that is done below).
136  FrameScope frame_scope(masm_, StackFrame::MANUAL);
137
138  info->set_prologue_offset(masm_->pc_offset());
139  __ Prologue(info->IsCodePreAgingActive());
140  info->AddNoFrameRange(0, masm_->pc_offset());
141
142  { Comment cmnt(masm_, "[ Allocate locals");
143    int locals_count = info->scope()->num_stack_slots();
144    // Generators allocate locals, if any, in context slots.
145    ASSERT(!info->function()->is_generator() || locals_count == 0);
146    if (locals_count == 1) {
147      __ PushRoot(Heap::kUndefinedValueRootIndex);
148    } else if (locals_count > 1) {
149      if (locals_count >= 128) {
150        Label ok;
151        __ movp(rcx, rsp);
152        __ subp(rcx, Immediate(locals_count * kPointerSize));
153        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
154        __ j(above_equal, &ok, Label::kNear);
155        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
156        __ bind(&ok);
157      }
158      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
159      const int kMaxPushes = 32;
160      if (locals_count >= kMaxPushes) {
161        int loop_iterations = locals_count / kMaxPushes;
162        __ movp(rcx, Immediate(loop_iterations));
163        Label loop_header;
164        __ bind(&loop_header);
165        // Do pushes.
166        for (int i = 0; i < kMaxPushes; i++) {
167          __ Push(rdx);
168        }
169        // Continue loop if not done.
170        __ decp(rcx);
171        __ j(not_zero, &loop_header, Label::kNear);
172      }
173      int remaining = locals_count % kMaxPushes;
174      // Emit the remaining pushes.
175      for (int i  = 0; i < remaining; i++) {
176        __ Push(rdx);
177      }
178    }
179  }
180
181  bool function_in_register = true;
182
183  // Possibly allocate a local context.
184  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
185  if (heap_slots > 0) {
186    Comment cmnt(masm_, "[ Allocate context");
187    bool need_write_barrier = true;
188    // Argument to NewContext is the function, which is still in rdi.
189    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
190      __ Push(rdi);
191      __ Push(info->scope()->GetScopeInfo());
192      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
193    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
194      FastNewContextStub stub(isolate(), heap_slots);
195      __ CallStub(&stub);
196      // Result of FastNewContextStub is always in new space.
197      need_write_barrier = false;
198    } else {
199      __ Push(rdi);
200      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
201    }
202    function_in_register = false;
203    // Context is returned in rax.  It replaces the context passed to us.
204    // It's saved in the stack and kept live in rsi.
205    __ movp(rsi, rax);
206    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
207
208    // Copy any necessary parameters into the context.
209    int num_parameters = info->scope()->num_parameters();
210    for (int i = 0; i < num_parameters; i++) {
211      Variable* var = scope()->parameter(i);
212      if (var->IsContextSlot()) {
213        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
214            (num_parameters - 1 - i) * kPointerSize;
215        // Load parameter from stack.
216        __ movp(rax, Operand(rbp, parameter_offset));
217        // Store it in the context.
218        int context_offset = Context::SlotOffset(var->index());
219        __ movp(Operand(rsi, context_offset), rax);
220        // Update the write barrier.  This clobbers rax and rbx.
221        if (need_write_barrier) {
222          __ RecordWriteContextSlot(
223              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
224        } else if (FLAG_debug_code) {
225          Label done;
226          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
227          __ Abort(kExpectedNewSpaceObject);
228          __ bind(&done);
229        }
230      }
231    }
232  }
233
234  // Possibly allocate an arguments object.
235  Variable* arguments = scope()->arguments();
236  if (arguments != NULL) {
237    // Arguments object must be allocated after the context object, in
238    // case the "arguments" or ".arguments" variables are in the context.
239    Comment cmnt(masm_, "[ Allocate arguments object");
240    if (function_in_register) {
241      __ Push(rdi);
242    } else {
243      __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
244    }
245    // The receiver is just before the parameters on the caller's stack.
246    int num_parameters = info->scope()->num_parameters();
247    int offset = num_parameters * kPointerSize;
248    __ leap(rdx,
249           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
250    __ Push(rdx);
251    __ Push(Smi::FromInt(num_parameters));
252    // Arguments to ArgumentsAccessStub:
253    //   function, receiver address, parameter count.
254    // The stub will rewrite receiver and parameter count if the previous
255    // stack frame was an arguments adapter frame.
256    ArgumentsAccessStub::Type type;
257    if (strict_mode() == STRICT) {
258      type = ArgumentsAccessStub::NEW_STRICT;
259    } else if (function()->has_duplicate_parameters()) {
260      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
261    } else {
262      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
263    }
264    ArgumentsAccessStub stub(isolate(), type);
265    __ CallStub(&stub);
266
267    SetVar(arguments, rax, rbx, rdx);
268  }
269
270  if (FLAG_trace) {
271    __ CallRuntime(Runtime::kTraceEnter, 0);
272  }
273
274  // Visit the declarations and body unless there is an illegal
275  // redeclaration.
276  if (scope()->HasIllegalRedeclaration()) {
277    Comment cmnt(masm_, "[ Declarations");
278    scope()->VisitIllegalRedeclaration(this);
279
280  } else {
281    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
282    { Comment cmnt(masm_, "[ Declarations");
283      // For named function expressions, declare the function name as a
284      // constant.
285      if (scope()->is_function_scope() && scope()->function() != NULL) {
286        VariableDeclaration* function = scope()->function();
287        ASSERT(function->proxy()->var()->mode() == CONST ||
288               function->proxy()->var()->mode() == CONST_LEGACY);
289        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
290        VisitVariableDeclaration(function);
291      }
292      VisitDeclarations(scope()->declarations());
293    }
294
295    { Comment cmnt(masm_, "[ Stack check");
296      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
297       Label ok;
298       __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
299       __ j(above_equal, &ok, Label::kNear);
300       __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
301       __ bind(&ok);
302    }
303
304    { Comment cmnt(masm_, "[ Body");
305      ASSERT(loop_depth() == 0);
306      VisitStatements(function()->body());
307      ASSERT(loop_depth() == 0);
308    }
309  }
310
311  // Always emit a 'return undefined' in case control fell off the end of
312  // the body.
313  { Comment cmnt(masm_, "[ return <undefined>;");
314    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
315    EmitReturnSequence();
316  }
317}
318
319
320void FullCodeGenerator::ClearAccumulator() {
321  __ Set(rax, 0);
322}
323
324
325void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
326  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
327  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
328                    Smi::FromInt(-delta));
329}
330
331
332void FullCodeGenerator::EmitProfilingCounterReset() {
333  int reset_value = FLAG_interrupt_budget;
334  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
335  __ Move(kScratchRegister, Smi::FromInt(reset_value));
336  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
337}
338
339
340static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
341
342
343void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
344                                                Label* back_edge_target) {
345  Comment cmnt(masm_, "[ Back edge bookkeeping");
346  Label ok;
347
348  ASSERT(back_edge_target->is_bound());
349  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
350  int weight = Min(kMaxBackEdgeWeight,
351                   Max(1, distance / kCodeSizeMultiplier));
352  EmitProfilingCounterDecrement(weight);
353
354  __ j(positive, &ok, Label::kNear);
355  {
356    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
357    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
358    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
359
360    // Record a mapping of this PC offset to the OSR id.  This is used to find
361    // the AST id from the unoptimized code in order to use it as a key into
362    // the deoptimization input data found in the optimized code.
363    RecordBackEdge(stmt->OsrEntryId());
364
365    EmitProfilingCounterReset();
366  }
367  __ bind(&ok);
368
369  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
370  // Record a mapping of the OSR id to this PC.  This is used if the OSR
371  // entry becomes the target of a bailout.  We don't expect it to be, but
372  // we want it to work if it is.
373  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
374}
375
376
377void FullCodeGenerator::EmitReturnSequence() {
378  Comment cmnt(masm_, "[ Return sequence");
379  if (return_label_.is_bound()) {
380    __ jmp(&return_label_);
381  } else {
382    __ bind(&return_label_);
383    if (FLAG_trace) {
384      __ Push(rax);
385      __ CallRuntime(Runtime::kTraceExit, 1);
386    }
387    // Pretend that the exit is a backwards jump to the entry.
388    int weight = 1;
389    if (info_->ShouldSelfOptimize()) {
390      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
391    } else {
392      int distance = masm_->pc_offset();
393      weight = Min(kMaxBackEdgeWeight,
394                   Max(1, distance / kCodeSizeMultiplier));
395    }
396    EmitProfilingCounterDecrement(weight);
397    Label ok;
398    __ j(positive, &ok, Label::kNear);
399    __ Push(rax);
400    __ call(isolate()->builtins()->InterruptCheck(),
401            RelocInfo::CODE_TARGET);
402    __ Pop(rax);
403    EmitProfilingCounterReset();
404    __ bind(&ok);
405#ifdef DEBUG
406    // Add a label for checking the size of the code used for returning.
407    Label check_exit_codesize;
408    masm_->bind(&check_exit_codesize);
409#endif
410    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
411    __ RecordJSReturn();
412    // Do not use the leave instruction here because it is too short to
413    // patch with the code required by the debugger.
414    __ movp(rsp, rbp);
415    __ popq(rbp);
416    int no_frame_start = masm_->pc_offset();
417
418    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
419    __ Ret(arguments_bytes, rcx);
420
421    // Add padding that will be overwritten by a debugger breakpoint.  We
422    // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
423    // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32.
424    const int kPadding = Assembler::kJSReturnSequenceLength -
425                         kPointerSize == kInt64Size ? 7 : 6;
426    for (int i = 0; i < kPadding; ++i) {
427      masm_->int3();
428    }
429    // Check that the size of the code used for returning is large enough
430    // for the debugger's requirements.
431    ASSERT(Assembler::kJSReturnSequenceLength <=
432           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
433
434    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
435  }
436}
437
438
439void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
440  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
441}
442
443
444void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
445  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
446  codegen()->GetVar(result_register(), var);
447}
448
449
450void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
451  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
452  MemOperand operand = codegen()->VarOperand(var, result_register());
453  __ Push(operand);
454}
455
456
457void FullCodeGenerator::TestContext::Plug(Variable* var) const {
458  codegen()->GetVar(result_register(), var);
459  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
460  codegen()->DoTest(this);
461}
462
463
464void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
465}
466
467
468void FullCodeGenerator::AccumulatorValueContext::Plug(
469    Heap::RootListIndex index) const {
470  __ LoadRoot(result_register(), index);
471}
472
473
474void FullCodeGenerator::StackValueContext::Plug(
475    Heap::RootListIndex index) const {
476  __ PushRoot(index);
477}
478
479
480void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
481  codegen()->PrepareForBailoutBeforeSplit(condition(),
482                                          true,
483                                          true_label_,
484                                          false_label_);
485  if (index == Heap::kUndefinedValueRootIndex ||
486      index == Heap::kNullValueRootIndex ||
487      index == Heap::kFalseValueRootIndex) {
488    if (false_label_ != fall_through_) __ jmp(false_label_);
489  } else if (index == Heap::kTrueValueRootIndex) {
490    if (true_label_ != fall_through_) __ jmp(true_label_);
491  } else {
492    __ LoadRoot(result_register(), index);
493    codegen()->DoTest(this);
494  }
495}
496
497
498void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
499}
500
501
502void FullCodeGenerator::AccumulatorValueContext::Plug(
503    Handle<Object> lit) const {
504  if (lit->IsSmi()) {
505    __ SafeMove(result_register(), Smi::cast(*lit));
506  } else {
507    __ Move(result_register(), lit);
508  }
509}
510
511
512void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
513  if (lit->IsSmi()) {
514    __ SafePush(Smi::cast(*lit));
515  } else {
516    __ Push(lit);
517  }
518}
519
520
521void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
522  codegen()->PrepareForBailoutBeforeSplit(condition(),
523                                          true,
524                                          true_label_,
525                                          false_label_);
526  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
527  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
528    if (false_label_ != fall_through_) __ jmp(false_label_);
529  } else if (lit->IsTrue() || lit->IsJSObject()) {
530    if (true_label_ != fall_through_) __ jmp(true_label_);
531  } else if (lit->IsString()) {
532    if (String::cast(*lit)->length() == 0) {
533      if (false_label_ != fall_through_) __ jmp(false_label_);
534    } else {
535      if (true_label_ != fall_through_) __ jmp(true_label_);
536    }
537  } else if (lit->IsSmi()) {
538    if (Smi::cast(*lit)->value() == 0) {
539      if (false_label_ != fall_through_) __ jmp(false_label_);
540    } else {
541      if (true_label_ != fall_through_) __ jmp(true_label_);
542    }
543  } else {
544    // For simplicity we always test the accumulator register.
545    __ Move(result_register(), lit);
546    codegen()->DoTest(this);
547  }
548}
549
550
551void FullCodeGenerator::EffectContext::DropAndPlug(int count,
552                                                   Register reg) const {
553  ASSERT(count > 0);
554  __ Drop(count);
555}
556
557
558void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
559    int count,
560    Register reg) const {
561  ASSERT(count > 0);
562  __ Drop(count);
563  __ Move(result_register(), reg);
564}
565
566
567void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
568                                                       Register reg) const {
569  ASSERT(count > 0);
570  if (count > 1) __ Drop(count - 1);
571  __ movp(Operand(rsp, 0), reg);
572}
573
574
575void FullCodeGenerator::TestContext::DropAndPlug(int count,
576                                                 Register reg) const {
577  ASSERT(count > 0);
578  // For simplicity we always test the accumulator register.
579  __ Drop(count);
580  __ Move(result_register(), reg);
581  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
582  codegen()->DoTest(this);
583}
584
585
586void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
587                                            Label* materialize_false) const {
588  ASSERT(materialize_true == materialize_false);
589  __ bind(materialize_true);
590}
591
592
593void FullCodeGenerator::AccumulatorValueContext::Plug(
594    Label* materialize_true,
595    Label* materialize_false) const {
596  Label done;
597  __ bind(materialize_true);
598  __ Move(result_register(), isolate()->factory()->true_value());
599  __ jmp(&done, Label::kNear);
600  __ bind(materialize_false);
601  __ Move(result_register(), isolate()->factory()->false_value());
602  __ bind(&done);
603}
604
605
606void FullCodeGenerator::StackValueContext::Plug(
607    Label* materialize_true,
608    Label* materialize_false) const {
609  Label done;
610  __ bind(materialize_true);
611  __ Push(isolate()->factory()->true_value());
612  __ jmp(&done, Label::kNear);
613  __ bind(materialize_false);
614  __ Push(isolate()->factory()->false_value());
615  __ bind(&done);
616}
617
618
619void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
620                                          Label* materialize_false) const {
621  ASSERT(materialize_true == true_label_);
622  ASSERT(materialize_false == false_label_);
623}
624
625
626void FullCodeGenerator::EffectContext::Plug(bool flag) const {
627}
628
629
630void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
631  Heap::RootListIndex value_root_index =
632      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
633  __ LoadRoot(result_register(), value_root_index);
634}
635
636
637void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
638  Heap::RootListIndex value_root_index =
639      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
640  __ PushRoot(value_root_index);
641}
642
643
644void FullCodeGenerator::TestContext::Plug(bool flag) const {
645  codegen()->PrepareForBailoutBeforeSplit(condition(),
646                                          true,
647                                          true_label_,
648                                          false_label_);
649  if (flag) {
650    if (true_label_ != fall_through_) __ jmp(true_label_);
651  } else {
652    if (false_label_ != fall_through_) __ jmp(false_label_);
653  }
654}
655
656
657void FullCodeGenerator::DoTest(Expression* condition,
658                               Label* if_true,
659                               Label* if_false,
660                               Label* fall_through) {
661  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
662  CallIC(ic, condition->test_id());
663  __ testp(result_register(), result_register());
664  // The stub returns nonzero for true.
665  Split(not_zero, if_true, if_false, fall_through);
666}
667
668
669void FullCodeGenerator::Split(Condition cc,
670                              Label* if_true,
671                              Label* if_false,
672                              Label* fall_through) {
673  if (if_false == fall_through) {
674    __ j(cc, if_true);
675  } else if (if_true == fall_through) {
676    __ j(NegateCondition(cc), if_false);
677  } else {
678    __ j(cc, if_true);
679    __ jmp(if_false);
680  }
681}
682
683
684MemOperand FullCodeGenerator::StackOperand(Variable* var) {
685  ASSERT(var->IsStackAllocated());
686  // Offset is negative because higher indexes are at lower addresses.
687  int offset = -var->index() * kPointerSize;
688  // Adjust by a (parameter or local) base offset.
689  if (var->IsParameter()) {
690    offset += kFPOnStackSize + kPCOnStackSize +
691              (info_->scope()->num_parameters() - 1) * kPointerSize;
692  } else {
693    offset += JavaScriptFrameConstants::kLocal0Offset;
694  }
695  return Operand(rbp, offset);
696}
697
698
699MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
700  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
701  if (var->IsContextSlot()) {
702    int context_chain_length = scope()->ContextChainLength(var->scope());
703    __ LoadContext(scratch, context_chain_length);
704    return ContextOperand(scratch, var->index());
705  } else {
706    return StackOperand(var);
707  }
708}
709
710
711void FullCodeGenerator::GetVar(Register dest, Variable* var) {
712  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
713  MemOperand location = VarOperand(var, dest);
714  __ movp(dest, location);
715}
716
717
718void FullCodeGenerator::SetVar(Variable* var,
719                               Register src,
720                               Register scratch0,
721                               Register scratch1) {
722  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
723  ASSERT(!scratch0.is(src));
724  ASSERT(!scratch0.is(scratch1));
725  ASSERT(!scratch1.is(src));
726  MemOperand location = VarOperand(var, scratch0);
727  __ movp(location, src);
728
729  // Emit the write barrier code if the location is in the heap.
730  if (var->IsContextSlot()) {
731    int offset = Context::SlotOffset(var->index());
732    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
733  }
734}
735
736
737void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
738                                                     bool should_normalize,
739                                                     Label* if_true,
740                                                     Label* if_false) {
741  // Only prepare for bailouts before splits if we're in a test
742  // context. Otherwise, we let the Visit function deal with the
743  // preparation to avoid preparing with the same AST id twice.
744  if (!context()->IsTest() || !info_->IsOptimizable()) return;
745
746  Label skip;
747  if (should_normalize) __ jmp(&skip, Label::kNear);
748  PrepareForBailout(expr, TOS_REG);
749  if (should_normalize) {
750    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
751    Split(equal, if_true, if_false, NULL);
752    __ bind(&skip);
753  }
754}
755
756
757void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
758  // The variable in the declaration always resides in the current context.
759  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
760  if (generate_debug_code_) {
761    // Check that we're not inside a with or catch context.
762    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
763    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
764    __ Check(not_equal, kDeclarationInWithContext);
765    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
766    __ Check(not_equal, kDeclarationInCatchContext);
767  }
768}
769
770
771void FullCodeGenerator::VisitVariableDeclaration(
772    VariableDeclaration* declaration) {
773  // If it was not possible to allocate the variable at compile time, we
774  // need to "declare" it at runtime to make sure it actually exists in the
775  // local context.
776  VariableProxy* proxy = declaration->proxy();
777  VariableMode mode = declaration->mode();
778  Variable* variable = proxy->var();
779  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
780  switch (variable->location()) {
781    case Variable::UNALLOCATED:
782      globals_->Add(variable->name(), zone());
783      globals_->Add(variable->binding_needs_init()
784                        ? isolate()->factory()->the_hole_value()
785                    : isolate()->factory()->undefined_value(),
786                    zone());
787      break;
788
789    case Variable::PARAMETER:
790    case Variable::LOCAL:
791      if (hole_init) {
792        Comment cmnt(masm_, "[ VariableDeclaration");
793        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
794        __ movp(StackOperand(variable), kScratchRegister);
795      }
796      break;
797
798    case Variable::CONTEXT:
799      if (hole_init) {
800        Comment cmnt(masm_, "[ VariableDeclaration");
801        EmitDebugCheckDeclarationContext(variable);
802        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
803        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
804        // No write barrier since the hole value is in old space.
805        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
806      }
807      break;
808
809    case Variable::LOOKUP: {
810      Comment cmnt(masm_, "[ VariableDeclaration");
811      __ Push(rsi);
812      __ Push(variable->name());
813      // Declaration nodes are always introduced in one of four modes.
814      ASSERT(IsDeclaredVariableMode(mode));
815      PropertyAttributes attr =
816          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
817      __ Push(Smi::FromInt(attr));
818      // Push initial value, if any.
819      // Note: For variables we must not push an initial value (such as
820      // 'undefined') because we may have a (legal) redeclaration and we
821      // must not destroy the current value.
822      if (hole_init) {
823        __ PushRoot(Heap::kTheHoleValueRootIndex);
824      } else {
825        __ Push(Smi::FromInt(0));  // Indicates no initial value.
826      }
827      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
828      break;
829    }
830  }
831}
832
833
834void FullCodeGenerator::VisitFunctionDeclaration(
835    FunctionDeclaration* declaration) {
836  VariableProxy* proxy = declaration->proxy();
837  Variable* variable = proxy->var();
838  switch (variable->location()) {
839    case Variable::UNALLOCATED: {
840      globals_->Add(variable->name(), zone());
841      Handle<SharedFunctionInfo> function =
842          Compiler::BuildFunctionInfo(declaration->fun(), script());
843      // Check for stack-overflow exception.
844      if (function.is_null()) return SetStackOverflow();
845      globals_->Add(function, zone());
846      break;
847    }
848
849    case Variable::PARAMETER:
850    case Variable::LOCAL: {
851      Comment cmnt(masm_, "[ FunctionDeclaration");
852      VisitForAccumulatorValue(declaration->fun());
853      __ movp(StackOperand(variable), result_register());
854      break;
855    }
856
857    case Variable::CONTEXT: {
858      Comment cmnt(masm_, "[ FunctionDeclaration");
859      EmitDebugCheckDeclarationContext(variable);
860      VisitForAccumulatorValue(declaration->fun());
861      __ movp(ContextOperand(rsi, variable->index()), result_register());
862      int offset = Context::SlotOffset(variable->index());
863      // We know that we have written a function, which is not a smi.
864      __ RecordWriteContextSlot(rsi,
865                                offset,
866                                result_register(),
867                                rcx,
868                                kDontSaveFPRegs,
869                                EMIT_REMEMBERED_SET,
870                                OMIT_SMI_CHECK);
871      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
872      break;
873    }
874
875    case Variable::LOOKUP: {
876      Comment cmnt(masm_, "[ FunctionDeclaration");
877      __ Push(rsi);
878      __ Push(variable->name());
879      __ Push(Smi::FromInt(NONE));
880      VisitForStackValue(declaration->fun());
881      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
882      break;
883    }
884  }
885}
886
887
888void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
889  Variable* variable = declaration->proxy()->var();
890  ASSERT(variable->location() == Variable::CONTEXT);
891  ASSERT(variable->interface()->IsFrozen());
892
893  Comment cmnt(masm_, "[ ModuleDeclaration");
894  EmitDebugCheckDeclarationContext(variable);
895
896  // Load instance object.
897  __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
898  __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
899  __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));
900
901  // Assign it.
902  __ movp(ContextOperand(rsi, variable->index()), rax);
903  // We know that we have written a module, which is not a smi.
904  __ RecordWriteContextSlot(rsi,
905                            Context::SlotOffset(variable->index()),
906                            rax,
907                            rcx,
908                            kDontSaveFPRegs,
909                            EMIT_REMEMBERED_SET,
910                            OMIT_SMI_CHECK);
911  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
912
913  // Traverse into body.
914  Visit(declaration->module());
915}
916
917
918void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
919  VariableProxy* proxy = declaration->proxy();
920  Variable* variable = proxy->var();
921  switch (variable->location()) {
922    case Variable::UNALLOCATED:
923      // TODO(rossberg)
924      break;
925
926    case Variable::CONTEXT: {
927      Comment cmnt(masm_, "[ ImportDeclaration");
928      EmitDebugCheckDeclarationContext(variable);
929      // TODO(rossberg)
930      break;
931    }
932
933    case Variable::PARAMETER:
934    case Variable::LOCAL:
935    case Variable::LOOKUP:
936      UNREACHABLE();
937  }
938}
939
940
941void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
942  // TODO(rossberg)
943}
944
945
946void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
947  // Call the runtime to declare the globals.
948  __ Push(rsi);  // The context is the first argument.
949  __ Push(pairs);
950  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
951  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
952  // Return value is ignored.
953}
954
955
956void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
957  // Call the runtime to declare the modules.
958  __ Push(descriptions);
959  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
960  // Return value is ignored.
961}
962
963
964void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
965  Comment cmnt(masm_, "[ SwitchStatement");
966  Breakable nested_statement(this, stmt);
967  SetStatementPosition(stmt);
968
969  // Keep the switch value on the stack until a case matches.
970  VisitForStackValue(stmt->tag());
971  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
972
973  ZoneList<CaseClause*>* clauses = stmt->cases();
974  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
975
976  Label next_test;  // Recycled for each test.
977  // Compile all the tests with branches to their bodies.
978  for (int i = 0; i < clauses->length(); i++) {
979    CaseClause* clause = clauses->at(i);
980    clause->body_target()->Unuse();
981
982    // The default is not a test, but remember it as final fall through.
983    if (clause->is_default()) {
984      default_clause = clause;
985      continue;
986    }
987
988    Comment cmnt(masm_, "[ Case comparison");
989    __ bind(&next_test);
990    next_test.Unuse();
991
992    // Compile the label expression.
993    VisitForAccumulatorValue(clause->label());
994
995    // Perform the comparison as if via '==='.
996    __ movp(rdx, Operand(rsp, 0));  // Switch value.
997    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
998    JumpPatchSite patch_site(masm_);
999    if (inline_smi_code) {
1000      Label slow_case;
1001      __ movp(rcx, rdx);
1002      __ orp(rcx, rax);
1003      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
1004
1005      __ cmpp(rdx, rax);
1006      __ j(not_equal, &next_test);
1007      __ Drop(1);  // Switch value is no longer needed.
1008      __ jmp(clause->body_target());
1009      __ bind(&slow_case);
1010    }
1011
1012    // Record position before stub call for type feedback.
1013    SetSourcePosition(clause->position());
1014    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1015    CallIC(ic, clause->CompareId());
1016    patch_site.EmitPatchInfo();
1017
1018    Label skip;
1019    __ jmp(&skip, Label::kNear);
1020    PrepareForBailout(clause, TOS_REG);
1021    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1022    __ j(not_equal, &next_test);
1023    __ Drop(1);
1024    __ jmp(clause->body_target());
1025    __ bind(&skip);
1026
1027    __ testp(rax, rax);
1028    __ j(not_equal, &next_test);
1029    __ Drop(1);  // Switch value is no longer needed.
1030    __ jmp(clause->body_target());
1031  }
1032
1033  // Discard the test value and jump to the default if present, otherwise to
1034  // the end of the statement.
1035  __ bind(&next_test);
1036  __ Drop(1);  // Switch value is no longer needed.
1037  if (default_clause == NULL) {
1038    __ jmp(nested_statement.break_label());
1039  } else {
1040    __ jmp(default_clause->body_target());
1041  }
1042
1043  // Compile all the case bodies.
1044  for (int i = 0; i < clauses->length(); i++) {
1045    Comment cmnt(masm_, "[ Case body");
1046    CaseClause* clause = clauses->at(i);
1047    __ bind(clause->body_target());
1048    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1049    VisitStatements(clause->statements());
1050  }
1051
1052  __ bind(nested_statement.break_label());
1053  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1054}
1055
1056
1057void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1058  Comment cmnt(masm_, "[ ForInStatement");
1059  int slot = stmt->ForInFeedbackSlot();
1060  SetStatementPosition(stmt);
1061
1062  Label loop, exit;
1063  ForIn loop_statement(this, stmt);
1064  increment_loop_depth();
1065
1066  // Get the object to enumerate over. If the object is null or undefined, skip
1067  // over the loop.  See ECMA-262 version 5, section 12.6.4.
1068  VisitForAccumulatorValue(stmt->enumerable());
1069  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1070  __ j(equal, &exit);
1071  Register null_value = rdi;
1072  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1073  __ cmpp(rax, null_value);
1074  __ j(equal, &exit);
1075
1076  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1077
1078  // Convert the object to a JS object.
1079  Label convert, done_convert;
1080  __ JumpIfSmi(rax, &convert);
1081  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1082  __ j(above_equal, &done_convert);
1083  __ bind(&convert);
1084  __ Push(rax);
1085  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1086  __ bind(&done_convert);
1087  __ Push(rax);
1088
1089  // Check for proxies.
1090  Label call_runtime;
1091  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1092  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1093  __ j(below_equal, &call_runtime);
1094
1095  // Check cache validity in generated code. This is a fast case for
1096  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1097  // guarantee cache validity, call the runtime system to check cache
1098  // validity or get the property names in a fixed array.
1099  __ CheckEnumCache(null_value, &call_runtime);
1100
1101  // The enum cache is valid.  Load the map of the object being
1102  // iterated over and use the cache for the iteration.
1103  Label use_cache;
1104  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1105  __ jmp(&use_cache, Label::kNear);
1106
1107  // Get the set of properties to enumerate.
1108  __ bind(&call_runtime);
1109  __ Push(rax);  // Duplicate the enumerable object on the stack.
1110  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1111
1112  // If we got a map from the runtime call, we can do a fast
1113  // modification check. Otherwise, we got a fixed array, and we have
1114  // to do a slow check.
1115  Label fixed_array;
1116  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1117                 Heap::kMetaMapRootIndex);
1118  __ j(not_equal, &fixed_array);
1119
1120  // We got a map in register rax. Get the enumeration cache from it.
1121  __ bind(&use_cache);
1122
1123  Label no_descriptors;
1124
1125  __ EnumLength(rdx, rax);
1126  __ Cmp(rdx, Smi::FromInt(0));
1127  __ j(equal, &no_descriptors);
1128
1129  __ LoadInstanceDescriptors(rax, rcx);
1130  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1131  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1132
1133  // Set up the four remaining stack slots.
1134  __ Push(rax);  // Map.
1135  __ Push(rcx);  // Enumeration cache.
1136  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
1137  __ Push(Smi::FromInt(0));  // Initial index.
1138  __ jmp(&loop);
1139
1140  __ bind(&no_descriptors);
1141  __ addp(rsp, Immediate(kPointerSize));
1142  __ jmp(&exit);
1143
1144  // We got a fixed array in register rax. Iterate through that.
1145  Label non_proxy;
1146  __ bind(&fixed_array);
1147
1148  // No need for a write barrier, we are storing a Smi in the feedback vector.
1149  __ Move(rbx, FeedbackVector());
1150  __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1151          TypeFeedbackInfo::MegamorphicSentinel(isolate()));
1152  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
1153  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
1154  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1155  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1156  __ j(above, &non_proxy);
1157  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
1158  __ bind(&non_proxy);
1159  __ Push(rbx);  // Smi
1160  __ Push(rax);  // Array
1161  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1162  __ Push(rax);  // Fixed array length (as smi).
1163  __ Push(Smi::FromInt(0));  // Initial index.
1164
1165  // Generate code for doing the condition check.
1166  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1167  __ bind(&loop);
1168  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
1169  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
1170  __ j(above_equal, loop_statement.break_label());
1171
1172  // Get the current entry of the array into register rbx.
1173  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1174  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1175  __ movp(rbx, FieldOperand(rbx,
1176                            index.reg,
1177                            index.scale,
1178                            FixedArray::kHeaderSize));
1179
1180  // Get the expected map from the stack or a smi in the
1181  // permanent slow case into register rdx.
1182  __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1183
1184  // Check if the expected map still matches that of the enumerable.
1185  // If not, we may have to filter the key.
1186  Label update_each;
1187  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1188  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1189  __ j(equal, &update_each, Label::kNear);
1190
1191  // For proxies, no filtering is done.
1192  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1193  __ Cmp(rdx, Smi::FromInt(0));
1194  __ j(equal, &update_each, Label::kNear);
1195
1196  // Convert the entry to a string or null if it isn't a property
1197  // anymore. If the property has been removed while iterating, we
1198  // just skip it.
1199  __ Push(rcx);  // Enumerable.
1200  __ Push(rbx);  // Current entry.
1201  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1202  __ Cmp(rax, Smi::FromInt(0));
1203  __ j(equal, loop_statement.continue_label());
1204  __ movp(rbx, rax);
1205
1206  // Update the 'each' property or variable from the possibly filtered
1207  // entry in register rbx.
1208  __ bind(&update_each);
1209  __ movp(result_register(), rbx);
1210  // Perform the assignment as if via '='.
1211  { EffectContext context(this);
1212    EmitAssignment(stmt->each());
1213  }
1214
1215  // Generate code for the body of the loop.
1216  Visit(stmt->body());
1217
1218  // Generate code for going to the next element by incrementing the
1219  // index (smi) stored on top of the stack.
1220  __ bind(loop_statement.continue_label());
1221  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1222
1223  EmitBackEdgeBookkeeping(stmt, &loop);
1224  __ jmp(&loop);
1225
1226  // Remove the pointers stored on the stack.
1227  __ bind(loop_statement.break_label());
1228  __ addp(rsp, Immediate(5 * kPointerSize));
1229
1230  // Exit and decrement the loop depth.
1231  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1232  __ bind(&exit);
1233  decrement_loop_depth();
1234}
1235
1236
1237void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1238  Comment cmnt(masm_, "[ ForOfStatement");
1239  SetStatementPosition(stmt);
1240
1241  Iteration loop_statement(this, stmt);
1242  increment_loop_depth();
1243
1244  // var iterable = subject
1245  VisitForAccumulatorValue(stmt->assign_iterable());
1246
1247  // As with for-in, skip the loop if the iterable is null or undefined.
1248  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1249  __ j(equal, loop_statement.break_label());
1250  __ CompareRoot(rax, Heap::kNullValueRootIndex);
1251  __ j(equal, loop_statement.break_label());
1252
1253  // var iterator = iterable[Symbol.iterator]();
1254  VisitForEffect(stmt->assign_iterator());
1255
1256  // Loop entry.
1257  __ bind(loop_statement.continue_label());
1258
1259  // result = iterator.next()
1260  VisitForEffect(stmt->next_result());
1261
1262  // if (result.done) break;
1263  Label result_not_done;
1264  VisitForControl(stmt->result_done(),
1265                  loop_statement.break_label(),
1266                  &result_not_done,
1267                  &result_not_done);
1268  __ bind(&result_not_done);
1269
1270  // each = result.value
1271  VisitForEffect(stmt->assign_each());
1272
1273  // Generate code for the body of the loop.
1274  Visit(stmt->body());
1275
1276  // Check stack before looping.
1277  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1278  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1279  __ jmp(loop_statement.continue_label());
1280
1281  // Exit and decrement the loop depth.
1282  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1283  __ bind(loop_statement.break_label());
1284  decrement_loop_depth();
1285}
1286
1287
1288void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1289                                       bool pretenure) {
1290  // Use the fast case closure allocation code that allocates in new
1291  // space for nested functions that don't need literals cloning. If
1292  // we're running with the --always-opt or the --prepare-always-opt
1293  // flag, we need to use the runtime function so that the new function
1294  // we are creating here gets a chance to have its code optimized and
1295  // doesn't just get a copy of the existing unoptimized code.
1296  if (!FLAG_always_opt &&
1297      !FLAG_prepare_always_opt &&
1298      !pretenure &&
1299      scope()->is_function_scope() &&
1300      info->num_literals() == 0) {
1301    FastNewClosureStub stub(isolate(),
1302                            info->strict_mode(),
1303                            info->is_generator());
1304    __ Move(rbx, info);
1305    __ CallStub(&stub);
1306  } else {
1307    __ Push(rsi);
1308    __ Push(info);
1309    __ Push(pretenure
1310            ? isolate()->factory()->true_value()
1311            : isolate()->factory()->false_value());
1312    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1313  }
1314  context()->Plug(rax);
1315}
1316
1317
1318void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1319  Comment cmnt(masm_, "[ VariableProxy");
1320  EmitVariableLoad(expr);
1321}
1322
1323
1324void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1325                                                      TypeofState typeof_state,
1326                                                      Label* slow) {
1327  Register context = rsi;
1328  Register temp = rdx;
1329
1330  Scope* s = scope();
1331  while (s != NULL) {
1332    if (s->num_heap_slots() > 0) {
1333      if (s->calls_sloppy_eval()) {
1334        // Check that extension is NULL.
1335        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1336                Immediate(0));
1337        __ j(not_equal, slow);
1338      }
1339      // Load next context in chain.
1340      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1341      // Walk the rest of the chain without clobbering rsi.
1342      context = temp;
1343    }
1344    // If no outer scope calls eval, we do not need to check more
1345    // context extensions.  If we have reached an eval scope, we check
1346    // all extensions from this point.
1347    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1348    s = s->outer_scope();
1349  }
1350
1351  if (s != NULL && s->is_eval_scope()) {
1352    // Loop up the context chain.  There is no frame effect so it is
1353    // safe to use raw labels here.
1354    Label next, fast;
1355    if (!context.is(temp)) {
1356      __ movp(temp, context);
1357    }
1358    // Load map for comparison into register, outside loop.
1359    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1360    __ bind(&next);
1361    // Terminate at native context.
1362    __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1363    __ j(equal, &fast, Label::kNear);
1364    // Check that extension is NULL.
1365    __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1366    __ j(not_equal, slow);
1367    // Load next context in chain.
1368    __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1369    __ jmp(&next);
1370    __ bind(&fast);
1371  }
1372
1373  // All extension objects were empty and it is safe to use a global
1374  // load IC call.
1375  __ movp(rax, GlobalObjectOperand());
1376  __ Move(rcx, var->name());
1377  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1378      ? NOT_CONTEXTUAL
1379      : CONTEXTUAL;
1380  CallLoadIC(mode);
1381}
1382
1383
1384MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1385                                                                Label* slow) {
1386  ASSERT(var->IsContextSlot());
1387  Register context = rsi;
1388  Register temp = rbx;
1389
1390  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1391    if (s->num_heap_slots() > 0) {
1392      if (s->calls_sloppy_eval()) {
1393        // Check that extension is NULL.
1394        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1395                Immediate(0));
1396        __ j(not_equal, slow);
1397      }
1398      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1399      // Walk the rest of the chain without clobbering rsi.
1400      context = temp;
1401    }
1402  }
1403  // Check that last extension is NULL.
1404  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1405  __ j(not_equal, slow);
1406
1407  // This function is used only for loads, not stores, so it's safe to
1408  // return an rsi-based operand (the write barrier cannot be allowed to
1409  // destroy the rsi register).
1410  return ContextOperand(context, var->index());
1411}
1412
1413
1414void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1415                                                  TypeofState typeof_state,
1416                                                  Label* slow,
1417                                                  Label* done) {
1418  // Generate fast-case code for variables that might be shadowed by
1419  // eval-introduced variables.  Eval is used a lot without
1420  // introducing variables.  In those cases, we do not want to
1421  // perform a runtime call for all variables in the scope
1422  // containing the eval.
1423  if (var->mode() == DYNAMIC_GLOBAL) {
1424    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1425    __ jmp(done);
1426  } else if (var->mode() == DYNAMIC_LOCAL) {
1427    Variable* local = var->local_if_not_shadowed();
1428    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1429    if (local->mode() == LET || local->mode() == CONST ||
1430        local->mode() == CONST_LEGACY) {
1431      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1432      __ j(not_equal, done);
1433      if (local->mode() == CONST_LEGACY) {
1434        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1435      } else {  // LET || CONST
1436        __ Push(var->name());
1437        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1438      }
1439    }
1440    __ jmp(done);
1441  }
1442}
1443
1444
1445void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1446  // Record position before possible IC call.
1447  SetSourcePosition(proxy->position());
1448  Variable* var = proxy->var();
1449
1450  // Three cases: global variables, lookup variables, and all other types of
1451  // variables.
1452  switch (var->location()) {
1453    case Variable::UNALLOCATED: {
1454      Comment cmnt(masm_, "[ Global variable");
1455      // Use inline caching. Variable name is passed in rcx and the global
1456      // object on the stack.
1457      __ Move(rcx, var->name());
1458      __ movp(rax, GlobalObjectOperand());
1459      CallLoadIC(CONTEXTUAL);
1460      context()->Plug(rax);
1461      break;
1462    }
1463
1464    case Variable::PARAMETER:
1465    case Variable::LOCAL:
1466    case Variable::CONTEXT: {
1467      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1468                                               : "[ Stack slot");
1469      if (var->binding_needs_init()) {
1470        // var->scope() may be NULL when the proxy is located in eval code and
1471        // refers to a potential outside binding. Currently those bindings are
1472        // always looked up dynamically, i.e. in that case
1473        //     var->location() == LOOKUP.
1474        // always holds.
1475        ASSERT(var->scope() != NULL);
1476
1477        // Check if the binding really needs an initialization check. The check
1478        // can be skipped in the following situation: we have a LET or CONST
1479        // binding in harmony mode, both the Variable and the VariableProxy have
1480        // the same declaration scope (i.e. they are both in global code, in the
1481        // same function or in the same eval code) and the VariableProxy is in
1482        // the source physically located after the initializer of the variable.
1483        //
1484        // We cannot skip any initialization checks for CONST in non-harmony
1485        // mode because const variables may be declared but never initialized:
1486        //   if (false) { const x; }; var y = x;
1487        //
1488        // The condition on the declaration scopes is a conservative check for
1489        // nested functions that access a binding and are called before the
1490        // binding is initialized:
1491        //   function() { f(); let x = 1; function f() { x = 2; } }
1492        //
1493        bool skip_init_check;
1494        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1495          skip_init_check = false;
1496        } else {
1497          // Check that we always have valid source position.
1498          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1499          ASSERT(proxy->position() != RelocInfo::kNoPosition);
1500          skip_init_check = var->mode() != CONST_LEGACY &&
1501              var->initializer_position() < proxy->position();
1502        }
1503
1504        if (!skip_init_check) {
1505          // Let and const need a read barrier.
1506          Label done;
1507          GetVar(rax, var);
1508          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1509          __ j(not_equal, &done, Label::kNear);
1510          if (var->mode() == LET || var->mode() == CONST) {
1511            // Throw a reference error when using an uninitialized let/const
1512            // binding in harmony mode.
1513            __ Push(var->name());
1514            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1515          } else {
1516            // Uninitalized const bindings outside of harmony mode are unholed.
1517            ASSERT(var->mode() == CONST_LEGACY);
1518            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1519          }
1520          __ bind(&done);
1521          context()->Plug(rax);
1522          break;
1523        }
1524      }
1525      context()->Plug(var);
1526      break;
1527    }
1528
1529    case Variable::LOOKUP: {
1530      Comment cmnt(masm_, "[ Lookup slot");
1531      Label done, slow;
1532      // Generate code for loading from variables potentially shadowed
1533      // by eval-introduced variables.
1534      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1535      __ bind(&slow);
1536      __ Push(rsi);  // Context.
1537      __ Push(var->name());
1538      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1539      __ bind(&done);
1540      context()->Plug(rax);
1541      break;
1542    }
1543  }
1544}
1545
1546
1547void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1548  Comment cmnt(masm_, "[ RegExpLiteral");
1549  Label materialized;
1550  // Registers will be used as follows:
1551  // rdi = JS function.
1552  // rcx = literals array.
1553  // rbx = regexp literal.
1554  // rax = regexp literal clone.
1555  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1556  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1557  int literal_offset =
1558      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1559  __ movp(rbx, FieldOperand(rcx, literal_offset));
1560  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1561  __ j(not_equal, &materialized, Label::kNear);
1562
1563  // Create regexp literal using runtime function
1564  // Result will be in rax.
1565  __ Push(rcx);
1566  __ Push(Smi::FromInt(expr->literal_index()));
1567  __ Push(expr->pattern());
1568  __ Push(expr->flags());
1569  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1570  __ movp(rbx, rax);
1571
1572  __ bind(&materialized);
1573  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1574  Label allocated, runtime_allocate;
1575  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1576  __ jmp(&allocated);
1577
1578  __ bind(&runtime_allocate);
1579  __ Push(rbx);
1580  __ Push(Smi::FromInt(size));
1581  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1582  __ Pop(rbx);
1583
1584  __ bind(&allocated);
1585  // Copy the content into the newly allocated memory.
1586  // (Unroll copy loop once for better throughput).
1587  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1588    __ movp(rdx, FieldOperand(rbx, i));
1589    __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1590    __ movp(FieldOperand(rax, i), rdx);
1591    __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1592  }
1593  if ((size % (2 * kPointerSize)) != 0) {
1594    __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1595    __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1596  }
1597  context()->Plug(rax);
1598}
1599
1600
1601void FullCodeGenerator::EmitAccessor(Expression* expression) {
1602  if (expression == NULL) {
1603    __ PushRoot(Heap::kNullValueRootIndex);
1604  } else {
1605    VisitForStackValue(expression);
1606  }
1607}
1608
1609
1610void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1611  Comment cmnt(masm_, "[ ObjectLiteral");
1612
1613  expr->BuildConstantProperties(isolate());
1614  Handle<FixedArray> constant_properties = expr->constant_properties();
1615  int flags = expr->fast_elements()
1616      ? ObjectLiteral::kFastElements
1617      : ObjectLiteral::kNoFlags;
1618  flags |= expr->has_function()
1619      ? ObjectLiteral::kHasFunction
1620      : ObjectLiteral::kNoFlags;
1621  int properties_count = constant_properties->length() / 2;
1622  if (expr->may_store_doubles() || expr->depth() > 1 ||
1623      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1624      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1625    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1626    __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1627    __ Push(Smi::FromInt(expr->literal_index()));
1628    __ Push(constant_properties);
1629    __ Push(Smi::FromInt(flags));
1630    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1631  } else {
1632    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1633    __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1634    __ Move(rbx, Smi::FromInt(expr->literal_index()));
1635    __ Move(rcx, constant_properties);
1636    __ Move(rdx, Smi::FromInt(flags));
1637    FastCloneShallowObjectStub stub(isolate(), properties_count);
1638    __ CallStub(&stub);
1639  }
1640
1641  // If result_saved is true the result is on top of the stack.  If
1642  // result_saved is false the result is in rax.
1643  bool result_saved = false;
1644
1645  // Mark all computed expressions that are bound to a key that
1646  // is shadowed by a later occurrence of the same key. For the
1647  // marked expressions, no store code is emitted.
1648  expr->CalculateEmitStore(zone());
1649
1650  AccessorTable accessor_table(zone());
1651  for (int i = 0; i < expr->properties()->length(); i++) {
1652    ObjectLiteral::Property* property = expr->properties()->at(i);
1653    if (property->IsCompileTimeValue()) continue;
1654
1655    Literal* key = property->key();
1656    Expression* value = property->value();
1657    if (!result_saved) {
1658      __ Push(rax);  // Save result on the stack
1659      result_saved = true;
1660    }
1661    switch (property->kind()) {
1662      case ObjectLiteral::Property::CONSTANT:
1663        UNREACHABLE();
1664      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1665        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1666        // Fall through.
1667      case ObjectLiteral::Property::COMPUTED:
1668        if (key->value()->IsInternalizedString()) {
1669          if (property->emit_store()) {
1670            VisitForAccumulatorValue(value);
1671            __ Move(rcx, key->value());
1672            __ movp(rdx, Operand(rsp, 0));
1673            CallStoreIC(key->LiteralFeedbackId());
1674            PrepareForBailoutForId(key->id(), NO_REGISTERS);
1675          } else {
1676            VisitForEffect(value);
1677          }
1678          break;
1679        }
1680        __ Push(Operand(rsp, 0));  // Duplicate receiver.
1681        VisitForStackValue(key);
1682        VisitForStackValue(value);
1683        if (property->emit_store()) {
1684          __ Push(Smi::FromInt(NONE));    // PropertyAttributes
1685          __ CallRuntime(Runtime::kSetProperty, 4);
1686        } else {
1687          __ Drop(3);
1688        }
1689        break;
1690      case ObjectLiteral::Property::PROTOTYPE:
1691        __ Push(Operand(rsp, 0));  // Duplicate receiver.
1692        VisitForStackValue(value);
1693        if (property->emit_store()) {
1694          __ CallRuntime(Runtime::kSetPrototype, 2);
1695        } else {
1696          __ Drop(2);
1697        }
1698        break;
1699      case ObjectLiteral::Property::GETTER:
1700        accessor_table.lookup(key)->second->getter = value;
1701        break;
1702      case ObjectLiteral::Property::SETTER:
1703        accessor_table.lookup(key)->second->setter = value;
1704        break;
1705    }
1706  }
1707
1708  // Emit code to define accessors, using only a single call to the runtime for
1709  // each pair of corresponding getters and setters.
1710  for (AccessorTable::Iterator it = accessor_table.begin();
1711       it != accessor_table.end();
1712       ++it) {
1713    __ Push(Operand(rsp, 0));  // Duplicate receiver.
1714    VisitForStackValue(it->first);
1715    EmitAccessor(it->second->getter);
1716    EmitAccessor(it->second->setter);
1717    __ Push(Smi::FromInt(NONE));
1718    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1719  }
1720
1721  if (expr->has_function()) {
1722    ASSERT(result_saved);
1723    __ Push(Operand(rsp, 0));
1724    __ CallRuntime(Runtime::kToFastProperties, 1);
1725  }
1726
1727  if (result_saved) {
1728    context()->PlugTOS();
1729  } else {
1730    context()->Plug(rax);
1731  }
1732}
1733
1734
1735void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1736  Comment cmnt(masm_, "[ ArrayLiteral");
1737
1738  expr->BuildConstantElements(isolate());
1739  int flags = expr->depth() == 1
1740      ? ArrayLiteral::kShallowElements
1741      : ArrayLiteral::kNoFlags;
1742
1743  ZoneList<Expression*>* subexprs = expr->values();
1744  int length = subexprs->length();
1745  Handle<FixedArray> constant_elements = expr->constant_elements();
1746  ASSERT_EQ(2, constant_elements->length());
1747  ElementsKind constant_elements_kind =
1748      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1749  bool has_constant_fast_elements =
1750      IsFastObjectElementsKind(constant_elements_kind);
1751  Handle<FixedArrayBase> constant_elements_values(
1752      FixedArrayBase::cast(constant_elements->get(1)));
1753
1754  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1755  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1756    // If the only customer of allocation sites is transitioning, then
1757    // we can turn it off if we don't have anywhere else to transition to.
1758    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1759  }
1760
1761  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1762    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1763    __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1764    __ Push(Smi::FromInt(expr->literal_index()));
1765    __ Push(constant_elements);
1766    __ Push(Smi::FromInt(flags));
1767    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1768  } else {
1769    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1770    __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1771    __ Move(rbx, Smi::FromInt(expr->literal_index()));
1772    __ Move(rcx, constant_elements);
1773    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1774    __ CallStub(&stub);
1775  }
1776
1777  bool result_saved = false;  // Is the result saved to the stack?
1778
1779  // Emit code to evaluate all the non-constant subexpressions and to store
1780  // them into the newly cloned array.
1781  for (int i = 0; i < length; i++) {
1782    Expression* subexpr = subexprs->at(i);
1783    // If the subexpression is a literal or a simple materialized literal it
1784    // is already set in the cloned array.
1785    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1786
1787    if (!result_saved) {
1788      __ Push(rax);  // array literal
1789      __ Push(Smi::FromInt(expr->literal_index()));
1790      result_saved = true;
1791    }
1792    VisitForAccumulatorValue(subexpr);
1793
1794    if (IsFastObjectElementsKind(constant_elements_kind)) {
1795      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1796      // cannot transition and don't need to call the runtime stub.
1797      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1798      __ movp(rbx, Operand(rsp, kPointerSize));  // Copy of array literal.
1799      __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1800      // Store the subexpression value in the array's elements.
1801      __ movp(FieldOperand(rbx, offset), result_register());
1802      // Update the write barrier for the array store.
1803      __ RecordWriteField(rbx, offset, result_register(), rcx,
1804                          kDontSaveFPRegs,
1805                          EMIT_REMEMBERED_SET,
1806                          INLINE_SMI_CHECK);
1807    } else {
1808      // Store the subexpression value in the array's elements.
1809      __ Move(rcx, Smi::FromInt(i));
1810      StoreArrayLiteralElementStub stub(isolate());
1811      __ CallStub(&stub);
1812    }
1813
1814    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1815  }
1816
1817  if (result_saved) {
1818    __ addp(rsp, Immediate(kPointerSize));  // literal index
1819    context()->PlugTOS();
1820  } else {
1821    context()->Plug(rax);
1822  }
1823}
1824
1825
1826void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1827  ASSERT(expr->target()->IsValidReferenceExpression());
1828
1829  Comment cmnt(masm_, "[ Assignment");
1830
1831  // Left-hand side can only be a property, a global or a (parameter or local)
1832  // slot.
1833  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1834  LhsKind assign_type = VARIABLE;
1835  Property* property = expr->target()->AsProperty();
1836  if (property != NULL) {
1837    assign_type = (property->key()->IsPropertyName())
1838        ? NAMED_PROPERTY
1839        : KEYED_PROPERTY;
1840  }
1841
1842  // Evaluate LHS expression.
1843  switch (assign_type) {
1844    case VARIABLE:
1845      // Nothing to do here.
1846      break;
1847    case NAMED_PROPERTY:
1848      if (expr->is_compound()) {
1849        // We need the receiver both on the stack and in the accumulator.
1850        VisitForAccumulatorValue(property->obj());
1851        __ Push(result_register());
1852      } else {
1853        VisitForStackValue(property->obj());
1854      }
1855      break;
1856    case KEYED_PROPERTY: {
1857      if (expr->is_compound()) {
1858        VisitForStackValue(property->obj());
1859        VisitForAccumulatorValue(property->key());
1860        __ movp(rdx, Operand(rsp, 0));
1861        __ Push(rax);
1862      } else {
1863        VisitForStackValue(property->obj());
1864        VisitForStackValue(property->key());
1865      }
1866      break;
1867    }
1868  }
1869
1870  // For compound assignments we need another deoptimization point after the
1871  // variable/property load.
1872  if (expr->is_compound()) {
1873    { AccumulatorValueContext context(this);
1874      switch (assign_type) {
1875        case VARIABLE:
1876          EmitVariableLoad(expr->target()->AsVariableProxy());
1877          PrepareForBailout(expr->target(), TOS_REG);
1878          break;
1879        case NAMED_PROPERTY:
1880          EmitNamedPropertyLoad(property);
1881          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1882          break;
1883        case KEYED_PROPERTY:
1884          EmitKeyedPropertyLoad(property);
1885          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1886          break;
1887      }
1888    }
1889
1890    Token::Value op = expr->binary_op();
1891    __ Push(rax);  // Left operand goes on the stack.
1892    VisitForAccumulatorValue(expr->value());
1893
1894    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1895        ? OVERWRITE_RIGHT
1896        : NO_OVERWRITE;
1897    SetSourcePosition(expr->position() + 1);
1898    AccumulatorValueContext context(this);
1899    if (ShouldInlineSmiCase(op)) {
1900      EmitInlineSmiBinaryOp(expr->binary_operation(),
1901                            op,
1902                            mode,
1903                            expr->target(),
1904                            expr->value());
1905    } else {
1906      EmitBinaryOp(expr->binary_operation(), op, mode);
1907    }
1908    // Deoptimization point in case the binary operation may have side effects.
1909    PrepareForBailout(expr->binary_operation(), TOS_REG);
1910  } else {
1911    VisitForAccumulatorValue(expr->value());
1912  }
1913
1914  // Record source position before possible IC call.
1915  SetSourcePosition(expr->position());
1916
1917  // Store the value.
1918  switch (assign_type) {
1919    case VARIABLE:
1920      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1921                             expr->op());
1922      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1923      context()->Plug(rax);
1924      break;
1925    case NAMED_PROPERTY:
1926      EmitNamedPropertyAssignment(expr);
1927      break;
1928    case KEYED_PROPERTY:
1929      EmitKeyedPropertyAssignment(expr);
1930      break;
1931  }
1932}
1933
1934
1935void FullCodeGenerator::VisitYield(Yield* expr) {
1936  Comment cmnt(masm_, "[ Yield");
1937  // Evaluate yielded value first; the initial iterator definition depends on
1938  // this.  It stays on the stack while we update the iterator.
1939  VisitForStackValue(expr->expression());
1940
1941  switch (expr->yield_kind()) {
1942    case Yield::SUSPEND:
1943      // Pop value from top-of-stack slot; box result into result register.
1944      EmitCreateIteratorResult(false);
1945      __ Push(result_register());
1946      // Fall through.
1947    case Yield::INITIAL: {
1948      Label suspend, continuation, post_runtime, resume;
1949
1950      __ jmp(&suspend);
1951
1952      __ bind(&continuation);
1953      __ jmp(&resume);
1954
1955      __ bind(&suspend);
1956      VisitForAccumulatorValue(expr->generator_object());
1957      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1958      __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
1959              Smi::FromInt(continuation.pos()));
1960      __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
1961      __ movp(rcx, rsi);
1962      __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
1963                          kDontSaveFPRegs);
1964      __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
1965      __ cmpp(rsp, rbx);
1966      __ j(equal, &post_runtime);
1967      __ Push(rax);  // generator object
1968      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
1969      __ movp(context_register(),
1970              Operand(rbp, StandardFrameConstants::kContextOffset));
1971      __ bind(&post_runtime);
1972
1973      __ Pop(result_register());
1974      EmitReturnSequence();
1975
1976      __ bind(&resume);
1977      context()->Plug(result_register());
1978      break;
1979    }
1980
1981    case Yield::FINAL: {
1982      VisitForAccumulatorValue(expr->generator_object());
1983      __ Move(FieldOperand(result_register(),
1984                           JSGeneratorObject::kContinuationOffset),
1985              Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
1986      // Pop value from top-of-stack slot, box result into result register.
1987      EmitCreateIteratorResult(true);
1988      EmitUnwindBeforeReturn();
1989      EmitReturnSequence();
1990      break;
1991    }
1992
1993    case Yield::DELEGATING: {
1994      VisitForStackValue(expr->generator_object());
1995
1996      // Initial stack layout is as follows:
1997      // [sp + 1 * kPointerSize] iter
1998      // [sp + 0 * kPointerSize] g
1999
2000      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2001      Label l_next, l_call, l_loop;
2002      // Initial send value is undefined.
2003      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2004      __ jmp(&l_next);
2005
2006      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2007      __ bind(&l_catch);
2008      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2009      __ LoadRoot(rcx, Heap::kthrow_stringRootIndex);    // "throw"
2010      __ Push(rcx);
2011      __ Push(Operand(rsp, 2 * kPointerSize));           // iter
2012      __ Push(rax);                                      // exception
2013      __ jmp(&l_call);
2014
2015      // try { received = %yield result }
2016      // Shuffle the received result above a try handler and yield it without
2017      // re-boxing.
2018      __ bind(&l_try);
2019      __ Pop(rax);                                       // result
2020      __ PushTryHandler(StackHandler::CATCH, expr->index());
2021      const int handler_size = StackHandlerConstants::kSize;
2022      __ Push(rax);                                      // result
2023      __ jmp(&l_suspend);
2024      __ bind(&l_continuation);
2025      __ jmp(&l_resume);
2026      __ bind(&l_suspend);
2027      const int generator_object_depth = kPointerSize + handler_size;
2028      __ movp(rax, Operand(rsp, generator_object_depth));
2029      __ Push(rax);                                      // g
2030      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2031      __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2032              Smi::FromInt(l_continuation.pos()));
2033      __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2034      __ movp(rcx, rsi);
2035      __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2036                          kDontSaveFPRegs);
2037      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2038      __ movp(context_register(),
2039              Operand(rbp, StandardFrameConstants::kContextOffset));
2040      __ Pop(rax);                                       // result
2041      EmitReturnSequence();
2042      __ bind(&l_resume);                                // received in rax
2043      __ PopTryHandler();
2044
2045      // receiver = iter; f = 'next'; arg = received;
2046      __ bind(&l_next);
2047      __ LoadRoot(rcx, Heap::knext_stringRootIndex);     // "next"
2048      __ Push(rcx);
2049      __ Push(Operand(rsp, 2 * kPointerSize));           // iter
2050      __ Push(rax);                                      // received
2051
2052      // result = receiver[f](arg);
2053      __ bind(&l_call);
2054      __ movp(rdx, Operand(rsp, kPointerSize));
2055      __ movp(rax, Operand(rsp, 2 * kPointerSize));
2056      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2057      CallIC(ic, TypeFeedbackId::None());
2058      __ movp(rdi, rax);
2059      __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2060      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2061      __ CallStub(&stub);
2062
2063      __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2064      __ Drop(1);  // The function is still on the stack; drop it.
2065
2066      // if (!result.done) goto l_try;
2067      __ bind(&l_loop);
2068      __ Push(rax);                                      // save result
2069      __ LoadRoot(rcx, Heap::kdone_stringRootIndex);     // "done"
2070      CallLoadIC(NOT_CONTEXTUAL);                        // result.done in rax
2071      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2072      CallIC(bool_ic);
2073      __ testp(result_register(), result_register());
2074      __ j(zero, &l_try);
2075
2076      // result.value
2077      __ Pop(rax);                                       // result
2078      __ LoadRoot(rcx, Heap::kvalue_stringRootIndex);    // "value"
2079      CallLoadIC(NOT_CONTEXTUAL);                        // result.value in rax
2080      context()->DropAndPlug(2, rax);                    // drop iter and g
2081      break;
2082    }
2083  }
2084}
2085
2086
2087void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2088    Expression *value,
2089    JSGeneratorObject::ResumeMode resume_mode) {
2090  // The value stays in rax, and is ultimately read by the resumed generator, as
2091  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2092  // is read to throw the value when the resumed generator is already closed.
2093  // rbx will hold the generator object until the activation has been resumed.
2094  VisitForStackValue(generator);
2095  VisitForAccumulatorValue(value);
2096  __ Pop(rbx);
2097
2098  // Check generator state.
2099  Label wrong_state, closed_state, done;
2100  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2101  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2102  __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2103                Smi::FromInt(0));
2104  __ j(equal, &closed_state);
2105  __ j(less, &wrong_state);
2106
2107  // Load suspended function and context.
2108  __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2109  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2110
2111  // Push receiver.
2112  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2113
2114  // Push holes for arguments to generator function.
2115  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2116  __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2117      SharedFunctionInfo::kFormalParameterCountOffset);
2118  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2119  Label push_argument_holes, push_frame;
2120  __ bind(&push_argument_holes);
2121  __ subp(rdx, Immediate(1));
2122  __ j(carry, &push_frame);
2123  __ Push(rcx);
2124  __ jmp(&push_argument_holes);
2125
2126  // Enter a new JavaScript frame, and initialize its slots as they were when
2127  // the generator was suspended.
2128  Label resume_frame;
2129  __ bind(&push_frame);
2130  __ call(&resume_frame);
2131  __ jmp(&done);
2132  __ bind(&resume_frame);
2133  __ pushq(rbp);  // Caller's frame pointer.
2134  __ movp(rbp, rsp);
2135  __ Push(rsi);  // Callee's context.
2136  __ Push(rdi);  // Callee's JS Function.
2137
2138  // Load the operand stack size.
2139  __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2140  __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2141  __ SmiToInteger32(rdx, rdx);
2142
2143  // If we are sending a value and there is no operand stack, we can jump back
2144  // in directly.
2145  if (resume_mode == JSGeneratorObject::NEXT) {
2146    Label slow_resume;
2147    __ cmpp(rdx, Immediate(0));
2148    __ j(not_zero, &slow_resume);
2149    __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2150    __ SmiToInteger64(rcx,
2151        FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2152    __ addp(rdx, rcx);
2153    __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2154            Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2155    __ jmp(rdx);
2156    __ bind(&slow_resume);
2157  }
2158
2159  // Otherwise, we push holes for the operand stack and call the runtime to fix
2160  // up the stack and the handlers.
2161  Label push_operand_holes, call_resume;
2162  __ bind(&push_operand_holes);
2163  __ subp(rdx, Immediate(1));
2164  __ j(carry, &call_resume);
2165  __ Push(rcx);
2166  __ jmp(&push_operand_holes);
2167  __ bind(&call_resume);
2168  __ Push(rbx);
2169  __ Push(result_register());
2170  __ Push(Smi::FromInt(resume_mode));
2171  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2172  // Not reached: the runtime call returns elsewhere.
2173  __ Abort(kGeneratorFailedToResume);
2174
2175  // Reach here when generator is closed.
2176  __ bind(&closed_state);
2177  if (resume_mode == JSGeneratorObject::NEXT) {
2178    // Return completed iterator result when generator is closed.
2179    __ PushRoot(Heap::kUndefinedValueRootIndex);
2180    // Pop value from top-of-stack slot; box result into result register.
2181    EmitCreateIteratorResult(true);
2182  } else {
2183    // Throw the provided value.
2184    __ Push(rax);
2185    __ CallRuntime(Runtime::kHiddenThrow, 1);
2186  }
2187  __ jmp(&done);
2188
2189  // Throw error if we attempt to operate on a running generator.
2190  __ bind(&wrong_state);
2191  __ Push(rbx);
2192  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2193
2194  __ bind(&done);
2195  context()->Plug(result_register());
2196}
2197
2198
2199void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2200  Label gc_required;
2201  Label allocated;
2202
2203  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2204
2205  __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
2206  __ jmp(&allocated);
2207
2208  __ bind(&gc_required);
2209  __ Push(Smi::FromInt(map->instance_size()));
2210  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2211  __ movp(context_register(),
2212          Operand(rbp, StandardFrameConstants::kContextOffset));
2213
2214  __ bind(&allocated);
2215  __ Move(rbx, map);
2216  __ Pop(rcx);
2217  __ Move(rdx, isolate()->factory()->ToBoolean(done));
2218  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2219  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2220  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2221          isolate()->factory()->empty_fixed_array());
2222  __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2223          isolate()->factory()->empty_fixed_array());
2224  __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2225          rcx);
2226  __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2227          rdx);
2228
2229  // Only the value field needs a write barrier, as the other values are in the
2230  // root set.
2231  __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2232                      rcx, rdx, kDontSaveFPRegs);
2233}
2234
2235
2236void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2237  SetSourcePosition(prop->position());
2238  Literal* key = prop->key()->AsLiteral();
2239  __ Move(rcx, key->value());
2240  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2241}
2242
2243
2244void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2245  SetSourcePosition(prop->position());
2246  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2247  CallIC(ic, prop->PropertyFeedbackId());
2248}
2249
2250
2251void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2252                                              Token::Value op,
2253                                              OverwriteMode mode,
2254                                              Expression* left,
2255                                              Expression* right) {
2256  // Do combined smi check of the operands. Left operand is on the
2257  // stack (popped into rdx). Right operand is in rax but moved into
2258  // rcx to make the shifts easier.
2259  Label done, stub_call, smi_case;
2260  __ Pop(rdx);
2261  __ movp(rcx, rax);
2262  __ orp(rax, rdx);
2263  JumpPatchSite patch_site(masm_);
2264  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2265
2266  __ bind(&stub_call);
2267  __ movp(rax, rcx);
2268  BinaryOpICStub stub(isolate(), op, mode);
2269  CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2270  patch_site.EmitPatchInfo();
2271  __ jmp(&done, Label::kNear);
2272
2273  __ bind(&smi_case);
2274  switch (op) {
2275    case Token::SAR:
2276      __ SmiShiftArithmeticRight(rax, rdx, rcx);
2277      break;
2278    case Token::SHL:
2279      __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2280      break;
2281    case Token::SHR:
2282      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2283      break;
2284    case Token::ADD:
2285      __ SmiAdd(rax, rdx, rcx, &stub_call);
2286      break;
2287    case Token::SUB:
2288      __ SmiSub(rax, rdx, rcx, &stub_call);
2289      break;
2290    case Token::MUL:
2291      __ SmiMul(rax, rdx, rcx, &stub_call);
2292      break;
2293    case Token::BIT_OR:
2294      __ SmiOr(rax, rdx, rcx);
2295      break;
2296    case Token::BIT_AND:
2297      __ SmiAnd(rax, rdx, rcx);
2298      break;
2299    case Token::BIT_XOR:
2300      __ SmiXor(rax, rdx, rcx);
2301      break;
2302    default:
2303      UNREACHABLE();
2304      break;
2305  }
2306
2307  __ bind(&done);
2308  context()->Plug(rax);
2309}
2310
2311
2312void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2313                                     Token::Value op,
2314                                     OverwriteMode mode) {
2315  __ Pop(rdx);
2316  BinaryOpICStub stub(isolate(), op, mode);
2317  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2318  CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2319  patch_site.EmitPatchInfo();
2320  context()->Plug(rax);
2321}
2322
2323
2324void FullCodeGenerator::EmitAssignment(Expression* expr) {
2325  ASSERT(expr->IsValidReferenceExpression());
2326
2327  // Left-hand side can only be a property, a global or a (parameter or local)
2328  // slot.
2329  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2330  LhsKind assign_type = VARIABLE;
2331  Property* prop = expr->AsProperty();
2332  if (prop != NULL) {
2333    assign_type = (prop->key()->IsPropertyName())
2334        ? NAMED_PROPERTY
2335        : KEYED_PROPERTY;
2336  }
2337
2338  switch (assign_type) {
2339    case VARIABLE: {
2340      Variable* var = expr->AsVariableProxy()->var();
2341      EffectContext context(this);
2342      EmitVariableAssignment(var, Token::ASSIGN);
2343      break;
2344    }
2345    case NAMED_PROPERTY: {
2346      __ Push(rax);  // Preserve value.
2347      VisitForAccumulatorValue(prop->obj());
2348      __ movp(rdx, rax);
2349      __ Pop(rax);  // Restore value.
2350      __ Move(rcx, prop->key()->AsLiteral()->value());
2351      CallStoreIC();
2352      break;
2353    }
2354    case KEYED_PROPERTY: {
2355      __ Push(rax);  // Preserve value.
2356      VisitForStackValue(prop->obj());
2357      VisitForAccumulatorValue(prop->key());
2358      __ movp(rcx, rax);
2359      __ Pop(rdx);
2360      __ Pop(rax);  // Restore value.
2361      Handle<Code> ic = strict_mode() == SLOPPY
2362          ? isolate()->builtins()->KeyedStoreIC_Initialize()
2363          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2364      CallIC(ic);
2365      break;
2366    }
2367  }
2368  context()->Plug(rax);
2369}
2370
2371
2372void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2373    Variable* var, MemOperand location) {
2374  __ movp(location, rax);
2375  if (var->IsContextSlot()) {
2376    __ movp(rdx, rax);
2377    __ RecordWriteContextSlot(
2378        rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2379  }
2380}
2381
2382
2383void FullCodeGenerator::EmitCallStoreContextSlot(
2384    Handle<String> name, StrictMode strict_mode) {
2385  __ Push(rax);  // Value.
2386  __ Push(rsi);  // Context.
2387  __ Push(name);
2388  __ Push(Smi::FromInt(strict_mode));
2389  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2390}
2391
2392
2393void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2394                                               Token::Value op) {
2395  if (var->IsUnallocated()) {
2396    // Global var, const, or let.
2397    __ Move(rcx, var->name());
2398    __ movp(rdx, GlobalObjectOperand());
2399    CallStoreIC();
2400
2401  } else if (op == Token::INIT_CONST_LEGACY) {
2402    // Const initializers need a write barrier.
2403    ASSERT(!var->IsParameter());  // No const parameters.
2404    if (var->IsLookupSlot()) {
2405      __ Push(rax);
2406      __ Push(rsi);
2407      __ Push(var->name());
2408      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2409    } else {
2410      ASSERT(var->IsStackLocal() || var->IsContextSlot());
2411      Label skip;
2412      MemOperand location = VarOperand(var, rcx);
2413      __ movp(rdx, location);
2414      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2415      __ j(not_equal, &skip);
2416      EmitStoreToStackLocalOrContextSlot(var, location);
2417      __ bind(&skip);
2418    }
2419
2420  } else if (var->mode() == LET && op != Token::INIT_LET) {
2421    // Non-initializing assignment to let variable needs a write barrier.
2422    if (var->IsLookupSlot()) {
2423      EmitCallStoreContextSlot(var->name(), strict_mode());
2424    } else {
2425      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2426      Label assign;
2427      MemOperand location = VarOperand(var, rcx);
2428      __ movp(rdx, location);
2429      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2430      __ j(not_equal, &assign, Label::kNear);
2431      __ Push(var->name());
2432      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2433      __ bind(&assign);
2434      EmitStoreToStackLocalOrContextSlot(var, location);
2435    }
2436
2437  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2438    // Assignment to var or initializing assignment to let/const
2439    // in harmony mode.
2440    if (var->IsLookupSlot()) {
2441      EmitCallStoreContextSlot(var->name(), strict_mode());
2442    } else {
2443      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2444      MemOperand location = VarOperand(var, rcx);
2445      if (generate_debug_code_ && op == Token::INIT_LET) {
2446        // Check for an uninitialized let binding.
2447        __ movp(rdx, location);
2448        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2449        __ Check(equal, kLetBindingReInitialization);
2450      }
2451      EmitStoreToStackLocalOrContextSlot(var, location);
2452    }
2453  }
2454  // Non-initializing assignments to consts are ignored.
2455}
2456
2457
2458void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2459  // Assignment to a property, using a named store IC.
2460  Property* prop = expr->target()->AsProperty();
2461  ASSERT(prop != NULL);
2462  ASSERT(prop->key()->IsLiteral());
2463
2464  // Record source code position before IC call.
2465  SetSourcePosition(expr->position());
2466  __ Move(rcx, prop->key()->AsLiteral()->value());
2467  __ Pop(rdx);
2468  CallStoreIC(expr->AssignmentFeedbackId());
2469
2470  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2471  context()->Plug(rax);
2472}
2473
2474
2475void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2476  // Assignment to a property, using a keyed store IC.
2477
2478  __ Pop(rcx);
2479  __ Pop(rdx);
2480  // Record source code position before IC call.
2481  SetSourcePosition(expr->position());
2482  Handle<Code> ic = strict_mode() == SLOPPY
2483      ? isolate()->builtins()->KeyedStoreIC_Initialize()
2484      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2485  CallIC(ic, expr->AssignmentFeedbackId());
2486
2487  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2488  context()->Plug(rax);
2489}
2490
2491
2492void FullCodeGenerator::VisitProperty(Property* expr) {
2493  Comment cmnt(masm_, "[ Property");
2494  Expression* key = expr->key();
2495
2496  if (key->IsPropertyName()) {
2497    VisitForAccumulatorValue(expr->obj());
2498    EmitNamedPropertyLoad(expr);
2499    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2500    context()->Plug(rax);
2501  } else {
2502    VisitForStackValue(expr->obj());
2503    VisitForAccumulatorValue(expr->key());
2504    __ Pop(rdx);
2505    EmitKeyedPropertyLoad(expr);
2506    context()->Plug(rax);
2507  }
2508}
2509
2510
2511void FullCodeGenerator::CallIC(Handle<Code> code,
2512                               TypeFeedbackId ast_id) {
2513  ic_total_count_++;
2514  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2515}
2516
2517
2518// Code common for calls using the IC.
2519void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2520  Expression* callee = expr->expression();
2521
2522  CallIC::CallType call_type = callee->IsVariableProxy()
2523      ? CallIC::FUNCTION
2524      : CallIC::METHOD;
2525  // Get the target function.
2526  if (call_type == CallIC::FUNCTION) {
2527    { StackValueContext context(this);
2528      EmitVariableLoad(callee->AsVariableProxy());
2529      PrepareForBailout(callee, NO_REGISTERS);
2530    }
2531    // Push undefined as receiver. This is patched in the method prologue if it
2532    // is a sloppy mode method.
2533    __ Push(isolate()->factory()->undefined_value());
2534  } else {
2535    // Load the function from the receiver.
2536    ASSERT(callee->IsProperty());
2537    __ movp(rax, Operand(rsp, 0));
2538    EmitNamedPropertyLoad(callee->AsProperty());
2539    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2540    // Push the target function under the receiver.
2541    __ Push(Operand(rsp, 0));
2542    __ movp(Operand(rsp, kPointerSize), rax);
2543  }
2544
2545  EmitCall(expr, call_type);
2546}
2547
2548
2549// Common code for calls using the IC.
2550void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2551                                                Expression* key) {
2552  // Load the key.
2553  VisitForAccumulatorValue(key);
2554
2555  Expression* callee = expr->expression();
2556
2557  // Load the function from the receiver.
2558  ASSERT(callee->IsProperty());
2559  __ movp(rdx, Operand(rsp, 0));
2560  EmitKeyedPropertyLoad(callee->AsProperty());
2561  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2562
2563  // Push the target function under the receiver.
2564  __ Push(Operand(rsp, 0));
2565  __ movp(Operand(rsp, kPointerSize), rax);
2566
2567  EmitCall(expr, CallIC::METHOD);
2568}
2569
2570
2571void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2572  // Load the arguments.
2573  ZoneList<Expression*>* args = expr->arguments();
2574  int arg_count = args->length();
2575  { PreservePositionScope scope(masm()->positions_recorder());
2576    for (int i = 0; i < arg_count; i++) {
2577      VisitForStackValue(args->at(i));
2578    }
2579  }
2580
2581  // Record source position of the IC call.
2582  SetSourcePosition(expr->position());
2583  Handle<Code> ic = CallIC::initialize_stub(
2584      isolate(), arg_count, call_type);
2585  __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
2586  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2587  // Don't assign a type feedback id to the IC, since type feedback is provided
2588  // by the vector above.
2589  CallIC(ic);
2590
2591  RecordJSReturnSite(expr);
2592
2593  // Restore context register.
2594  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2595  // Discard the function left on TOS.
2596  context()->DropAndPlug(1, rax);
2597}
2598
2599
2600void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2601  // Push copy of the first argument or undefined if it doesn't exist.
2602  if (arg_count > 0) {
2603    __ Push(Operand(rsp, arg_count * kPointerSize));
2604  } else {
2605    __ PushRoot(Heap::kUndefinedValueRootIndex);
2606  }
2607
2608  // Push the receiver of the enclosing function and do runtime call.
2609  StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2610  __ Push(args.GetReceiverOperand());
2611
2612  // Push the language mode.
2613  __ Push(Smi::FromInt(strict_mode()));
2614
2615  // Push the start position of the scope the calls resides in.
2616  __ Push(Smi::FromInt(scope()->start_position()));
2617
2618  // Do the runtime call.
2619  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2620}
2621
2622
2623void FullCodeGenerator::VisitCall(Call* expr) {
2624#ifdef DEBUG
2625  // We want to verify that RecordJSReturnSite gets called on all paths
2626  // through this function.  Avoid early returns.
2627  expr->return_is_recorded_ = false;
2628#endif
2629
2630  Comment cmnt(masm_, "[ Call");
2631  Expression* callee = expr->expression();
2632  Call::CallType call_type = expr->GetCallType(isolate());
2633
2634  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2635    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2636    // to resolve the function we need to call and the receiver of the call.
2637    // Then we call the resolved function using the given arguments.
2638    ZoneList<Expression*>* args = expr->arguments();
2639    int arg_count = args->length();
2640    { PreservePositionScope pos_scope(masm()->positions_recorder());
2641      VisitForStackValue(callee);
2642      __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.
2643
2644      // Push the arguments.
2645      for (int i = 0; i < arg_count; i++) {
2646        VisitForStackValue(args->at(i));
2647      }
2648
2649      // Push a copy of the function (found below the arguments) and resolve
2650      // eval.
2651      __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2652      EmitResolvePossiblyDirectEval(arg_count);
2653
2654      // The runtime call returns a pair of values in rax (function) and
2655      // rdx (receiver). Touch up the stack with the right values.
2656      __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2657      __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2658    }
2659    // Record source position for debugger.
2660    SetSourcePosition(expr->position());
2661    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2662    __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2663    __ CallStub(&stub);
2664    RecordJSReturnSite(expr);
2665    // Restore context register.
2666    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2667    context()->DropAndPlug(1, rax);
2668  } else if (call_type == Call::GLOBAL_CALL) {
2669    EmitCallWithLoadIC(expr);
2670
2671  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2672    // Call to a lookup slot (dynamically introduced variable).
2673    VariableProxy* proxy = callee->AsVariableProxy();
2674    Label slow, done;
2675
2676    { PreservePositionScope scope(masm()->positions_recorder());
2677      // Generate code for loading from variables potentially shadowed by
2678      // eval-introduced variables.
2679      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2680    }
2681    __ bind(&slow);
2682    // Call the runtime to find the function to call (returned in rax) and
2683    // the object holding it (returned in rdx).
2684    __ Push(context_register());
2685    __ Push(proxy->name());
2686    __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2687    __ Push(rax);  // Function.
2688    __ Push(rdx);  // Receiver.
2689
2690    // If fast case code has been generated, emit code to push the function
2691    // and receiver and have the slow path jump around this code.
2692    if (done.is_linked()) {
2693      Label call;
2694      __ jmp(&call, Label::kNear);
2695      __ bind(&done);
2696      // Push function.
2697      __ Push(rax);
2698      // The receiver is implicitly the global receiver. Indicate this by
2699      // passing the hole to the call function stub.
2700      __ PushRoot(Heap::kUndefinedValueRootIndex);
2701      __ bind(&call);
2702    }
2703
2704    // The receiver is either the global receiver or an object found by
2705    // LoadContextSlot.
2706    EmitCall(expr);
2707  } else if (call_type == Call::PROPERTY_CALL) {
2708    Property* property = callee->AsProperty();
2709    { PreservePositionScope scope(masm()->positions_recorder());
2710      VisitForStackValue(property->obj());
2711    }
2712    if (property->key()->IsPropertyName()) {
2713      EmitCallWithLoadIC(expr);
2714    } else {
2715      EmitKeyedCallWithLoadIC(expr, property->key());
2716    }
2717  } else {
2718    ASSERT(call_type == Call::OTHER_CALL);
2719    // Call to an arbitrary expression not handled specially above.
2720    { PreservePositionScope scope(masm()->positions_recorder());
2721      VisitForStackValue(callee);
2722    }
2723    __ PushRoot(Heap::kUndefinedValueRootIndex);
2724    // Emit function call.
2725    EmitCall(expr);
2726  }
2727
2728#ifdef DEBUG
2729  // RecordJSReturnSite should have been called.
2730  ASSERT(expr->return_is_recorded_);
2731#endif
2732}
2733
2734
2735void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2736  Comment cmnt(masm_, "[ CallNew");
2737  // According to ECMA-262, section 11.2.2, page 44, the function
2738  // expression in new calls must be evaluated before the
2739  // arguments.
2740
2741  // Push constructor on the stack.  If it's not a function it's used as
2742  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2743  // ignored.
2744  VisitForStackValue(expr->expression());
2745
2746  // Push the arguments ("left-to-right") on the stack.
2747  ZoneList<Expression*>* args = expr->arguments();
2748  int arg_count = args->length();
2749  for (int i = 0; i < arg_count; i++) {
2750    VisitForStackValue(args->at(i));
2751  }
2752
2753  // Call the construct call builtin that handles allocation and
2754  // constructor invocation.
2755  SetSourcePosition(expr->position());
2756
2757  // Load function and argument count into rdi and rax.
2758  __ Set(rax, arg_count);
2759  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2760
2761  // Record call targets in unoptimized code, but not in the snapshot.
2762  if (FLAG_pretenuring_call_new) {
2763    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2764    ASSERT(expr->AllocationSiteFeedbackSlot() ==
2765           expr->CallNewFeedbackSlot() + 1);
2766  }
2767
2768  __ Move(rbx, FeedbackVector());
2769  __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2770
2771  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2772  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2773  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2774  context()->Plug(rax);
2775}
2776
2777
2778void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2779  ZoneList<Expression*>* args = expr->arguments();
2780  ASSERT(args->length() == 1);
2781
2782  VisitForAccumulatorValue(args->at(0));
2783
2784  Label materialize_true, materialize_false;
2785  Label* if_true = NULL;
2786  Label* if_false = NULL;
2787  Label* fall_through = NULL;
2788  context()->PrepareTest(&materialize_true, &materialize_false,
2789                         &if_true, &if_false, &fall_through);
2790
2791  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2792  __ JumpIfSmi(rax, if_true);
2793  __ jmp(if_false);
2794
2795  context()->Plug(if_true, if_false);
2796}
2797
2798
2799void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2800  ZoneList<Expression*>* args = expr->arguments();
2801  ASSERT(args->length() == 1);
2802
2803  VisitForAccumulatorValue(args->at(0));
2804
2805  Label materialize_true, materialize_false;
2806  Label* if_true = NULL;
2807  Label* if_false = NULL;
2808  Label* fall_through = NULL;
2809  context()->PrepareTest(&materialize_true, &materialize_false,
2810                         &if_true, &if_false, &fall_through);
2811
2812  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2813  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2814  Split(non_negative_smi, if_true, if_false, fall_through);
2815
2816  context()->Plug(if_true, if_false);
2817}
2818
2819
2820void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2821  ZoneList<Expression*>* args = expr->arguments();
2822  ASSERT(args->length() == 1);
2823
2824  VisitForAccumulatorValue(args->at(0));
2825
2826  Label materialize_true, materialize_false;
2827  Label* if_true = NULL;
2828  Label* if_false = NULL;
2829  Label* fall_through = NULL;
2830  context()->PrepareTest(&materialize_true, &materialize_false,
2831                         &if_true, &if_false, &fall_through);
2832
2833  __ JumpIfSmi(rax, if_false);
2834  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2835  __ j(equal, if_true);
2836  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2837  // Undetectable objects behave like undefined when tested with typeof.
2838  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2839           Immediate(1 << Map::kIsUndetectable));
2840  __ j(not_zero, if_false);
2841  __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2842  __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2843  __ j(below, if_false);
2844  __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2845  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2846  Split(below_equal, if_true, if_false, fall_through);
2847
2848  context()->Plug(if_true, if_false);
2849}
2850
2851
2852void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2853  ZoneList<Expression*>* args = expr->arguments();
2854  ASSERT(args->length() == 1);
2855
2856  VisitForAccumulatorValue(args->at(0));
2857
2858  Label materialize_true, materialize_false;
2859  Label* if_true = NULL;
2860  Label* if_false = NULL;
2861  Label* fall_through = NULL;
2862  context()->PrepareTest(&materialize_true, &materialize_false,
2863                         &if_true, &if_false, &fall_through);
2864
2865  __ JumpIfSmi(rax, if_false);
2866  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2867  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2868  Split(above_equal, if_true, if_false, fall_through);
2869
2870  context()->Plug(if_true, if_false);
2871}
2872
2873
2874void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2875  ZoneList<Expression*>* args = expr->arguments();
2876  ASSERT(args->length() == 1);
2877
2878  VisitForAccumulatorValue(args->at(0));
2879
2880  Label materialize_true, materialize_false;
2881  Label* if_true = NULL;
2882  Label* if_false = NULL;
2883  Label* fall_through = NULL;
2884  context()->PrepareTest(&materialize_true, &materialize_false,
2885                         &if_true, &if_false, &fall_through);
2886
2887  __ JumpIfSmi(rax, if_false);
2888  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2889  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2890           Immediate(1 << Map::kIsUndetectable));
2891  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2892  Split(not_zero, if_true, if_false, fall_through);
2893
2894  context()->Plug(if_true, if_false);
2895}
2896
2897
2898void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2899    CallRuntime* expr) {
2900  ZoneList<Expression*>* args = expr->arguments();
2901  ASSERT(args->length() == 1);
2902
2903  VisitForAccumulatorValue(args->at(0));
2904
2905  Label materialize_true, materialize_false, skip_lookup;
2906  Label* if_true = NULL;
2907  Label* if_false = NULL;
2908  Label* fall_through = NULL;
2909  context()->PrepareTest(&materialize_true, &materialize_false,
2910                         &if_true, &if_false, &fall_through);
2911
2912  __ AssertNotSmi(rax);
2913
2914  // Check whether this map has already been checked to be safe for default
2915  // valueOf.
2916  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2917  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2918           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2919  __ j(not_zero, &skip_lookup);
2920
2921  // Check for fast case object. Generate false result for slow case object.
2922  __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2923  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2924  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2925  __ j(equal, if_false);
2926
2927  // Look for valueOf string in the descriptor array, and indicate false if
2928  // found. Since we omit an enumeration index check, if it is added via a
2929  // transition that shares its descriptor array, this is a false positive.
2930  Label entry, loop, done;
2931
2932  // Skip loop if no descriptors are valid.
2933  __ NumberOfOwnDescriptors(rcx, rbx);
2934  __ cmpp(rcx, Immediate(0));
2935  __ j(equal, &done);
2936
2937  __ LoadInstanceDescriptors(rbx, r8);
2938  // rbx: descriptor array.
2939  // rcx: valid entries in the descriptor array.
2940  // Calculate the end of the descriptor array.
2941  __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
2942  __ leap(rcx, Operand(r8, rcx, times_8, DescriptorArray::kFirstOffset));
2943  // Calculate location of the first key name.
2944  __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
2945  // Loop through all the keys in the descriptor array. If one of these is the
2946  // internalized string "valueOf" the result is false.
2947  __ jmp(&entry);
2948  __ bind(&loop);
2949  __ movp(rdx, FieldOperand(r8, 0));
2950  __ Cmp(rdx, isolate()->factory()->value_of_string());
2951  __ j(equal, if_false);
2952  __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
2953  __ bind(&entry);
2954  __ cmpp(r8, rcx);
2955  __ j(not_equal, &loop);
2956
2957  __ bind(&done);
2958
2959  // Set the bit in the map to indicate that there is no local valueOf field.
2960  __ orp(FieldOperand(rbx, Map::kBitField2Offset),
2961         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2962
2963  __ bind(&skip_lookup);
2964
2965  // If a valueOf property is not found on the object check that its
2966  // prototype is the un-modified String prototype. If not result is false.
2967  __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2968  __ testp(rcx, Immediate(kSmiTagMask));
2969  __ j(zero, if_false);
2970  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2971  __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2972  __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
2973  __ cmpp(rcx,
2974          ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2975  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2976  Split(equal, if_true, if_false, fall_through);
2977
2978  context()->Plug(if_true, if_false);
2979}
2980
2981
2982void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2983  ZoneList<Expression*>* args = expr->arguments();
2984  ASSERT(args->length() == 1);
2985
2986  VisitForAccumulatorValue(args->at(0));
2987
2988  Label materialize_true, materialize_false;
2989  Label* if_true = NULL;
2990  Label* if_false = NULL;
2991  Label* fall_through = NULL;
2992  context()->PrepareTest(&materialize_true, &materialize_false,
2993                         &if_true, &if_false, &fall_through);
2994
2995  __ JumpIfSmi(rax, if_false);
2996  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2997  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2998  Split(equal, if_true, if_false, fall_through);
2999
3000  context()->Plug(if_true, if_false);
3001}
3002
3003
3004void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3005  ZoneList<Expression*>* args = expr->arguments();
3006  ASSERT(args->length() == 1);
3007
3008  VisitForAccumulatorValue(args->at(0));
3009
3010  Label materialize_true, materialize_false;
3011  Label* if_true = NULL;
3012  Label* if_false = NULL;
3013  Label* fall_through = NULL;
3014  context()->PrepareTest(&materialize_true, &materialize_false,
3015                         &if_true, &if_false, &fall_through);
3016
3017  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3018  __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
3019  __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3020          Immediate(0x1));
3021  __ j(no_overflow, if_false);
3022  __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3023          Immediate(0x00000000));
3024  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3025  Split(equal, if_true, if_false, fall_through);
3026
3027  context()->Plug(if_true, if_false);
3028}
3029
3030
3031void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3032  ZoneList<Expression*>* args = expr->arguments();
3033  ASSERT(args->length() == 1);
3034
3035  VisitForAccumulatorValue(args->at(0));
3036
3037  Label materialize_true, materialize_false;
3038  Label* if_true = NULL;
3039  Label* if_false = NULL;
3040  Label* fall_through = NULL;
3041  context()->PrepareTest(&materialize_true, &materialize_false,
3042                         &if_true, &if_false, &fall_through);
3043
3044  __ JumpIfSmi(rax, if_false);
3045  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3046  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3047  Split(equal, if_true, if_false, fall_through);
3048
3049  context()->Plug(if_true, if_false);
3050}
3051
3052
3053void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3054  ZoneList<Expression*>* args = expr->arguments();
3055  ASSERT(args->length() == 1);
3056
3057  VisitForAccumulatorValue(args->at(0));
3058
3059  Label materialize_true, materialize_false;
3060  Label* if_true = NULL;
3061  Label* if_false = NULL;
3062  Label* fall_through = NULL;
3063  context()->PrepareTest(&materialize_true, &materialize_false,
3064                         &if_true, &if_false, &fall_through);
3065
3066  __ JumpIfSmi(rax, if_false);
3067  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3068  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3069  Split(equal, if_true, if_false, fall_through);
3070
3071  context()->Plug(if_true, if_false);
3072}
3073
3074
3075
3076void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3077  ASSERT(expr->arguments()->length() == 0);
3078
3079  Label materialize_true, materialize_false;
3080  Label* if_true = NULL;
3081  Label* if_false = NULL;
3082  Label* fall_through = NULL;
3083  context()->PrepareTest(&materialize_true, &materialize_false,
3084                         &if_true, &if_false, &fall_through);
3085
3086  // Get the frame pointer for the calling frame.
3087  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3088
3089  // Skip the arguments adaptor frame if it exists.
3090  Label check_frame_marker;
3091  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3092         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3093  __ j(not_equal, &check_frame_marker);
3094  __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3095
3096  // Check the marker in the calling frame.
3097  __ bind(&check_frame_marker);
3098  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3099         Smi::FromInt(StackFrame::CONSTRUCT));
3100  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3101  Split(equal, if_true, if_false, fall_through);
3102
3103  context()->Plug(if_true, if_false);
3104}
3105
3106
3107void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3108  ZoneList<Expression*>* args = expr->arguments();
3109  ASSERT(args->length() == 2);
3110
3111  // Load the two objects into registers and perform the comparison.
3112  VisitForStackValue(args->at(0));
3113  VisitForAccumulatorValue(args->at(1));
3114
3115  Label materialize_true, materialize_false;
3116  Label* if_true = NULL;
3117  Label* if_false = NULL;
3118  Label* fall_through = NULL;
3119  context()->PrepareTest(&materialize_true, &materialize_false,
3120                         &if_true, &if_false, &fall_through);
3121
3122  __ Pop(rbx);
3123  __ cmpp(rax, rbx);
3124  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3125  Split(equal, if_true, if_false, fall_through);
3126
3127  context()->Plug(if_true, if_false);
3128}
3129
3130
3131void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3132  ZoneList<Expression*>* args = expr->arguments();
3133  ASSERT(args->length() == 1);
3134
3135  // ArgumentsAccessStub expects the key in rdx and the formal
3136  // parameter count in rax.
3137  VisitForAccumulatorValue(args->at(0));
3138  __ movp(rdx, rax);
3139  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3140  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3141  __ CallStub(&stub);
3142  context()->Plug(rax);
3143}
3144
3145
3146void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3147  ASSERT(expr->arguments()->length() == 0);
3148
3149  Label exit;
3150  // Get the number of formal parameters.
3151  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3152
3153  // Check if the calling frame is an arguments adaptor frame.
3154  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3155  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3156         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3157  __ j(not_equal, &exit, Label::kNear);
3158
3159  // Arguments adaptor case: Read the arguments length from the
3160  // adaptor frame.
3161  __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3162
3163  __ bind(&exit);
3164  __ AssertSmi(rax);
3165  context()->Plug(rax);
3166}
3167
3168
3169void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3170  ZoneList<Expression*>* args = expr->arguments();
3171  ASSERT(args->length() == 1);
3172  Label done, null, function, non_function_constructor;
3173
3174  VisitForAccumulatorValue(args->at(0));
3175
3176  // If the object is a smi, we return null.
3177  __ JumpIfSmi(rax, &null);
3178
3179  // Check that the object is a JS object but take special care of JS
3180  // functions to make sure they have 'Function' as their class.
3181  // Assume that there are only two callable types, and one of them is at
3182  // either end of the type range for JS object types. Saves extra comparisons.
3183  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3184  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3185  // Map is now in rax.
3186  __ j(below, &null);
3187  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3188                FIRST_SPEC_OBJECT_TYPE + 1);
3189  __ j(equal, &function);
3190
3191  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3192  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3193                LAST_SPEC_OBJECT_TYPE - 1);
3194  __ j(equal, &function);
3195  // Assume that there is no larger type.
3196  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3197
3198  // Check if the constructor in the map is a JS function.
3199  __ movp(rax, FieldOperand(rax, Map::kConstructorOffset));
3200  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3201  __ j(not_equal, &non_function_constructor);
3202
3203  // rax now contains the constructor function. Grab the
3204  // instance class name from there.
3205  __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3206  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3207  __ jmp(&done);
3208
3209  // Functions have class 'Function'.
3210  __ bind(&function);
3211  __ Move(rax, isolate()->factory()->function_class_string());
3212  __ jmp(&done);
3213
3214  // Objects with a non-function constructor have class 'Object'.
3215  __ bind(&non_function_constructor);
3216  __ Move(rax, isolate()->factory()->Object_string());
3217  __ jmp(&done);
3218
3219  // Non-JS objects have class null.
3220  __ bind(&null);
3221  __ LoadRoot(rax, Heap::kNullValueRootIndex);
3222
3223  // All done.
3224  __ bind(&done);
3225
3226  context()->Plug(rax);
3227}
3228
3229
3230void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3231  // Load the arguments on the stack and call the stub.
3232  SubStringStub stub(isolate());
3233  ZoneList<Expression*>* args = expr->arguments();
3234  ASSERT(args->length() == 3);
3235  VisitForStackValue(args->at(0));
3236  VisitForStackValue(args->at(1));
3237  VisitForStackValue(args->at(2));
3238  __ CallStub(&stub);
3239  context()->Plug(rax);
3240}
3241
3242
3243void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3244  // Load the arguments on the stack and call the stub.
3245  RegExpExecStub stub(isolate());
3246  ZoneList<Expression*>* args = expr->arguments();
3247  ASSERT(args->length() == 4);
3248  VisitForStackValue(args->at(0));
3249  VisitForStackValue(args->at(1));
3250  VisitForStackValue(args->at(2));
3251  VisitForStackValue(args->at(3));
3252  __ CallStub(&stub);
3253  context()->Plug(rax);
3254}
3255
3256
3257void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3258  ZoneList<Expression*>* args = expr->arguments();
3259  ASSERT(args->length() == 1);
3260
3261  VisitForAccumulatorValue(args->at(0));  // Load the object.
3262
3263  Label done;
3264  // If the object is a smi return the object.
3265  __ JumpIfSmi(rax, &done);
3266  // If the object is not a value type, return the object.
3267  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3268  __ j(not_equal, &done);
3269  __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3270
3271  __ bind(&done);
3272  context()->Plug(rax);
3273}
3274
3275
3276void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3277  ZoneList<Expression*>* args = expr->arguments();
3278  ASSERT(args->length() == 2);
3279  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3280  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3281
3282  VisitForAccumulatorValue(args->at(0));  // Load the object.
3283
3284  Label runtime, done, not_date_object;
3285  Register object = rax;
3286  Register result = rax;
3287  Register scratch = rcx;
3288
3289  __ JumpIfSmi(object, &not_date_object);
3290  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3291  __ j(not_equal, &not_date_object);
3292
3293  if (index->value() == 0) {
3294    __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3295    __ jmp(&done);
3296  } else {
3297    if (index->value() < JSDate::kFirstUncachedField) {
3298      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3299      Operand stamp_operand = __ ExternalOperand(stamp);
3300      __ movp(scratch, stamp_operand);
3301      __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3302      __ j(not_equal, &runtime, Label::kNear);
3303      __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3304                                           kPointerSize * index->value()));
3305      __ jmp(&done);
3306    }
3307    __ bind(&runtime);
3308    __ PrepareCallCFunction(2);
3309    __ movp(arg_reg_1, object);
3310    __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3311    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3312    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3313    __ jmp(&done);
3314  }
3315
3316  __ bind(&not_date_object);
3317  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3318  __ bind(&done);
3319  context()->Plug(rax);
3320}
3321
3322
3323void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3324  ZoneList<Expression*>* args = expr->arguments();
3325  ASSERT_EQ(3, args->length());
3326
3327  Register string = rax;
3328  Register index = rbx;
3329  Register value = rcx;
3330
3331  VisitForStackValue(args->at(1));  // index
3332  VisitForStackValue(args->at(2));  // value
3333  VisitForAccumulatorValue(args->at(0));  // string
3334  __ Pop(value);
3335  __ Pop(index);
3336
3337  if (FLAG_debug_code) {
3338    __ Check(__ CheckSmi(value), kNonSmiValue);
3339    __ Check(__ CheckSmi(index), kNonSmiValue);
3340  }
3341
3342  __ SmiToInteger32(value, value);
3343  __ SmiToInteger32(index, index);
3344
3345  if (FLAG_debug_code) {
3346    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3347    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3348  }
3349
3350  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3351          value);
3352  context()->Plug(string);
3353}
3354
3355
3356void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3357  ZoneList<Expression*>* args = expr->arguments();
3358  ASSERT_EQ(3, args->length());
3359
3360  Register string = rax;
3361  Register index = rbx;
3362  Register value = rcx;
3363
3364  VisitForStackValue(args->at(1));  // index
3365  VisitForStackValue(args->at(2));  // value
3366  VisitForAccumulatorValue(args->at(0));  // string
3367  __ Pop(value);
3368  __ Pop(index);
3369
3370  if (FLAG_debug_code) {
3371    __ Check(__ CheckSmi(value), kNonSmiValue);
3372    __ Check(__ CheckSmi(index), kNonSmiValue);
3373  }
3374
3375  __ SmiToInteger32(value, value);
3376  __ SmiToInteger32(index, index);
3377
3378  if (FLAG_debug_code) {
3379    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3380    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3381  }
3382
3383  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3384          value);
3385  context()->Plug(rax);
3386}
3387
3388
3389void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3390  // Load the arguments on the stack and call the runtime function.
3391  ZoneList<Expression*>* args = expr->arguments();
3392  ASSERT(args->length() == 2);
3393  VisitForStackValue(args->at(0));
3394  VisitForStackValue(args->at(1));
3395  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3396  __ CallStub(&stub);
3397  context()->Plug(rax);
3398}
3399
3400
3401void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3402  ZoneList<Expression*>* args = expr->arguments();
3403  ASSERT(args->length() == 2);
3404
3405  VisitForStackValue(args->at(0));  // Load the object.
3406  VisitForAccumulatorValue(args->at(1));  // Load the value.
3407  __ Pop(rbx);  // rax = value. rbx = object.
3408
3409  Label done;
3410  // If the object is a smi, return the value.
3411  __ JumpIfSmi(rbx, &done);
3412
3413  // If the object is not a value type, return the value.
3414  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3415  __ j(not_equal, &done);
3416
3417  // Store the value.
3418  __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3419  // Update the write barrier.  Save the value as it will be
3420  // overwritten by the write barrier code and is needed afterward.
3421  __ movp(rdx, rax);
3422  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3423
3424  __ bind(&done);
3425  context()->Plug(rax);
3426}
3427
3428
3429void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3430  ZoneList<Expression*>* args = expr->arguments();
3431  ASSERT_EQ(args->length(), 1);
3432
3433  // Load the argument into rax and call the stub.
3434  VisitForAccumulatorValue(args->at(0));
3435
3436  NumberToStringStub stub(isolate());
3437  __ CallStub(&stub);
3438  context()->Plug(rax);
3439}
3440
3441
3442void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3443  ZoneList<Expression*>* args = expr->arguments();
3444  ASSERT(args->length() == 1);
3445
3446  VisitForAccumulatorValue(args->at(0));
3447
3448  Label done;
3449  StringCharFromCodeGenerator generator(rax, rbx);
3450  generator.GenerateFast(masm_);
3451  __ jmp(&done);
3452
3453  NopRuntimeCallHelper call_helper;
3454  generator.GenerateSlow(masm_, call_helper);
3455
3456  __ bind(&done);
3457  context()->Plug(rbx);
3458}
3459
3460
3461void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3462  ZoneList<Expression*>* args = expr->arguments();
3463  ASSERT(args->length() == 2);
3464
3465  VisitForStackValue(args->at(0));
3466  VisitForAccumulatorValue(args->at(1));
3467
3468  Register object = rbx;
3469  Register index = rax;
3470  Register result = rdx;
3471
3472  __ Pop(object);
3473
3474  Label need_conversion;
3475  Label index_out_of_range;
3476  Label done;
3477  StringCharCodeAtGenerator generator(object,
3478                                      index,
3479                                      result,
3480                                      &need_conversion,
3481                                      &need_conversion,
3482                                      &index_out_of_range,
3483                                      STRING_INDEX_IS_NUMBER);
3484  generator.GenerateFast(masm_);
3485  __ jmp(&done);
3486
3487  __ bind(&index_out_of_range);
3488  // When the index is out of range, the spec requires us to return
3489  // NaN.
3490  __ LoadRoot(result, Heap::kNanValueRootIndex);
3491  __ jmp(&done);
3492
3493  __ bind(&need_conversion);
3494  // Move the undefined value into the result register, which will
3495  // trigger conversion.
3496  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3497  __ jmp(&done);
3498
3499  NopRuntimeCallHelper call_helper;
3500  generator.GenerateSlow(masm_, call_helper);
3501
3502  __ bind(&done);
3503  context()->Plug(result);
3504}
3505
3506
3507void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3508  ZoneList<Expression*>* args = expr->arguments();
3509  ASSERT(args->length() == 2);
3510
3511  VisitForStackValue(args->at(0));
3512  VisitForAccumulatorValue(args->at(1));
3513
3514  Register object = rbx;
3515  Register index = rax;
3516  Register scratch = rdx;
3517  Register result = rax;
3518
3519  __ Pop(object);
3520
3521  Label need_conversion;
3522  Label index_out_of_range;
3523  Label done;
3524  StringCharAtGenerator generator(object,
3525                                  index,
3526                                  scratch,
3527                                  result,
3528                                  &need_conversion,
3529                                  &need_conversion,
3530                                  &index_out_of_range,
3531                                  STRING_INDEX_IS_NUMBER);
3532  generator.GenerateFast(masm_);
3533  __ jmp(&done);
3534
3535  __ bind(&index_out_of_range);
3536  // When the index is out of range, the spec requires us to return
3537  // the empty string.
3538  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3539  __ jmp(&done);
3540
3541  __ bind(&need_conversion);
3542  // Move smi zero into the result register, which will trigger
3543  // conversion.
3544  __ Move(result, Smi::FromInt(0));
3545  __ jmp(&done);
3546
3547  NopRuntimeCallHelper call_helper;
3548  generator.GenerateSlow(masm_, call_helper);
3549
3550  __ bind(&done);
3551  context()->Plug(result);
3552}
3553
3554
3555void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3556  ZoneList<Expression*>* args = expr->arguments();
3557  ASSERT_EQ(2, args->length());
3558  VisitForStackValue(args->at(0));
3559  VisitForAccumulatorValue(args->at(1));
3560
3561  __ Pop(rdx);
3562  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3563  __ CallStub(&stub);
3564  context()->Plug(rax);
3565}
3566
3567
3568void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3569  ZoneList<Expression*>* args = expr->arguments();
3570  ASSERT_EQ(2, args->length());
3571
3572  VisitForStackValue(args->at(0));
3573  VisitForStackValue(args->at(1));
3574
3575  StringCompareStub stub(isolate());
3576  __ CallStub(&stub);
3577  context()->Plug(rax);
3578}
3579
3580
3581void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3582  ZoneList<Expression*>* args = expr->arguments();
3583  ASSERT(args->length() >= 2);
3584
3585  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
3586  for (int i = 0; i < arg_count + 1; i++) {
3587    VisitForStackValue(args->at(i));
3588  }
3589  VisitForAccumulatorValue(args->last());  // Function.
3590
3591  Label runtime, done;
3592  // Check for non-function argument (including proxy).
3593  __ JumpIfSmi(rax, &runtime);
3594  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3595  __ j(not_equal, &runtime);
3596
3597  // InvokeFunction requires the function in rdi. Move it in there.
3598  __ movp(rdi, result_register());
3599  ParameterCount count(arg_count);
3600  __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
3601  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3602  __ jmp(&done);
3603
3604  __ bind(&runtime);
3605  __ Push(rax);
3606  __ CallRuntime(Runtime::kCall, args->length());
3607  __ bind(&done);
3608
3609  context()->Plug(rax);
3610}
3611
3612
3613void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3614  RegExpConstructResultStub stub(isolate());
3615  ZoneList<Expression*>* args = expr->arguments();
3616  ASSERT(args->length() == 3);
3617  VisitForStackValue(args->at(0));
3618  VisitForStackValue(args->at(1));
3619  VisitForAccumulatorValue(args->at(2));
3620  __ Pop(rbx);
3621  __ Pop(rcx);
3622  __ CallStub(&stub);
3623  context()->Plug(rax);
3624}
3625
3626
3627void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3628  ZoneList<Expression*>* args = expr->arguments();
3629  ASSERT_EQ(2, args->length());
3630
3631  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3632  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3633
3634  Handle<FixedArray> jsfunction_result_caches(
3635      isolate()->native_context()->jsfunction_result_caches());
3636  if (jsfunction_result_caches->length() <= cache_id) {
3637    __ Abort(kAttemptToUseUndefinedCache);
3638    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3639    context()->Plug(rax);
3640    return;
3641  }
3642
3643  VisitForAccumulatorValue(args->at(1));
3644
3645  Register key = rax;
3646  Register cache = rbx;
3647  Register tmp = rcx;
3648  __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3649  __ movp(cache,
3650          FieldOperand(cache, GlobalObject::kNativeContextOffset));
3651  __ movp(cache,
3652          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3653  __ movp(cache,
3654          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3655
3656  Label done, not_found;
3657  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3658  __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3659  // tmp now holds finger offset as a smi.
3660  SmiIndex index =
3661      __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3662  __ cmpp(key, FieldOperand(cache,
3663                            index.reg,
3664                            index.scale,
3665                            FixedArray::kHeaderSize));
3666  __ j(not_equal, &not_found, Label::kNear);
3667  __ movp(rax, FieldOperand(cache,
3668                            index.reg,
3669                            index.scale,
3670                            FixedArray::kHeaderSize + kPointerSize));
3671  __ jmp(&done, Label::kNear);
3672
3673  __ bind(&not_found);
3674  // Call runtime to perform the lookup.
3675  __ Push(cache);
3676  __ Push(key);
3677  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3678
3679  __ bind(&done);
3680  context()->Plug(rax);
3681}
3682
3683
3684void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3685  ZoneList<Expression*>* args = expr->arguments();
3686  ASSERT(args->length() == 1);
3687
3688  VisitForAccumulatorValue(args->at(0));
3689
3690  Label materialize_true, materialize_false;
3691  Label* if_true = NULL;
3692  Label* if_false = NULL;
3693  Label* fall_through = NULL;
3694  context()->PrepareTest(&materialize_true, &materialize_false,
3695                         &if_true, &if_false, &fall_through);
3696
3697  __ testl(FieldOperand(rax, String::kHashFieldOffset),
3698           Immediate(String::kContainsCachedArrayIndexMask));
3699  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3700  __ j(zero, if_true);
3701  __ jmp(if_false);
3702
3703  context()->Plug(if_true, if_false);
3704}
3705
3706
3707void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3708  ZoneList<Expression*>* args = expr->arguments();
3709  ASSERT(args->length() == 1);
3710  VisitForAccumulatorValue(args->at(0));
3711
3712  __ AssertString(rax);
3713
3714  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3715  ASSERT(String::kHashShift >= kSmiTagSize);
3716  __ IndexFromHash(rax, rax);
3717
3718  context()->Plug(rax);
3719}
3720
3721
3722void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3723  Label bailout, return_result, done, one_char_separator, long_separator,
3724      non_trivial_array, not_size_one_array, loop,
3725      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3726  ZoneList<Expression*>* args = expr->arguments();
3727  ASSERT(args->length() == 2);
3728  // We will leave the separator on the stack until the end of the function.
3729  VisitForStackValue(args->at(1));
3730  // Load this to rax (= array)
3731  VisitForAccumulatorValue(args->at(0));
3732  // All aliases of the same register have disjoint lifetimes.
3733  Register array = rax;
3734  Register elements = no_reg;  // Will be rax.
3735
3736  Register index = rdx;
3737
3738  Register string_length = rcx;
3739
3740  Register string = rsi;
3741
3742  Register scratch = rbx;
3743
3744  Register array_length = rdi;
3745  Register result_pos = no_reg;  // Will be rdi.
3746
3747  Operand separator_operand =    Operand(rsp, 2 * kPointerSize);
3748  Operand result_operand =       Operand(rsp, 1 * kPointerSize);
3749  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3750  // Separator operand is already pushed. Make room for the two
3751  // other stack fields, and clear the direction flag in anticipation
3752  // of calling CopyBytes.
3753  __ subp(rsp, Immediate(2 * kPointerSize));
3754  __ cld();
3755  // Check that the array is a JSArray
3756  __ JumpIfSmi(array, &bailout);
3757  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3758  __ j(not_equal, &bailout);
3759
3760  // Check that the array has fast elements.
3761  __ CheckFastElements(scratch, &bailout);
3762
3763  // Array has fast elements, so its length must be a smi.
3764  // If the array has length zero, return the empty string.
3765  __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3766  __ SmiCompare(array_length, Smi::FromInt(0));
3767  __ j(not_zero, &non_trivial_array);
3768  __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3769  __ jmp(&return_result);
3770
3771  // Save the array length on the stack.
3772  __ bind(&non_trivial_array);
3773  __ SmiToInteger32(array_length, array_length);
3774  __ movl(array_length_operand, array_length);
3775
3776  // Save the FixedArray containing array's elements.
3777  // End of array's live range.
3778  elements = array;
3779  __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3780  array = no_reg;
3781
3782
3783  // Check that all array elements are sequential ASCII strings, and
3784  // accumulate the sum of their lengths, as a smi-encoded value.
3785  __ Set(index, 0);
3786  __ Set(string_length, 0);
3787  // Loop condition: while (index < array_length).
3788  // Live loop registers: index(int32), array_length(int32), string(String*),
3789  //                      scratch, string_length(int32), elements(FixedArray*).
3790  if (generate_debug_code_) {
3791    __ cmpp(index, array_length);
3792    __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3793  }
3794  __ bind(&loop);
3795  __ movp(string, FieldOperand(elements,
3796                               index,
3797                               times_pointer_size,
3798                               FixedArray::kHeaderSize));
3799  __ JumpIfSmi(string, &bailout);
3800  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3801  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3802  __ andb(scratch, Immediate(
3803      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3804  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3805  __ j(not_equal, &bailout);
3806  __ AddSmiField(string_length,
3807                 FieldOperand(string, SeqOneByteString::kLengthOffset));
3808  __ j(overflow, &bailout);
3809  __ incl(index);
3810  __ cmpl(index, array_length);
3811  __ j(less, &loop);
3812
3813  // Live registers:
3814  // string_length: Sum of string lengths.
3815  // elements: FixedArray of strings.
3816  // index: Array length.
3817  // array_length: Array length.
3818
3819  // If array_length is 1, return elements[0], a string.
3820  __ cmpl(array_length, Immediate(1));
3821  __ j(not_equal, &not_size_one_array);
3822  __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3823  __ jmp(&return_result);
3824
3825  __ bind(&not_size_one_array);
3826
3827  // End of array_length live range.
3828  result_pos = array_length;
3829  array_length = no_reg;
3830
3831  // Live registers:
3832  // string_length: Sum of string lengths.
3833  // elements: FixedArray of strings.
3834  // index: Array length.
3835
3836  // Check that the separator is a sequential ASCII string.
3837  __ movp(string, separator_operand);
3838  __ JumpIfSmi(string, &bailout);
3839  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3840  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3841  __ andb(scratch, Immediate(
3842      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3843  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3844  __ j(not_equal, &bailout);
3845
3846  // Live registers:
3847  // string_length: Sum of string lengths.
3848  // elements: FixedArray of strings.
3849  // index: Array length.
3850  // string: Separator string.
3851
3852  // Add (separator length times (array_length - 1)) to string_length.
3853  __ SmiToInteger32(scratch,
3854                    FieldOperand(string, SeqOneByteString::kLengthOffset));
3855  __ decl(index);
3856  __ imull(scratch, index);
3857  __ j(overflow, &bailout);
3858  __ addl(string_length, scratch);
3859  __ j(overflow, &bailout);
3860
3861  // Live registers and stack values:
3862  //   string_length: Total length of result string.
3863  //   elements: FixedArray of strings.
3864  __ AllocateAsciiString(result_pos, string_length, scratch,
3865                         index, string, &bailout);
3866  __ movp(result_operand, result_pos);
3867  __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
3868
3869  __ movp(string, separator_operand);
3870  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
3871                Smi::FromInt(1));
3872  __ j(equal, &one_char_separator);
3873  __ j(greater, &long_separator);
3874
3875
3876  // Empty separator case:
3877  __ Set(index, 0);
3878  __ movl(scratch, array_length_operand);
3879  __ jmp(&loop_1_condition);
3880  // Loop condition: while (index < array_length).
3881  __ bind(&loop_1);
3882  // Each iteration of the loop concatenates one string to the result.
3883  // Live values in registers:
3884  //   index: which element of the elements array we are adding to the result.
3885  //   result_pos: the position to which we are currently copying characters.
3886  //   elements: the FixedArray of strings we are joining.
3887  //   scratch: array length.
3888
3889  // Get string = array[index].
3890  __ movp(string, FieldOperand(elements, index,
3891                               times_pointer_size,
3892                               FixedArray::kHeaderSize));
3893  __ SmiToInteger32(string_length,
3894                    FieldOperand(string, String::kLengthOffset));
3895  __ leap(string,
3896         FieldOperand(string, SeqOneByteString::kHeaderSize));
3897  __ CopyBytes(result_pos, string, string_length);
3898  __ incl(index);
3899  __ bind(&loop_1_condition);
3900  __ cmpl(index, scratch);
3901  __ j(less, &loop_1);  // Loop while (index < array_length).
3902  __ jmp(&done);
3903
3904  // Generic bailout code used from several places.
3905  __ bind(&bailout);
3906  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3907  __ jmp(&return_result);
3908
3909
3910  // One-character separator case
3911  __ bind(&one_char_separator);
3912  // Get the separator ASCII character value.
3913  // Register "string" holds the separator.
3914  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
3915  __ Set(index, 0);
3916  // Jump into the loop after the code that copies the separator, so the first
3917  // element is not preceded by a separator
3918  __ jmp(&loop_2_entry);
3919  // Loop condition: while (index < length).
3920  __ bind(&loop_2);
3921  // Each iteration of the loop concatenates one string to the result.
3922  // Live values in registers:
3923  //   elements: The FixedArray of strings we are joining.
3924  //   index: which element of the elements array we are adding to the result.
3925  //   result_pos: the position to which we are currently copying characters.
3926  //   scratch: Separator character.
3927
3928  // Copy the separator character to the result.
3929  __ movb(Operand(result_pos, 0), scratch);
3930  __ incp(result_pos);
3931
3932  __ bind(&loop_2_entry);
3933  // Get string = array[index].
3934  __ movp(string, FieldOperand(elements, index,
3935                               times_pointer_size,
3936                               FixedArray::kHeaderSize));
3937  __ SmiToInteger32(string_length,
3938                    FieldOperand(string, String::kLengthOffset));
3939  __ leap(string,
3940         FieldOperand(string, SeqOneByteString::kHeaderSize));
3941  __ CopyBytes(result_pos, string, string_length);
3942  __ incl(index);
3943  __ cmpl(index, array_length_operand);
3944  __ j(less, &loop_2);  // End while (index < length).
3945  __ jmp(&done);
3946
3947
3948  // Long separator case (separator is more than one character).
3949  __ bind(&long_separator);
3950
3951  // Make elements point to end of elements array, and index
3952  // count from -array_length to zero, so we don't need to maintain
3953  // a loop limit.
3954  __ movl(index, array_length_operand);
3955  __ leap(elements, FieldOperand(elements, index, times_pointer_size,
3956                                FixedArray::kHeaderSize));
3957  __ negq(index);
3958
3959  // Replace separator string with pointer to its first character, and
3960  // make scratch be its length.
3961  __ movp(string, separator_operand);
3962  __ SmiToInteger32(scratch,
3963                    FieldOperand(string, String::kLengthOffset));
3964  __ leap(string,
3965         FieldOperand(string, SeqOneByteString::kHeaderSize));
3966  __ movp(separator_operand, string);
3967
3968  // Jump into the loop after the code that copies the separator, so the first
3969  // element is not preceded by a separator
3970  __ jmp(&loop_3_entry);
3971  // Loop condition: while (index < length).
3972  __ bind(&loop_3);
3973  // Each iteration of the loop concatenates one string to the result.
3974  // Live values in registers:
3975  //   index: which element of the elements array we are adding to the result.
3976  //   result_pos: the position to which we are currently copying characters.
3977  //   scratch: Separator length.
3978  //   separator_operand (rsp[0x10]): Address of first char of separator.
3979
3980  // Copy the separator to the result.
3981  __ movp(string, separator_operand);
3982  __ movl(string_length, scratch);
3983  __ CopyBytes(result_pos, string, string_length, 2);
3984
3985  __ bind(&loop_3_entry);
3986  // Get string = array[index].
3987  __ movp(string, Operand(elements, index, times_pointer_size, 0));
3988  __ SmiToInteger32(string_length,
3989                    FieldOperand(string, String::kLengthOffset));
3990  __ leap(string,
3991         FieldOperand(string, SeqOneByteString::kHeaderSize));
3992  __ CopyBytes(result_pos, string, string_length);
3993  __ incq(index);
3994  __ j(not_equal, &loop_3);  // Loop while (index < 0).
3995
3996  __ bind(&done);
3997  __ movp(rax, result_operand);
3998
3999  __ bind(&return_result);
4000  // Drop temp values from the stack, and restore context register.
4001  __ addp(rsp, Immediate(3 * kPointerSize));
4002  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4003  context()->Plug(rax);
4004}
4005
4006
4007void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4008  if (expr->function() != NULL &&
4009      expr->function()->intrinsic_type == Runtime::INLINE) {
4010    Comment cmnt(masm_, "[ InlineRuntimeCall");
4011    EmitInlineRuntimeCall(expr);
4012    return;
4013  }
4014
4015  Comment cmnt(masm_, "[ CallRuntime");
4016  ZoneList<Expression*>* args = expr->arguments();
4017  int arg_count = args->length();
4018
4019  if (expr->is_jsruntime()) {
4020    // Push the builtins object as receiver.
4021    __ movp(rax, GlobalObjectOperand());
4022    __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4023
4024    // Load the function from the receiver.
4025    __ movp(rax, Operand(rsp, 0));
4026    __ Move(rcx, expr->name());
4027    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4028
4029    // Push the target function under the receiver.
4030    __ Push(Operand(rsp, 0));
4031    __ movp(Operand(rsp, kPointerSize), rax);
4032
4033    // Push the arguments ("left-to-right").
4034    for (int i = 0; i < arg_count; i++) {
4035      VisitForStackValue(args->at(i));
4036    }
4037
4038    // Record source position of the IC call.
4039    SetSourcePosition(expr->position());
4040    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4041    __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4042    __ CallStub(&stub);
4043
4044    // Restore context register.
4045    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4046    context()->DropAndPlug(1, rax);
4047
4048  } else {
4049    // Push the arguments ("left-to-right").
4050    for (int i = 0; i < arg_count; i++) {
4051      VisitForStackValue(args->at(i));
4052    }
4053
4054    // Call the C runtime.
4055    __ CallRuntime(expr->function(), arg_count);
4056    context()->Plug(rax);
4057  }
4058}
4059
4060
4061void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4062  switch (expr->op()) {
4063    case Token::DELETE: {
4064      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4065      Property* property = expr->expression()->AsProperty();
4066      VariableProxy* proxy = expr->expression()->AsVariableProxy();
4067
4068      if (property != NULL) {
4069        VisitForStackValue(property->obj());
4070        VisitForStackValue(property->key());
4071        __ Push(Smi::FromInt(strict_mode()));
4072        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4073        context()->Plug(rax);
4074      } else if (proxy != NULL) {
4075        Variable* var = proxy->var();
4076        // Delete of an unqualified identifier is disallowed in strict mode
4077        // but "delete this" is allowed.
4078        ASSERT(strict_mode() == SLOPPY || var->is_this());
4079        if (var->IsUnallocated()) {
4080          __ Push(GlobalObjectOperand());
4081          __ Push(var->name());
4082          __ Push(Smi::FromInt(SLOPPY));
4083          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4084          context()->Plug(rax);
4085        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4086          // Result of deleting non-global variables is false.  'this' is
4087          // not really a variable, though we implement it as one.  The
4088          // subexpression does not have side effects.
4089          context()->Plug(var->is_this());
4090        } else {
4091          // Non-global variable.  Call the runtime to try to delete from the
4092          // context where the variable was introduced.
4093          __ Push(context_register());
4094          __ Push(var->name());
4095          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4096          context()->Plug(rax);
4097        }
4098      } else {
4099        // Result of deleting non-property, non-variable reference is true.
4100        // The subexpression may have side effects.
4101        VisitForEffect(expr->expression());
4102        context()->Plug(true);
4103      }
4104      break;
4105    }
4106
4107    case Token::VOID: {
4108      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4109      VisitForEffect(expr->expression());
4110      context()->Plug(Heap::kUndefinedValueRootIndex);
4111      break;
4112    }
4113
4114    case Token::NOT: {
4115      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4116      if (context()->IsEffect()) {
4117        // Unary NOT has no side effects so it's only necessary to visit the
4118        // subexpression.  Match the optimizing compiler by not branching.
4119        VisitForEffect(expr->expression());
4120      } else if (context()->IsTest()) {
4121        const TestContext* test = TestContext::cast(context());
4122        // The labels are swapped for the recursive call.
4123        VisitForControl(expr->expression(),
4124                        test->false_label(),
4125                        test->true_label(),
4126                        test->fall_through());
4127        context()->Plug(test->true_label(), test->false_label());
4128      } else {
4129        // We handle value contexts explicitly rather than simply visiting
4130        // for control and plugging the control flow into the context,
4131        // because we need to prepare a pair of extra administrative AST ids
4132        // for the optimizing compiler.
4133        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4134        Label materialize_true, materialize_false, done;
4135        VisitForControl(expr->expression(),
4136                        &materialize_false,
4137                        &materialize_true,
4138                        &materialize_true);
4139        __ bind(&materialize_true);
4140        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4141        if (context()->IsAccumulatorValue()) {
4142          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4143        } else {
4144          __ PushRoot(Heap::kTrueValueRootIndex);
4145        }
4146        __ jmp(&done, Label::kNear);
4147        __ bind(&materialize_false);
4148        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4149        if (context()->IsAccumulatorValue()) {
4150          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4151        } else {
4152          __ PushRoot(Heap::kFalseValueRootIndex);
4153        }
4154        __ bind(&done);
4155      }
4156      break;
4157    }
4158
4159    case Token::TYPEOF: {
4160      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4161      { StackValueContext context(this);
4162        VisitForTypeofValue(expr->expression());
4163      }
4164      __ CallRuntime(Runtime::kTypeof, 1);
4165      context()->Plug(rax);
4166      break;
4167    }
4168
4169    default:
4170      UNREACHABLE();
4171  }
4172}
4173
4174
4175void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4176  ASSERT(expr->expression()->IsValidReferenceExpression());
4177
4178  Comment cmnt(masm_, "[ CountOperation");
4179  SetSourcePosition(expr->position());
4180
4181  // Expression can only be a property, a global or a (parameter or local)
4182  // slot.
4183  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4184  LhsKind assign_type = VARIABLE;
4185  Property* prop = expr->expression()->AsProperty();
4186  // In case of a property we use the uninitialized expression context
4187  // of the key to detect a named property.
4188  if (prop != NULL) {
4189    assign_type =
4190        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4191  }
4192
4193  // Evaluate expression and get value.
4194  if (assign_type == VARIABLE) {
4195    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4196    AccumulatorValueContext context(this);
4197    EmitVariableLoad(expr->expression()->AsVariableProxy());
4198  } else {
4199    // Reserve space for result of postfix operation.
4200    if (expr->is_postfix() && !context()->IsEffect()) {
4201      __ Push(Smi::FromInt(0));
4202    }
4203    if (assign_type == NAMED_PROPERTY) {
4204      VisitForAccumulatorValue(prop->obj());
4205      __ Push(rax);  // Copy of receiver, needed for later store.
4206      EmitNamedPropertyLoad(prop);
4207    } else {
4208      VisitForStackValue(prop->obj());
4209      VisitForAccumulatorValue(prop->key());
4210      __ movp(rdx, Operand(rsp, 0));  // Leave receiver on stack
4211      __ Push(rax);  // Copy of key, needed for later store.
4212      EmitKeyedPropertyLoad(prop);
4213    }
4214  }
4215
4216  // We need a second deoptimization point after loading the value
4217  // in case evaluating the property load my have a side effect.
4218  if (assign_type == VARIABLE) {
4219    PrepareForBailout(expr->expression(), TOS_REG);
4220  } else {
4221    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4222  }
4223
4224  // Inline smi case if we are in a loop.
4225  Label done, stub_call;
4226  JumpPatchSite patch_site(masm_);
4227  if (ShouldInlineSmiCase(expr->op())) {
4228    Label slow;
4229    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4230
4231    // Save result for postfix expressions.
4232    if (expr->is_postfix()) {
4233      if (!context()->IsEffect()) {
4234        // Save the result on the stack. If we have a named or keyed property
4235        // we store the result under the receiver that is currently on top
4236        // of the stack.
4237        switch (assign_type) {
4238          case VARIABLE:
4239            __ Push(rax);
4240            break;
4241          case NAMED_PROPERTY:
4242            __ movp(Operand(rsp, kPointerSize), rax);
4243            break;
4244          case KEYED_PROPERTY:
4245            __ movp(Operand(rsp, 2 * kPointerSize), rax);
4246            break;
4247        }
4248      }
4249    }
4250
4251    SmiOperationExecutionMode mode;
4252    mode.Add(PRESERVE_SOURCE_REGISTER);
4253    mode.Add(BAILOUT_ON_NO_OVERFLOW);
4254    if (expr->op() == Token::INC) {
4255      __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4256    } else {
4257      __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4258    }
4259    __ jmp(&stub_call, Label::kNear);
4260    __ bind(&slow);
4261  }
4262
4263  ToNumberStub convert_stub(isolate());
4264  __ CallStub(&convert_stub);
4265
4266  // Save result for postfix expressions.
4267  if (expr->is_postfix()) {
4268    if (!context()->IsEffect()) {
4269      // Save the result on the stack. If we have a named or keyed property
4270      // we store the result under the receiver that is currently on top
4271      // of the stack.
4272      switch (assign_type) {
4273        case VARIABLE:
4274          __ Push(rax);
4275          break;
4276        case NAMED_PROPERTY:
4277          __ movp(Operand(rsp, kPointerSize), rax);
4278          break;
4279        case KEYED_PROPERTY:
4280          __ movp(Operand(rsp, 2 * kPointerSize), rax);
4281          break;
4282      }
4283    }
4284  }
4285
4286  // Record position before stub call.
4287  SetSourcePosition(expr->position());
4288
4289  // Call stub for +1/-1.
4290  __ bind(&stub_call);
4291  __ movp(rdx, rax);
4292  __ Move(rax, Smi::FromInt(1));
4293  BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
4294  CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
4295  patch_site.EmitPatchInfo();
4296  __ bind(&done);
4297
4298  // Store the value returned in rax.
4299  switch (assign_type) {
4300    case VARIABLE:
4301      if (expr->is_postfix()) {
4302        // Perform the assignment as if via '='.
4303        { EffectContext context(this);
4304          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4305                                 Token::ASSIGN);
4306          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4307          context.Plug(rax);
4308        }
4309        // For all contexts except kEffect: We have the result on
4310        // top of the stack.
4311        if (!context()->IsEffect()) {
4312          context()->PlugTOS();
4313        }
4314      } else {
4315        // Perform the assignment as if via '='.
4316        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4317                               Token::ASSIGN);
4318        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4319        context()->Plug(rax);
4320      }
4321      break;
4322    case NAMED_PROPERTY: {
4323      __ Move(rcx, prop->key()->AsLiteral()->value());
4324      __ Pop(rdx);
4325      CallStoreIC(expr->CountStoreFeedbackId());
4326      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4327      if (expr->is_postfix()) {
4328        if (!context()->IsEffect()) {
4329          context()->PlugTOS();
4330        }
4331      } else {
4332        context()->Plug(rax);
4333      }
4334      break;
4335    }
4336    case KEYED_PROPERTY: {
4337      __ Pop(rcx);
4338      __ Pop(rdx);
4339      Handle<Code> ic = strict_mode() == SLOPPY
4340          ? isolate()->builtins()->KeyedStoreIC_Initialize()
4341          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4342      CallIC(ic, expr->CountStoreFeedbackId());
4343      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4344      if (expr->is_postfix()) {
4345        if (!context()->IsEffect()) {
4346          context()->PlugTOS();
4347        }
4348      } else {
4349        context()->Plug(rax);
4350      }
4351      break;
4352    }
4353  }
4354}
4355
4356
4357void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4358  VariableProxy* proxy = expr->AsVariableProxy();
4359  ASSERT(!context()->IsEffect());
4360  ASSERT(!context()->IsTest());
4361
4362  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4363    Comment cmnt(masm_, "[ Global variable");
4364    __ Move(rcx, proxy->name());
4365    __ movp(rax, GlobalObjectOperand());
4366    // Use a regular load, not a contextual load, to avoid a reference
4367    // error.
4368    CallLoadIC(NOT_CONTEXTUAL);
4369    PrepareForBailout(expr, TOS_REG);
4370    context()->Plug(rax);
4371  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4372    Comment cmnt(masm_, "[ Lookup slot");
4373    Label done, slow;
4374
4375    // Generate code for loading from variables potentially shadowed
4376    // by eval-introduced variables.
4377    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4378
4379    __ bind(&slow);
4380    __ Push(rsi);
4381    __ Push(proxy->name());
4382    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4383    PrepareForBailout(expr, TOS_REG);
4384    __ bind(&done);
4385
4386    context()->Plug(rax);
4387  } else {
4388    // This expression cannot throw a reference error at the top level.
4389    VisitInDuplicateContext(expr);
4390  }
4391}
4392
4393
4394void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4395                                                 Expression* sub_expr,
4396                                                 Handle<String> check) {
4397  Label materialize_true, materialize_false;
4398  Label* if_true = NULL;
4399  Label* if_false = NULL;
4400  Label* fall_through = NULL;
4401  context()->PrepareTest(&materialize_true, &materialize_false,
4402                         &if_true, &if_false, &fall_through);
4403
4404  { AccumulatorValueContext context(this);
4405    VisitForTypeofValue(sub_expr);
4406  }
4407  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4408
4409  Factory* factory = isolate()->factory();
4410  if (String::Equals(check, factory->number_string())) {
4411    __ JumpIfSmi(rax, if_true);
4412    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4413    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4414    Split(equal, if_true, if_false, fall_through);
4415  } else if (String::Equals(check, factory->string_string())) {
4416    __ JumpIfSmi(rax, if_false);
4417    // Check for undetectable objects => false.
4418    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4419    __ j(above_equal, if_false);
4420    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4421             Immediate(1 << Map::kIsUndetectable));
4422    Split(zero, if_true, if_false, fall_through);
4423  } else if (String::Equals(check, factory->symbol_string())) {
4424    __ JumpIfSmi(rax, if_false);
4425    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4426    Split(equal, if_true, if_false, fall_through);
4427  } else if (String::Equals(check, factory->boolean_string())) {
4428    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4429    __ j(equal, if_true);
4430    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4431    Split(equal, if_true, if_false, fall_through);
4432  } else if (FLAG_harmony_typeof &&
4433             String::Equals(check, factory->null_string())) {
4434    __ CompareRoot(rax, Heap::kNullValueRootIndex);
4435    Split(equal, if_true, if_false, fall_through);
4436  } else if (String::Equals(check, factory->undefined_string())) {
4437    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4438    __ j(equal, if_true);
4439    __ JumpIfSmi(rax, if_false);
4440    // Check for undetectable objects => true.
4441    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4442    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4443             Immediate(1 << Map::kIsUndetectable));
4444    Split(not_zero, if_true, if_false, fall_through);
4445  } else if (String::Equals(check, factory->function_string())) {
4446    __ JumpIfSmi(rax, if_false);
4447    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4448    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4449    __ j(equal, if_true);
4450    __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4451    Split(equal, if_true, if_false, fall_through);
4452  } else if (String::Equals(check, factory->object_string())) {
4453    __ JumpIfSmi(rax, if_false);
4454    if (!FLAG_harmony_typeof) {
4455      __ CompareRoot(rax, Heap::kNullValueRootIndex);
4456      __ j(equal, if_true);
4457    }
4458    __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4459    __ j(below, if_false);
4460    __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4461    __ j(above, if_false);
4462    // Check for undetectable objects => false.
4463    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4464             Immediate(1 << Map::kIsUndetectable));
4465    Split(zero, if_true, if_false, fall_through);
4466  } else {
4467    if (if_false != fall_through) __ jmp(if_false);
4468  }
4469  context()->Plug(if_true, if_false);
4470}
4471
4472
4473void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4474  Comment cmnt(masm_, "[ CompareOperation");
4475  SetSourcePosition(expr->position());
4476
4477  // First we try a fast inlined version of the compare when one of
4478  // the operands is a literal.
4479  if (TryLiteralCompare(expr)) return;
4480
4481  // Always perform the comparison for its control flow.  Pack the result
4482  // into the expression's context after the comparison is performed.
4483  Label materialize_true, materialize_false;
4484  Label* if_true = NULL;
4485  Label* if_false = NULL;
4486  Label* fall_through = NULL;
4487  context()->PrepareTest(&materialize_true, &materialize_false,
4488                         &if_true, &if_false, &fall_through);
4489
4490  Token::Value op = expr->op();
4491  VisitForStackValue(expr->left());
4492  switch (op) {
4493    case Token::IN:
4494      VisitForStackValue(expr->right());
4495      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4496      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4497      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4498      Split(equal, if_true, if_false, fall_through);
4499      break;
4500
4501    case Token::INSTANCEOF: {
4502      VisitForStackValue(expr->right());
4503      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4504      __ CallStub(&stub);
4505      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4506      __ testp(rax, rax);
4507       // The stub returns 0 for true.
4508      Split(zero, if_true, if_false, fall_through);
4509      break;
4510    }
4511
4512    default: {
4513      VisitForAccumulatorValue(expr->right());
4514      Condition cc = CompareIC::ComputeCondition(op);
4515      __ Pop(rdx);
4516
4517      bool inline_smi_code = ShouldInlineSmiCase(op);
4518      JumpPatchSite patch_site(masm_);
4519      if (inline_smi_code) {
4520        Label slow_case;
4521        __ movp(rcx, rdx);
4522        __ orp(rcx, rax);
4523        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4524        __ cmpp(rdx, rax);
4525        Split(cc, if_true, if_false, NULL);
4526        __ bind(&slow_case);
4527      }
4528
4529      // Record position and call the compare IC.
4530      SetSourcePosition(expr->position());
4531      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4532      CallIC(ic, expr->CompareOperationFeedbackId());
4533      patch_site.EmitPatchInfo();
4534
4535      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4536      __ testp(rax, rax);
4537      Split(cc, if_true, if_false, fall_through);
4538    }
4539  }
4540
4541  // Convert the result of the comparison into one expected for this
4542  // expression's context.
4543  context()->Plug(if_true, if_false);
4544}
4545
4546
4547void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4548                                              Expression* sub_expr,
4549                                              NilValue nil) {
4550  Label materialize_true, materialize_false;
4551  Label* if_true = NULL;
4552  Label* if_false = NULL;
4553  Label* fall_through = NULL;
4554  context()->PrepareTest(&materialize_true, &materialize_false,
4555                         &if_true, &if_false, &fall_through);
4556
4557  VisitForAccumulatorValue(sub_expr);
4558  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4559  if (expr->op() == Token::EQ_STRICT) {
4560    Heap::RootListIndex nil_value = nil == kNullValue ?
4561        Heap::kNullValueRootIndex :
4562        Heap::kUndefinedValueRootIndex;
4563    __ CompareRoot(rax, nil_value);
4564    Split(equal, if_true, if_false, fall_through);
4565  } else {
4566    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4567    CallIC(ic, expr->CompareOperationFeedbackId());
4568    __ testp(rax, rax);
4569    Split(not_zero, if_true, if_false, fall_through);
4570  }
4571  context()->Plug(if_true, if_false);
4572}
4573
4574
4575void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4576  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4577  context()->Plug(rax);
4578}
4579
4580
4581Register FullCodeGenerator::result_register() {
4582  return rax;
4583}
4584
4585
4586Register FullCodeGenerator::context_register() {
4587  return rsi;
4588}
4589
4590
4591void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4592  ASSERT(IsAligned(frame_offset, kPointerSize));
4593  __ movp(Operand(rbp, frame_offset), value);
4594}
4595
4596
4597void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4598  __ movp(dst, ContextOperand(rsi, context_index));
4599}
4600
4601
4602void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4603  Scope* declaration_scope = scope()->DeclarationScope();
4604  if (declaration_scope->is_global_scope() ||
4605      declaration_scope->is_module_scope()) {
4606    // Contexts nested in the native context have a canonical empty function
4607    // as their closure, not the anonymous closure containing the global
4608    // code.  Pass a smi sentinel and let the runtime look up the empty
4609    // function.
4610    __ Push(Smi::FromInt(0));
4611  } else if (declaration_scope->is_eval_scope()) {
4612    // Contexts created by a call to eval have the same closure as the
4613    // context calling eval, not the anonymous closure containing the eval
4614    // code.  Fetch it from the context.
4615    __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4616  } else {
4617    ASSERT(declaration_scope->is_function_scope());
4618    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4619  }
4620}
4621
4622
4623// ----------------------------------------------------------------------------
4624// Non-local control flow support.
4625
4626
4627void FullCodeGenerator::EnterFinallyBlock() {
4628  ASSERT(!result_register().is(rdx));
4629  ASSERT(!result_register().is(rcx));
4630  // Cook return address on top of stack (smi encoded Code* delta)
4631  __ PopReturnAddressTo(rdx);
4632  __ Move(rcx, masm_->CodeObject());
4633  __ subp(rdx, rcx);
4634  __ Integer32ToSmi(rdx, rdx);
4635  __ Push(rdx);
4636
4637  // Store result register while executing finally block.
4638  __ Push(result_register());
4639
4640  // Store pending message while executing finally block.
4641  ExternalReference pending_message_obj =
4642      ExternalReference::address_of_pending_message_obj(isolate());
4643  __ Load(rdx, pending_message_obj);
4644  __ Push(rdx);
4645
4646  ExternalReference has_pending_message =
4647      ExternalReference::address_of_has_pending_message(isolate());
4648  __ Load(rdx, has_pending_message);
4649  __ Integer32ToSmi(rdx, rdx);
4650  __ Push(rdx);
4651
4652  ExternalReference pending_message_script =
4653      ExternalReference::address_of_pending_message_script(isolate());
4654  __ Load(rdx, pending_message_script);
4655  __ Push(rdx);
4656}
4657
4658
4659void FullCodeGenerator::ExitFinallyBlock() {
4660  ASSERT(!result_register().is(rdx));
4661  ASSERT(!result_register().is(rcx));
4662  // Restore pending message from stack.
4663  __ Pop(rdx);
4664  ExternalReference pending_message_script =
4665      ExternalReference::address_of_pending_message_script(isolate());
4666  __ Store(pending_message_script, rdx);
4667
4668  __ Pop(rdx);
4669  __ SmiToInteger32(rdx, rdx);
4670  ExternalReference has_pending_message =
4671      ExternalReference::address_of_has_pending_message(isolate());
4672  __ Store(has_pending_message, rdx);
4673
4674  __ Pop(rdx);
4675  ExternalReference pending_message_obj =
4676      ExternalReference::address_of_pending_message_obj(isolate());
4677  __ Store(pending_message_obj, rdx);
4678
4679  // Restore result register from stack.
4680  __ Pop(result_register());
4681
4682  // Uncook return address.
4683  __ Pop(rdx);
4684  __ SmiToInteger32(rdx, rdx);
4685  __ Move(rcx, masm_->CodeObject());
4686  __ addp(rdx, rcx);
4687  __ jmp(rdx);
4688}
4689
4690
4691#undef __
4692
4693#define __ ACCESS_MASM(masm())
4694
4695FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4696    int* stack_depth,
4697    int* context_length) {
4698  // The macros used here must preserve the result register.
4699
4700  // Because the handler block contains the context of the finally
4701  // code, we can restore it directly from there for the finally code
4702  // rather than iteratively unwinding contexts via their previous
4703  // links.
4704  __ Drop(*stack_depth);  // Down to the handler block.
4705  if (*context_length > 0) {
4706    // Restore the context to its dedicated register and the stack.
4707    __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4708    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4709  }
4710  __ PopTryHandler();
4711  __ call(finally_entry_);
4712
4713  *stack_depth = 0;
4714  *context_length = 0;
4715  return previous_;
4716}
4717
4718
4719#undef __
4720
4721
4722static const byte kJnsInstruction = 0x79;
4723static const byte kNopByteOne = 0x66;
4724static const byte kNopByteTwo = 0x90;
4725#ifdef DEBUG
4726static const byte kCallInstruction = 0xe8;
4727#endif
4728
4729
4730void BackEdgeTable::PatchAt(Code* unoptimized_code,
4731                            Address pc,
4732                            BackEdgeState target_state,
4733                            Code* replacement_code) {
4734  Address call_target_address = pc - kIntSize;
4735  Address jns_instr_address = call_target_address - 3;
4736  Address jns_offset_address = call_target_address - 2;
4737
4738  switch (target_state) {
4739    case INTERRUPT:
4740      //     sub <profiling_counter>, <delta>  ;; Not changed
4741      //     jns ok
4742      //     call <interrupt stub>
4743      //   ok:
4744      *jns_instr_address = kJnsInstruction;
4745      *jns_offset_address = kJnsOffset;
4746      break;
4747    case ON_STACK_REPLACEMENT:
4748    case OSR_AFTER_STACK_CHECK:
4749      //     sub <profiling_counter>, <delta>  ;; Not changed
4750      //     nop
4751      //     nop
4752      //     call <on-stack replacment>
4753      //   ok:
4754      *jns_instr_address = kNopByteOne;
4755      *jns_offset_address = kNopByteTwo;
4756      break;
4757  }
4758
4759  Assembler::set_target_address_at(call_target_address,
4760                                   unoptimized_code,
4761                                   replacement_code->entry());
4762  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4763      unoptimized_code, call_target_address, replacement_code);
4764}
4765
4766
4767BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4768    Isolate* isolate,
4769    Code* unoptimized_code,
4770    Address pc) {
4771  Address call_target_address = pc - kIntSize;
4772  Address jns_instr_address = call_target_address - 3;
4773  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
4774
4775  if (*jns_instr_address == kJnsInstruction) {
4776    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
4777    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
4778              Assembler::target_address_at(call_target_address,
4779                                           unoptimized_code));
4780    return INTERRUPT;
4781  }
4782
4783  ASSERT_EQ(kNopByteOne, *jns_instr_address);
4784  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
4785
4786  if (Assembler::target_address_at(call_target_address,
4787                                   unoptimized_code) ==
4788      isolate->builtins()->OnStackReplacement()->entry()) {
4789    return ON_STACK_REPLACEMENT;
4790  }
4791
4792  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4793            Assembler::target_address_at(call_target_address,
4794                                         unoptimized_code));
4795  return OSR_AFTER_STACK_CHECK;
4796}
4797
4798
4799} }  // namespace v8::internal
4800
4801#endif  // V8_TARGET_ARCH_X64
4802