1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#if V8_TARGET_ARCH_X64
8
9#include "src/code-factory.h"
10#include "src/code-stubs.h"
11#include "src/codegen.h"
12#include "src/compiler.h"
13#include "src/debug.h"
14#include "src/full-codegen.h"
15#include "src/ic/ic.h"
16#include "src/isolate-inl.h"
17#include "src/parser.h"
18#include "src/scopes.h"
19
20namespace v8 {
21namespace internal {
22
23#define __ ACCESS_MASM(masm_)
24
25
26class JumpPatchSite BASE_EMBEDDED {
27 public:
28  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
29#ifdef DEBUG
30    info_emitted_ = false;
31#endif
32  }
33
34  ~JumpPatchSite() {
35    DCHECK(patch_site_.is_bound() == info_emitted_);
36  }
37
38  void EmitJumpIfNotSmi(Register reg,
39                        Label* target,
40                        Label::Distance near_jump = Label::kFar) {
41    __ testb(reg, Immediate(kSmiTagMask));
42    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
43  }
44
45  void EmitJumpIfSmi(Register reg,
46                     Label* target,
47                     Label::Distance near_jump = Label::kFar) {
48    __ testb(reg, Immediate(kSmiTagMask));
49    EmitJump(carry, target, near_jump);  // Never taken before patched.
50  }
51
52  void EmitPatchInfo() {
53    if (patch_site_.is_bound()) {
54      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
55      DCHECK(is_uint8(delta_to_patch_site));
56      __ testl(rax, Immediate(delta_to_patch_site));
57#ifdef DEBUG
58      info_emitted_ = true;
59#endif
60    } else {
61      __ nop();  // Signals no inlined code.
62    }
63  }
64
65 private:
66  // jc will be patched with jz, jnc will become jnz.
67  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
68    DCHECK(!patch_site_.is_bound() && !info_emitted_);
69    DCHECK(cc == carry || cc == not_carry);
70    __ bind(&patch_site_);
71    __ j(cc, target, near_jump);
72  }
73
74  MacroAssembler* masm_;
75  Label patch_site_;
76#ifdef DEBUG
77  bool info_emitted_;
78#endif
79};
80
81
82// Generate code for a JS function.  On entry to the function the receiver
83// and arguments have been pushed on the stack left to right, with the
84// return address on top of them.  The actual argument count matches the
85// formal parameter count expected by the function.
86//
87// The live registers are:
88//   o rdi: the JS function object being called (i.e. ourselves)
89//   o rsi: our context
90//   o rbp: our caller's frame pointer
91//   o rsp: stack pointer (pointing to return address)
92//
93// The function builds a JS frame.  Please see JavaScriptFrameConstants in
94// frames-x64.h for its layout.
95void FullCodeGenerator::Generate() {
96  CompilationInfo* info = info_;
97  handler_table_ =
98      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
99
100  profiling_counter_ = isolate()->factory()->NewCell(
101      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
102  SetFunctionPosition(function());
103  Comment cmnt(masm_, "[ function compiled by full code generator");
104
105  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
106
107#ifdef DEBUG
108  if (strlen(FLAG_stop_at) > 0 &&
109      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110    __ int3();
111  }
112#endif
113
114  // Sloppy mode functions and builtins need to replace the receiver with the
115  // global proxy when called as functions (without an explicit receiver
116  // object).
117  if (info->strict_mode() == SLOPPY && !info->is_native()) {
118    Label ok;
119    // +1 for return address.
120    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
121    __ movp(rcx, args.GetReceiverOperand());
122
123    __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
124    __ j(not_equal, &ok, Label::kNear);
125
126    __ movp(rcx, GlobalObjectOperand());
127    __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));
128
129    __ movp(args.GetReceiverOperand(), rcx);
130
131    __ bind(&ok);
132  }
133
134  // Open a frame scope to indicate that there is a frame on the stack.  The
135  // MANUAL indicates that the scope shouldn't actually generate code to set up
136  // the frame (that is done below).
137  FrameScope frame_scope(masm_, StackFrame::MANUAL);
138
139  info->set_prologue_offset(masm_->pc_offset());
140  __ Prologue(info->IsCodePreAgingActive());
141  info->AddNoFrameRange(0, masm_->pc_offset());
142
143  { Comment cmnt(masm_, "[ Allocate locals");
144    int locals_count = info->scope()->num_stack_slots();
145    // Generators allocate locals, if any, in context slots.
146    DCHECK(!info->function()->is_generator() || locals_count == 0);
147    if (locals_count == 1) {
148      __ PushRoot(Heap::kUndefinedValueRootIndex);
149    } else if (locals_count > 1) {
150      if (locals_count >= 128) {
151        Label ok;
152        __ movp(rcx, rsp);
153        __ subp(rcx, Immediate(locals_count * kPointerSize));
154        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
155        __ j(above_equal, &ok, Label::kNear);
156        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
157        __ bind(&ok);
158      }
159      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
160      const int kMaxPushes = 32;
161      if (locals_count >= kMaxPushes) {
162        int loop_iterations = locals_count / kMaxPushes;
163        __ movp(rcx, Immediate(loop_iterations));
164        Label loop_header;
165        __ bind(&loop_header);
166        // Do pushes.
167        for (int i = 0; i < kMaxPushes; i++) {
168          __ Push(rdx);
169        }
170        // Continue loop if not done.
171        __ decp(rcx);
172        __ j(not_zero, &loop_header, Label::kNear);
173      }
174      int remaining = locals_count % kMaxPushes;
175      // Emit the remaining pushes.
176      for (int i  = 0; i < remaining; i++) {
177        __ Push(rdx);
178      }
179    }
180  }
181
182  bool function_in_register = true;
183
184  // Possibly allocate a local context.
185  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186  if (heap_slots > 0) {
187    Comment cmnt(masm_, "[ Allocate context");
188    bool need_write_barrier = true;
189    // Argument to NewContext is the function, which is still in rdi.
190    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
191      __ Push(rdi);
192      __ Push(info->scope()->GetScopeInfo());
193      __ CallRuntime(Runtime::kNewGlobalContext, 2);
194    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
195      FastNewContextStub stub(isolate(), heap_slots);
196      __ CallStub(&stub);
197      // Result of FastNewContextStub is always in new space.
198      need_write_barrier = false;
199    } else {
200      __ Push(rdi);
201      __ CallRuntime(Runtime::kNewFunctionContext, 1);
202    }
203    function_in_register = false;
204    // Context is returned in rax.  It replaces the context passed to us.
205    // It's saved in the stack and kept live in rsi.
206    __ movp(rsi, rax);
207    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
208
209    // Copy any necessary parameters into the context.
210    int num_parameters = info->scope()->num_parameters();
211    for (int i = 0; i < num_parameters; i++) {
212      Variable* var = scope()->parameter(i);
213      if (var->IsContextSlot()) {
214        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
215            (num_parameters - 1 - i) * kPointerSize;
216        // Load parameter from stack.
217        __ movp(rax, Operand(rbp, parameter_offset));
218        // Store it in the context.
219        int context_offset = Context::SlotOffset(var->index());
220        __ movp(Operand(rsi, context_offset), rax);
221        // Update the write barrier.  This clobbers rax and rbx.
222        if (need_write_barrier) {
223          __ RecordWriteContextSlot(
224              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
225        } else if (FLAG_debug_code) {
226          Label done;
227          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
228          __ Abort(kExpectedNewSpaceObject);
229          __ bind(&done);
230        }
231      }
232    }
233  }
234
235  // Possibly allocate an arguments object.
236  Variable* arguments = scope()->arguments();
237  if (arguments != NULL) {
238    // Arguments object must be allocated after the context object, in
239    // case the "arguments" or ".arguments" variables are in the context.
240    Comment cmnt(masm_, "[ Allocate arguments object");
241    if (function_in_register) {
242      __ Push(rdi);
243    } else {
244      __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
245    }
246    // The receiver is just before the parameters on the caller's stack.
247    int num_parameters = info->scope()->num_parameters();
248    int offset = num_parameters * kPointerSize;
249    __ leap(rdx,
250           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
251    __ Push(rdx);
252    __ Push(Smi::FromInt(num_parameters));
253    // Arguments to ArgumentsAccessStub:
254    //   function, receiver address, parameter count.
255    // The stub will rewrite receiver and parameter count if the previous
256    // stack frame was an arguments adapter frame.
257    ArgumentsAccessStub::Type type;
258    if (strict_mode() == STRICT) {
259      type = ArgumentsAccessStub::NEW_STRICT;
260    } else if (function()->has_duplicate_parameters()) {
261      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
262    } else {
263      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
264    }
265    ArgumentsAccessStub stub(isolate(), type);
266    __ CallStub(&stub);
267
268    SetVar(arguments, rax, rbx, rdx);
269  }
270
271  if (FLAG_trace) {
272    __ CallRuntime(Runtime::kTraceEnter, 0);
273  }
274
275  // Visit the declarations and body unless there is an illegal
276  // redeclaration.
277  if (scope()->HasIllegalRedeclaration()) {
278    Comment cmnt(masm_, "[ Declarations");
279    scope()->VisitIllegalRedeclaration(this);
280
281  } else {
282    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
283    { Comment cmnt(masm_, "[ Declarations");
284      // For named function expressions, declare the function name as a
285      // constant.
286      if (scope()->is_function_scope() && scope()->function() != NULL) {
287        VariableDeclaration* function = scope()->function();
288        DCHECK(function->proxy()->var()->mode() == CONST ||
289               function->proxy()->var()->mode() == CONST_LEGACY);
290        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
291        VisitVariableDeclaration(function);
292      }
293      VisitDeclarations(scope()->declarations());
294    }
295
296    { Comment cmnt(masm_, "[ Stack check");
297      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
298       Label ok;
299       __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
300       __ j(above_equal, &ok, Label::kNear);
301       __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
302       __ bind(&ok);
303    }
304
305    { Comment cmnt(masm_, "[ Body");
306      DCHECK(loop_depth() == 0);
307      VisitStatements(function()->body());
308      DCHECK(loop_depth() == 0);
309    }
310  }
311
312  // Always emit a 'return undefined' in case control fell off the end of
313  // the body.
314  { Comment cmnt(masm_, "[ return <undefined>;");
315    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
316    EmitReturnSequence();
317  }
318}
319
320
321void FullCodeGenerator::ClearAccumulator() {
322  __ Set(rax, 0);
323}
324
325
326void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
327  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
328  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
329                    Smi::FromInt(-delta));
330}
331
332
333void FullCodeGenerator::EmitProfilingCounterReset() {
334  int reset_value = FLAG_interrupt_budget;
335  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
336  __ Move(kScratchRegister, Smi::FromInt(reset_value));
337  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
338}
339
340
341static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
342
343
344void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
345                                                Label* back_edge_target) {
346  Comment cmnt(masm_, "[ Back edge bookkeeping");
347  Label ok;
348
349  DCHECK(back_edge_target->is_bound());
350  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
351  int weight = Min(kMaxBackEdgeWeight,
352                   Max(1, distance / kCodeSizeMultiplier));
353  EmitProfilingCounterDecrement(weight);
354
355  __ j(positive, &ok, Label::kNear);
356  {
357    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
358    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
359    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
360
361    // Record a mapping of this PC offset to the OSR id.  This is used to find
362    // the AST id from the unoptimized code in order to use it as a key into
363    // the deoptimization input data found in the optimized code.
364    RecordBackEdge(stmt->OsrEntryId());
365
366    EmitProfilingCounterReset();
367  }
368  __ bind(&ok);
369
370  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
371  // Record a mapping of the OSR id to this PC.  This is used if the OSR
372  // entry becomes the target of a bailout.  We don't expect it to be, but
373  // we want it to work if it is.
374  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
375}
376
377
378void FullCodeGenerator::EmitReturnSequence() {
379  Comment cmnt(masm_, "[ Return sequence");
380  if (return_label_.is_bound()) {
381    __ jmp(&return_label_);
382  } else {
383    __ bind(&return_label_);
384    if (FLAG_trace) {
385      __ Push(rax);
386      __ CallRuntime(Runtime::kTraceExit, 1);
387    }
388    // Pretend that the exit is a backwards jump to the entry.
389    int weight = 1;
390    if (info_->ShouldSelfOptimize()) {
391      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
392    } else {
393      int distance = masm_->pc_offset();
394      weight = Min(kMaxBackEdgeWeight,
395                   Max(1, distance / kCodeSizeMultiplier));
396    }
397    EmitProfilingCounterDecrement(weight);
398    Label ok;
399    __ j(positive, &ok, Label::kNear);
400    __ Push(rax);
401    __ call(isolate()->builtins()->InterruptCheck(),
402            RelocInfo::CODE_TARGET);
403    __ Pop(rax);
404    EmitProfilingCounterReset();
405    __ bind(&ok);
406#ifdef DEBUG
407    // Add a label for checking the size of the code used for returning.
408    Label check_exit_codesize;
409    masm_->bind(&check_exit_codesize);
410#endif
411    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
412    __ RecordJSReturn();
413    // Do not use the leave instruction here because it is too short to
414    // patch with the code required by the debugger.
415    __ movp(rsp, rbp);
416    __ popq(rbp);
417    int no_frame_start = masm_->pc_offset();
418
419    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
420    __ Ret(arguments_bytes, rcx);
421
422    // Add padding that will be overwritten by a debugger breakpoint.  We
423    // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
424    // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32.
425    const int kPadding = Assembler::kJSReturnSequenceLength -
426                         kPointerSize == kInt64Size ? 7 : 6;
427    for (int i = 0; i < kPadding; ++i) {
428      masm_->int3();
429    }
430    // Check that the size of the code used for returning is large enough
431    // for the debugger's requirements.
432    DCHECK(Assembler::kJSReturnSequenceLength <=
433           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
434
435    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
436  }
437}
438
439
440void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
441  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
442}
443
444
445void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
446  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
447  codegen()->GetVar(result_register(), var);
448}
449
450
451void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
452  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
453  MemOperand operand = codegen()->VarOperand(var, result_register());
454  __ Push(operand);
455}
456
457
458void FullCodeGenerator::TestContext::Plug(Variable* var) const {
459  codegen()->GetVar(result_register(), var);
460  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
461  codegen()->DoTest(this);
462}
463
464
465void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
466}
467
468
469void FullCodeGenerator::AccumulatorValueContext::Plug(
470    Heap::RootListIndex index) const {
471  __ LoadRoot(result_register(), index);
472}
473
474
475void FullCodeGenerator::StackValueContext::Plug(
476    Heap::RootListIndex index) const {
477  __ PushRoot(index);
478}
479
480
481void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
482  codegen()->PrepareForBailoutBeforeSplit(condition(),
483                                          true,
484                                          true_label_,
485                                          false_label_);
486  if (index == Heap::kUndefinedValueRootIndex ||
487      index == Heap::kNullValueRootIndex ||
488      index == Heap::kFalseValueRootIndex) {
489    if (false_label_ != fall_through_) __ jmp(false_label_);
490  } else if (index == Heap::kTrueValueRootIndex) {
491    if (true_label_ != fall_through_) __ jmp(true_label_);
492  } else {
493    __ LoadRoot(result_register(), index);
494    codegen()->DoTest(this);
495  }
496}
497
498
499void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
500}
501
502
503void FullCodeGenerator::AccumulatorValueContext::Plug(
504    Handle<Object> lit) const {
505  if (lit->IsSmi()) {
506    __ SafeMove(result_register(), Smi::cast(*lit));
507  } else {
508    __ Move(result_register(), lit);
509  }
510}
511
512
513void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
514  if (lit->IsSmi()) {
515    __ SafePush(Smi::cast(*lit));
516  } else {
517    __ Push(lit);
518  }
519}
520
521
522void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
523  codegen()->PrepareForBailoutBeforeSplit(condition(),
524                                          true,
525                                          true_label_,
526                                          false_label_);
527  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
528  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
529    if (false_label_ != fall_through_) __ jmp(false_label_);
530  } else if (lit->IsTrue() || lit->IsJSObject()) {
531    if (true_label_ != fall_through_) __ jmp(true_label_);
532  } else if (lit->IsString()) {
533    if (String::cast(*lit)->length() == 0) {
534      if (false_label_ != fall_through_) __ jmp(false_label_);
535    } else {
536      if (true_label_ != fall_through_) __ jmp(true_label_);
537    }
538  } else if (lit->IsSmi()) {
539    if (Smi::cast(*lit)->value() == 0) {
540      if (false_label_ != fall_through_) __ jmp(false_label_);
541    } else {
542      if (true_label_ != fall_through_) __ jmp(true_label_);
543    }
544  } else {
545    // For simplicity we always test the accumulator register.
546    __ Move(result_register(), lit);
547    codegen()->DoTest(this);
548  }
549}
550
551
552void FullCodeGenerator::EffectContext::DropAndPlug(int count,
553                                                   Register reg) const {
554  DCHECK(count > 0);
555  __ Drop(count);
556}
557
558
559void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
560    int count,
561    Register reg) const {
562  DCHECK(count > 0);
563  __ Drop(count);
564  __ Move(result_register(), reg);
565}
566
567
568void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
569                                                       Register reg) const {
570  DCHECK(count > 0);
571  if (count > 1) __ Drop(count - 1);
572  __ movp(Operand(rsp, 0), reg);
573}
574
575
576void FullCodeGenerator::TestContext::DropAndPlug(int count,
577                                                 Register reg) const {
578  DCHECK(count > 0);
579  // For simplicity we always test the accumulator register.
580  __ Drop(count);
581  __ Move(result_register(), reg);
582  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
583  codegen()->DoTest(this);
584}
585
586
587void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
588                                            Label* materialize_false) const {
589  DCHECK(materialize_true == materialize_false);
590  __ bind(materialize_true);
591}
592
593
594void FullCodeGenerator::AccumulatorValueContext::Plug(
595    Label* materialize_true,
596    Label* materialize_false) const {
597  Label done;
598  __ bind(materialize_true);
599  __ Move(result_register(), isolate()->factory()->true_value());
600  __ jmp(&done, Label::kNear);
601  __ bind(materialize_false);
602  __ Move(result_register(), isolate()->factory()->false_value());
603  __ bind(&done);
604}
605
606
607void FullCodeGenerator::StackValueContext::Plug(
608    Label* materialize_true,
609    Label* materialize_false) const {
610  Label done;
611  __ bind(materialize_true);
612  __ Push(isolate()->factory()->true_value());
613  __ jmp(&done, Label::kNear);
614  __ bind(materialize_false);
615  __ Push(isolate()->factory()->false_value());
616  __ bind(&done);
617}
618
619
620void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
621                                          Label* materialize_false) const {
622  DCHECK(materialize_true == true_label_);
623  DCHECK(materialize_false == false_label_);
624}
625
626
627void FullCodeGenerator::EffectContext::Plug(bool flag) const {
628}
629
630
631void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
632  Heap::RootListIndex value_root_index =
633      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
634  __ LoadRoot(result_register(), value_root_index);
635}
636
637
638void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
639  Heap::RootListIndex value_root_index =
640      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
641  __ PushRoot(value_root_index);
642}
643
644
645void FullCodeGenerator::TestContext::Plug(bool flag) const {
646  codegen()->PrepareForBailoutBeforeSplit(condition(),
647                                          true,
648                                          true_label_,
649                                          false_label_);
650  if (flag) {
651    if (true_label_ != fall_through_) __ jmp(true_label_);
652  } else {
653    if (false_label_ != fall_through_) __ jmp(false_label_);
654  }
655}
656
657
658void FullCodeGenerator::DoTest(Expression* condition,
659                               Label* if_true,
660                               Label* if_false,
661                               Label* fall_through) {
662  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
663  CallIC(ic, condition->test_id());
664  __ testp(result_register(), result_register());
665  // The stub returns nonzero for true.
666  Split(not_zero, if_true, if_false, fall_through);
667}
668
669
670void FullCodeGenerator::Split(Condition cc,
671                              Label* if_true,
672                              Label* if_false,
673                              Label* fall_through) {
674  if (if_false == fall_through) {
675    __ j(cc, if_true);
676  } else if (if_true == fall_through) {
677    __ j(NegateCondition(cc), if_false);
678  } else {
679    __ j(cc, if_true);
680    __ jmp(if_false);
681  }
682}
683
684
685MemOperand FullCodeGenerator::StackOperand(Variable* var) {
686  DCHECK(var->IsStackAllocated());
687  // Offset is negative because higher indexes are at lower addresses.
688  int offset = -var->index() * kPointerSize;
689  // Adjust by a (parameter or local) base offset.
690  if (var->IsParameter()) {
691    offset += kFPOnStackSize + kPCOnStackSize +
692              (info_->scope()->num_parameters() - 1) * kPointerSize;
693  } else {
694    offset += JavaScriptFrameConstants::kLocal0Offset;
695  }
696  return Operand(rbp, offset);
697}
698
699
700MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
701  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
702  if (var->IsContextSlot()) {
703    int context_chain_length = scope()->ContextChainLength(var->scope());
704    __ LoadContext(scratch, context_chain_length);
705    return ContextOperand(scratch, var->index());
706  } else {
707    return StackOperand(var);
708  }
709}
710
711
712void FullCodeGenerator::GetVar(Register dest, Variable* var) {
713  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
714  MemOperand location = VarOperand(var, dest);
715  __ movp(dest, location);
716}
717
718
719void FullCodeGenerator::SetVar(Variable* var,
720                               Register src,
721                               Register scratch0,
722                               Register scratch1) {
723  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
724  DCHECK(!scratch0.is(src));
725  DCHECK(!scratch0.is(scratch1));
726  DCHECK(!scratch1.is(src));
727  MemOperand location = VarOperand(var, scratch0);
728  __ movp(location, src);
729
730  // Emit the write barrier code if the location is in the heap.
731  if (var->IsContextSlot()) {
732    int offset = Context::SlotOffset(var->index());
733    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
734  }
735}
736
737
738void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
739                                                     bool should_normalize,
740                                                     Label* if_true,
741                                                     Label* if_false) {
742  // Only prepare for bailouts before splits if we're in a test
743  // context. Otherwise, we let the Visit function deal with the
744  // preparation to avoid preparing with the same AST id twice.
745  if (!context()->IsTest() || !info_->IsOptimizable()) return;
746
747  Label skip;
748  if (should_normalize) __ jmp(&skip, Label::kNear);
749  PrepareForBailout(expr, TOS_REG);
750  if (should_normalize) {
751    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
752    Split(equal, if_true, if_false, NULL);
753    __ bind(&skip);
754  }
755}
756
757
758void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
759  // The variable in the declaration always resides in the current context.
760  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
761  if (generate_debug_code_) {
762    // Check that we're not inside a with or catch context.
763    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
764    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
765    __ Check(not_equal, kDeclarationInWithContext);
766    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
767    __ Check(not_equal, kDeclarationInCatchContext);
768  }
769}
770
771
772void FullCodeGenerator::VisitVariableDeclaration(
773    VariableDeclaration* declaration) {
774  // If it was not possible to allocate the variable at compile time, we
775  // need to "declare" it at runtime to make sure it actually exists in the
776  // local context.
777  VariableProxy* proxy = declaration->proxy();
778  VariableMode mode = declaration->mode();
779  Variable* variable = proxy->var();
780  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
781  switch (variable->location()) {
782    case Variable::UNALLOCATED:
783      globals_->Add(variable->name(), zone());
784      globals_->Add(variable->binding_needs_init()
785                        ? isolate()->factory()->the_hole_value()
786                    : isolate()->factory()->undefined_value(),
787                    zone());
788      break;
789
790    case Variable::PARAMETER:
791    case Variable::LOCAL:
792      if (hole_init) {
793        Comment cmnt(masm_, "[ VariableDeclaration");
794        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
795        __ movp(StackOperand(variable), kScratchRegister);
796      }
797      break;
798
799    case Variable::CONTEXT:
800      if (hole_init) {
801        Comment cmnt(masm_, "[ VariableDeclaration");
802        EmitDebugCheckDeclarationContext(variable);
803        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
804        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
805        // No write barrier since the hole value is in old space.
806        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
807      }
808      break;
809
810    case Variable::LOOKUP: {
811      Comment cmnt(masm_, "[ VariableDeclaration");
812      __ Push(rsi);
813      __ Push(variable->name());
814      // Declaration nodes are always introduced in one of four modes.
815      DCHECK(IsDeclaredVariableMode(mode));
816      PropertyAttributes attr =
817          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
818      __ Push(Smi::FromInt(attr));
819      // Push initial value, if any.
820      // Note: For variables we must not push an initial value (such as
821      // 'undefined') because we may have a (legal) redeclaration and we
822      // must not destroy the current value.
823      if (hole_init) {
824        __ PushRoot(Heap::kTheHoleValueRootIndex);
825      } else {
826        __ Push(Smi::FromInt(0));  // Indicates no initial value.
827      }
828      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
829      break;
830    }
831  }
832}
833
834
835void FullCodeGenerator::VisitFunctionDeclaration(
836    FunctionDeclaration* declaration) {
837  VariableProxy* proxy = declaration->proxy();
838  Variable* variable = proxy->var();
839  switch (variable->location()) {
840    case Variable::UNALLOCATED: {
841      globals_->Add(variable->name(), zone());
842      Handle<SharedFunctionInfo> function =
843          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
844      // Check for stack-overflow exception.
845      if (function.is_null()) return SetStackOverflow();
846      globals_->Add(function, zone());
847      break;
848    }
849
850    case Variable::PARAMETER:
851    case Variable::LOCAL: {
852      Comment cmnt(masm_, "[ FunctionDeclaration");
853      VisitForAccumulatorValue(declaration->fun());
854      __ movp(StackOperand(variable), result_register());
855      break;
856    }
857
858    case Variable::CONTEXT: {
859      Comment cmnt(masm_, "[ FunctionDeclaration");
860      EmitDebugCheckDeclarationContext(variable);
861      VisitForAccumulatorValue(declaration->fun());
862      __ movp(ContextOperand(rsi, variable->index()), result_register());
863      int offset = Context::SlotOffset(variable->index());
864      // We know that we have written a function, which is not a smi.
865      __ RecordWriteContextSlot(rsi,
866                                offset,
867                                result_register(),
868                                rcx,
869                                kDontSaveFPRegs,
870                                EMIT_REMEMBERED_SET,
871                                OMIT_SMI_CHECK);
872      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
873      break;
874    }
875
876    case Variable::LOOKUP: {
877      Comment cmnt(masm_, "[ FunctionDeclaration");
878      __ Push(rsi);
879      __ Push(variable->name());
880      __ Push(Smi::FromInt(NONE));
881      VisitForStackValue(declaration->fun());
882      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
883      break;
884    }
885  }
886}
887
888
889void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
890  Variable* variable = declaration->proxy()->var();
891  DCHECK(variable->location() == Variable::CONTEXT);
892  DCHECK(variable->interface()->IsFrozen());
893
894  Comment cmnt(masm_, "[ ModuleDeclaration");
895  EmitDebugCheckDeclarationContext(variable);
896
897  // Load instance object.
898  __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
899  __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
900  __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));
901
902  // Assign it.
903  __ movp(ContextOperand(rsi, variable->index()), rax);
904  // We know that we have written a module, which is not a smi.
905  __ RecordWriteContextSlot(rsi,
906                            Context::SlotOffset(variable->index()),
907                            rax,
908                            rcx,
909                            kDontSaveFPRegs,
910                            EMIT_REMEMBERED_SET,
911                            OMIT_SMI_CHECK);
912  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
913
914  // Traverse into body.
915  Visit(declaration->module());
916}
917
918
919void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
920  VariableProxy* proxy = declaration->proxy();
921  Variable* variable = proxy->var();
922  switch (variable->location()) {
923    case Variable::UNALLOCATED:
924      // TODO(rossberg)
925      break;
926
927    case Variable::CONTEXT: {
928      Comment cmnt(masm_, "[ ImportDeclaration");
929      EmitDebugCheckDeclarationContext(variable);
930      // TODO(rossberg)
931      break;
932    }
933
934    case Variable::PARAMETER:
935    case Variable::LOCAL:
936    case Variable::LOOKUP:
937      UNREACHABLE();
938  }
939}
940
941
942void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
943  // TODO(rossberg)
944}
945
946
947void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
948  // Call the runtime to declare the globals.
949  __ Push(rsi);  // The context is the first argument.
950  __ Push(pairs);
951  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
952  __ CallRuntime(Runtime::kDeclareGlobals, 3);
953  // Return value is ignored.
954}
955
956
957void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
958  // Call the runtime to declare the modules.
959  __ Push(descriptions);
960  __ CallRuntime(Runtime::kDeclareModules, 1);
961  // Return value is ignored.
962}
963
964
965void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
966  Comment cmnt(masm_, "[ SwitchStatement");
967  Breakable nested_statement(this, stmt);
968  SetStatementPosition(stmt);
969
970  // Keep the switch value on the stack until a case matches.
971  VisitForStackValue(stmt->tag());
972  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
973
974  ZoneList<CaseClause*>* clauses = stmt->cases();
975  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
976
977  Label next_test;  // Recycled for each test.
978  // Compile all the tests with branches to their bodies.
979  for (int i = 0; i < clauses->length(); i++) {
980    CaseClause* clause = clauses->at(i);
981    clause->body_target()->Unuse();
982
983    // The default is not a test, but remember it as final fall through.
984    if (clause->is_default()) {
985      default_clause = clause;
986      continue;
987    }
988
989    Comment cmnt(masm_, "[ Case comparison");
990    __ bind(&next_test);
991    next_test.Unuse();
992
993    // Compile the label expression.
994    VisitForAccumulatorValue(clause->label());
995
996    // Perform the comparison as if via '==='.
997    __ movp(rdx, Operand(rsp, 0));  // Switch value.
998    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
999    JumpPatchSite patch_site(masm_);
1000    if (inline_smi_code) {
1001      Label slow_case;
1002      __ movp(rcx, rdx);
1003      __ orp(rcx, rax);
1004      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
1005
1006      __ cmpp(rdx, rax);
1007      __ j(not_equal, &next_test);
1008      __ Drop(1);  // Switch value is no longer needed.
1009      __ jmp(clause->body_target());
1010      __ bind(&slow_case);
1011    }
1012
1013    // Record position before stub call for type feedback.
1014    SetSourcePosition(clause->position());
1015    Handle<Code> ic =
1016        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1017    CallIC(ic, clause->CompareId());
1018    patch_site.EmitPatchInfo();
1019
1020    Label skip;
1021    __ jmp(&skip, Label::kNear);
1022    PrepareForBailout(clause, TOS_REG);
1023    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1024    __ j(not_equal, &next_test);
1025    __ Drop(1);
1026    __ jmp(clause->body_target());
1027    __ bind(&skip);
1028
1029    __ testp(rax, rax);
1030    __ j(not_equal, &next_test);
1031    __ Drop(1);  // Switch value is no longer needed.
1032    __ jmp(clause->body_target());
1033  }
1034
1035  // Discard the test value and jump to the default if present, otherwise to
1036  // the end of the statement.
1037  __ bind(&next_test);
1038  __ Drop(1);  // Switch value is no longer needed.
1039  if (default_clause == NULL) {
1040    __ jmp(nested_statement.break_label());
1041  } else {
1042    __ jmp(default_clause->body_target());
1043  }
1044
1045  // Compile all the case bodies.
1046  for (int i = 0; i < clauses->length(); i++) {
1047    Comment cmnt(masm_, "[ Case body");
1048    CaseClause* clause = clauses->at(i);
1049    __ bind(clause->body_target());
1050    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1051    VisitStatements(clause->statements());
1052  }
1053
1054  __ bind(nested_statement.break_label());
1055  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1056}
1057
1058
1059void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1060  Comment cmnt(masm_, "[ ForInStatement");
1061  int slot = stmt->ForInFeedbackSlot();
1062  SetStatementPosition(stmt);
1063
1064  Label loop, exit;
1065  ForIn loop_statement(this, stmt);
1066  increment_loop_depth();
1067
1068  // Get the object to enumerate over. If the object is null or undefined, skip
1069  // over the loop.  See ECMA-262 version 5, section 12.6.4.
1070  VisitForAccumulatorValue(stmt->enumerable());
1071  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1072  __ j(equal, &exit);
1073  Register null_value = rdi;
1074  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1075  __ cmpp(rax, null_value);
1076  __ j(equal, &exit);
1077
1078  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1079
1080  // Convert the object to a JS object.
1081  Label convert, done_convert;
1082  __ JumpIfSmi(rax, &convert);
1083  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1084  __ j(above_equal, &done_convert);
1085  __ bind(&convert);
1086  __ Push(rax);
1087  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1088  __ bind(&done_convert);
1089  __ Push(rax);
1090
1091  // Check for proxies.
1092  Label call_runtime;
1093  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1094  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1095  __ j(below_equal, &call_runtime);
1096
1097  // Check cache validity in generated code. This is a fast case for
1098  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1099  // guarantee cache validity, call the runtime system to check cache
1100  // validity or get the property names in a fixed array.
1101  __ CheckEnumCache(null_value, &call_runtime);
1102
1103  // The enum cache is valid.  Load the map of the object being
1104  // iterated over and use the cache for the iteration.
1105  Label use_cache;
1106  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1107  __ jmp(&use_cache, Label::kNear);
1108
1109  // Get the set of properties to enumerate.
1110  __ bind(&call_runtime);
1111  __ Push(rax);  // Duplicate the enumerable object on the stack.
1112  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1113
1114  // If we got a map from the runtime call, we can do a fast
1115  // modification check. Otherwise, we got a fixed array, and we have
1116  // to do a slow check.
1117  Label fixed_array;
1118  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1119                 Heap::kMetaMapRootIndex);
1120  __ j(not_equal, &fixed_array);
1121
1122  // We got a map in register rax. Get the enumeration cache from it.
1123  __ bind(&use_cache);
1124
1125  Label no_descriptors;
1126
1127  __ EnumLength(rdx, rax);
1128  __ Cmp(rdx, Smi::FromInt(0));
1129  __ j(equal, &no_descriptors);
1130
1131  __ LoadInstanceDescriptors(rax, rcx);
1132  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1133  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1134
1135  // Set up the four remaining stack slots.
1136  __ Push(rax);  // Map.
1137  __ Push(rcx);  // Enumeration cache.
1138  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
1139  __ Push(Smi::FromInt(0));  // Initial index.
1140  __ jmp(&loop);
1141
1142  __ bind(&no_descriptors);
1143  __ addp(rsp, Immediate(kPointerSize));
1144  __ jmp(&exit);
1145
1146  // We got a fixed array in register rax. Iterate through that.
1147  Label non_proxy;
1148  __ bind(&fixed_array);
1149
1150  // No need for a write barrier, we are storing a Smi in the feedback vector.
1151  __ Move(rbx, FeedbackVector());
1152  __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1153          TypeFeedbackVector::MegamorphicSentinel(isolate()));
1154  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
1155  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
1156  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1157  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1158  __ j(above, &non_proxy);
1159  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
1160  __ bind(&non_proxy);
1161  __ Push(rbx);  // Smi
1162  __ Push(rax);  // Array
1163  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1164  __ Push(rax);  // Fixed array length (as smi).
1165  __ Push(Smi::FromInt(0));  // Initial index.
1166
1167  // Generate code for doing the condition check.
1168  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1169  __ bind(&loop);
1170  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
1171  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
1172  __ j(above_equal, loop_statement.break_label());
1173
1174  // Get the current entry of the array into register rbx.
1175  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1176  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1177  __ movp(rbx, FieldOperand(rbx,
1178                            index.reg,
1179                            index.scale,
1180                            FixedArray::kHeaderSize));
1181
1182  // Get the expected map from the stack or a smi in the
1183  // permanent slow case into register rdx.
1184  __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1185
1186  // Check if the expected map still matches that of the enumerable.
1187  // If not, we may have to filter the key.
1188  Label update_each;
1189  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1190  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1191  __ j(equal, &update_each, Label::kNear);
1192
1193  // For proxies, no filtering is done.
1194  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1195  __ Cmp(rdx, Smi::FromInt(0));
1196  __ j(equal, &update_each, Label::kNear);
1197
1198  // Convert the entry to a string or null if it isn't a property
1199  // anymore. If the property has been removed while iterating, we
1200  // just skip it.
1201  __ Push(rcx);  // Enumerable.
1202  __ Push(rbx);  // Current entry.
1203  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1204  __ Cmp(rax, Smi::FromInt(0));
1205  __ j(equal, loop_statement.continue_label());
1206  __ movp(rbx, rax);
1207
1208  // Update the 'each' property or variable from the possibly filtered
1209  // entry in register rbx.
1210  __ bind(&update_each);
1211  __ movp(result_register(), rbx);
1212  // Perform the assignment as if via '='.
1213  { EffectContext context(this);
1214    EmitAssignment(stmt->each());
1215  }
1216
1217  // Generate code for the body of the loop.
1218  Visit(stmt->body());
1219
1220  // Generate code for going to the next element by incrementing the
1221  // index (smi) stored on top of the stack.
1222  __ bind(loop_statement.continue_label());
1223  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1224
1225  EmitBackEdgeBookkeeping(stmt, &loop);
1226  __ jmp(&loop);
1227
1228  // Remove the pointers stored on the stack.
1229  __ bind(loop_statement.break_label());
1230  __ addp(rsp, Immediate(5 * kPointerSize));
1231
1232  // Exit and decrement the loop depth.
1233  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1234  __ bind(&exit);
1235  decrement_loop_depth();
1236}
1237
1238
1239void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1240  Comment cmnt(masm_, "[ ForOfStatement");
1241  SetStatementPosition(stmt);
1242
1243  Iteration loop_statement(this, stmt);
1244  increment_loop_depth();
1245
1246  // var iterator = iterable[Symbol.iterator]();
1247  VisitForEffect(stmt->assign_iterator());
1248
1249  // Loop entry.
1250  __ bind(loop_statement.continue_label());
1251
1252  // result = iterator.next()
1253  VisitForEffect(stmt->next_result());
1254
1255  // if (result.done) break;
1256  Label result_not_done;
1257  VisitForControl(stmt->result_done(),
1258                  loop_statement.break_label(),
1259                  &result_not_done,
1260                  &result_not_done);
1261  __ bind(&result_not_done);
1262
1263  // each = result.value
1264  VisitForEffect(stmt->assign_each());
1265
1266  // Generate code for the body of the loop.
1267  Visit(stmt->body());
1268
1269  // Check stack before looping.
1270  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1271  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1272  __ jmp(loop_statement.continue_label());
1273
1274  // Exit and decrement the loop depth.
1275  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1276  __ bind(loop_statement.break_label());
1277  decrement_loop_depth();
1278}
1279
1280
1281void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1282                                       bool pretenure) {
1283  // Use the fast case closure allocation code that allocates in new
1284  // space for nested functions that don't need literals cloning. If
1285  // we're running with the --always-opt or the --prepare-always-opt
1286  // flag, we need to use the runtime function so that the new function
1287  // we are creating here gets a chance to have its code optimized and
1288  // doesn't just get a copy of the existing unoptimized code.
1289  if (!FLAG_always_opt &&
1290      !FLAG_prepare_always_opt &&
1291      !pretenure &&
1292      scope()->is_function_scope() &&
1293      info->num_literals() == 0) {
1294    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1295    __ Move(rbx, info);
1296    __ CallStub(&stub);
1297  } else {
1298    __ Push(rsi);
1299    __ Push(info);
1300    __ Push(pretenure
1301            ? isolate()->factory()->true_value()
1302            : isolate()->factory()->false_value());
1303    __ CallRuntime(Runtime::kNewClosure, 3);
1304  }
1305  context()->Plug(rax);
1306}
1307
1308
1309void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1310  Comment cmnt(masm_, "[ VariableProxy");
1311  EmitVariableLoad(expr);
1312}
1313
1314
1315void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1316  Comment cnmt(masm_, "[ SuperReference ");
1317
1318  __ movp(LoadDescriptor::ReceiverRegister(),
1319          Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1320
1321  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1322  __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
1323
1324  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1325
1326  __ Cmp(rax, isolate()->factory()->undefined_value());
1327  Label done;
1328  __ j(not_equal, &done);
1329  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1330  __ bind(&done);
1331}
1332
1333
1334void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1335                                                      TypeofState typeof_state,
1336                                                      Label* slow) {
1337  Register context = rsi;
1338  Register temp = rdx;
1339
1340  Scope* s = scope();
1341  while (s != NULL) {
1342    if (s->num_heap_slots() > 0) {
1343      if (s->calls_sloppy_eval()) {
1344        // Check that extension is NULL.
1345        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1346                Immediate(0));
1347        __ j(not_equal, slow);
1348      }
1349      // Load next context in chain.
1350      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1351      // Walk the rest of the chain without clobbering rsi.
1352      context = temp;
1353    }
1354    // If no outer scope calls eval, we do not need to check more
1355    // context extensions.  If we have reached an eval scope, we check
1356    // all extensions from this point.
1357    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1358    s = s->outer_scope();
1359  }
1360
1361  if (s != NULL && s->is_eval_scope()) {
1362    // Loop up the context chain.  There is no frame effect so it is
1363    // safe to use raw labels here.
1364    Label next, fast;
1365    if (!context.is(temp)) {
1366      __ movp(temp, context);
1367    }
1368    // Load map for comparison into register, outside loop.
1369    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1370    __ bind(&next);
1371    // Terminate at native context.
1372    __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1373    __ j(equal, &fast, Label::kNear);
1374    // Check that extension is NULL.
1375    __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1376    __ j(not_equal, slow);
1377    // Load next context in chain.
1378    __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1379    __ jmp(&next);
1380    __ bind(&fast);
1381  }
1382
1383  // All extension objects were empty and it is safe to use a global
1384  // load IC call.
1385  __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1386  __ Move(LoadDescriptor::NameRegister(), proxy->var()->name());
1387  if (FLAG_vector_ics) {
1388    __ Move(VectorLoadICDescriptor::SlotRegister(),
1389            Smi::FromInt(proxy->VariableFeedbackSlot()));
1390  }
1391
1392  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1393      ? NOT_CONTEXTUAL
1394      : CONTEXTUAL;
1395  CallLoadIC(mode);
1396}
1397
1398
1399MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1400                                                                Label* slow) {
1401  DCHECK(var->IsContextSlot());
1402  Register context = rsi;
1403  Register temp = rbx;
1404
1405  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1406    if (s->num_heap_slots() > 0) {
1407      if (s->calls_sloppy_eval()) {
1408        // Check that extension is NULL.
1409        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1410                Immediate(0));
1411        __ j(not_equal, slow);
1412      }
1413      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1414      // Walk the rest of the chain without clobbering rsi.
1415      context = temp;
1416    }
1417  }
1418  // Check that last extension is NULL.
1419  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1420  __ j(not_equal, slow);
1421
1422  // This function is used only for loads, not stores, so it's safe to
1423  // return an rsi-based operand (the write barrier cannot be allowed to
1424  // destroy the rsi register).
1425  return ContextOperand(context, var->index());
1426}
1427
1428
1429void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1430                                                  TypeofState typeof_state,
1431                                                  Label* slow,
1432                                                  Label* done) {
1433  // Generate fast-case code for variables that might be shadowed by
1434  // eval-introduced variables.  Eval is used a lot without
1435  // introducing variables.  In those cases, we do not want to
1436  // perform a runtime call for all variables in the scope
1437  // containing the eval.
1438  Variable* var = proxy->var();
1439  if (var->mode() == DYNAMIC_GLOBAL) {
1440    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1441    __ jmp(done);
1442  } else if (var->mode() == DYNAMIC_LOCAL) {
1443    Variable* local = var->local_if_not_shadowed();
1444    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1445    if (local->mode() == LET || local->mode() == CONST ||
1446        local->mode() == CONST_LEGACY) {
1447      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1448      __ j(not_equal, done);
1449      if (local->mode() == CONST_LEGACY) {
1450        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1451      } else {  // LET || CONST
1452        __ Push(var->name());
1453        __ CallRuntime(Runtime::kThrowReferenceError, 1);
1454      }
1455    }
1456    __ jmp(done);
1457  }
1458}
1459
1460
1461void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1462  // Record position before possible IC call.
1463  SetSourcePosition(proxy->position());
1464  Variable* var = proxy->var();
1465
1466  // Three cases: global variables, lookup variables, and all other types of
1467  // variables.
1468  switch (var->location()) {
1469    case Variable::UNALLOCATED: {
1470      Comment cmnt(masm_, "[ Global variable");
1471      __ Move(LoadDescriptor::NameRegister(), var->name());
1472      __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1473      if (FLAG_vector_ics) {
1474        __ Move(VectorLoadICDescriptor::SlotRegister(),
1475                Smi::FromInt(proxy->VariableFeedbackSlot()));
1476      }
1477      CallLoadIC(CONTEXTUAL);
1478      context()->Plug(rax);
1479      break;
1480    }
1481
1482    case Variable::PARAMETER:
1483    case Variable::LOCAL:
1484    case Variable::CONTEXT: {
1485      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1486                                               : "[ Stack slot");
1487      if (var->binding_needs_init()) {
1488        // var->scope() may be NULL when the proxy is located in eval code and
1489        // refers to a potential outside binding. Currently those bindings are
1490        // always looked up dynamically, i.e. in that case
1491        //     var->location() == LOOKUP.
1492        // always holds.
1493        DCHECK(var->scope() != NULL);
1494
1495        // Check if the binding really needs an initialization check. The check
1496        // can be skipped in the following situation: we have a LET or CONST
1497        // binding in harmony mode, both the Variable and the VariableProxy have
1498        // the same declaration scope (i.e. they are both in global code, in the
1499        // same function or in the same eval code) and the VariableProxy is in
1500        // the source physically located after the initializer of the variable.
1501        //
1502        // We cannot skip any initialization checks for CONST in non-harmony
1503        // mode because const variables may be declared but never initialized:
1504        //   if (false) { const x; }; var y = x;
1505        //
1506        // The condition on the declaration scopes is a conservative check for
1507        // nested functions that access a binding and are called before the
1508        // binding is initialized:
1509        //   function() { f(); let x = 1; function f() { x = 2; } }
1510        //
1511        bool skip_init_check;
1512        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1513          skip_init_check = false;
1514        } else {
1515          // Check that we always have valid source position.
1516          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1517          DCHECK(proxy->position() != RelocInfo::kNoPosition);
1518          skip_init_check = var->mode() != CONST_LEGACY &&
1519              var->initializer_position() < proxy->position();
1520        }
1521
1522        if (!skip_init_check) {
1523          // Let and const need a read barrier.
1524          Label done;
1525          GetVar(rax, var);
1526          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1527          __ j(not_equal, &done, Label::kNear);
1528          if (var->mode() == LET || var->mode() == CONST) {
1529            // Throw a reference error when using an uninitialized let/const
1530            // binding in harmony mode.
1531            __ Push(var->name());
1532            __ CallRuntime(Runtime::kThrowReferenceError, 1);
1533          } else {
1534            // Uninitalized const bindings outside of harmony mode are unholed.
1535            DCHECK(var->mode() == CONST_LEGACY);
1536            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1537          }
1538          __ bind(&done);
1539          context()->Plug(rax);
1540          break;
1541        }
1542      }
1543      context()->Plug(var);
1544      break;
1545    }
1546
1547    case Variable::LOOKUP: {
1548      Comment cmnt(masm_, "[ Lookup slot");
1549      Label done, slow;
1550      // Generate code for loading from variables potentially shadowed
1551      // by eval-introduced variables.
1552      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1553      __ bind(&slow);
1554      __ Push(rsi);  // Context.
1555      __ Push(var->name());
1556      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1557      __ bind(&done);
1558      context()->Plug(rax);
1559      break;
1560    }
1561  }
1562}
1563
1564
1565void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1566  Comment cmnt(masm_, "[ RegExpLiteral");
1567  Label materialized;
1568  // Registers will be used as follows:
1569  // rdi = JS function.
1570  // rcx = literals array.
1571  // rbx = regexp literal.
1572  // rax = regexp literal clone.
1573  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1574  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1575  int literal_offset =
1576      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1577  __ movp(rbx, FieldOperand(rcx, literal_offset));
1578  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1579  __ j(not_equal, &materialized, Label::kNear);
1580
1581  // Create regexp literal using runtime function
1582  // Result will be in rax.
1583  __ Push(rcx);
1584  __ Push(Smi::FromInt(expr->literal_index()));
1585  __ Push(expr->pattern());
1586  __ Push(expr->flags());
1587  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1588  __ movp(rbx, rax);
1589
1590  __ bind(&materialized);
1591  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1592  Label allocated, runtime_allocate;
1593  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1594  __ jmp(&allocated);
1595
1596  __ bind(&runtime_allocate);
1597  __ Push(rbx);
1598  __ Push(Smi::FromInt(size));
1599  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1600  __ Pop(rbx);
1601
1602  __ bind(&allocated);
1603  // Copy the content into the newly allocated memory.
1604  // (Unroll copy loop once for better throughput).
1605  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1606    __ movp(rdx, FieldOperand(rbx, i));
1607    __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1608    __ movp(FieldOperand(rax, i), rdx);
1609    __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1610  }
1611  if ((size % (2 * kPointerSize)) != 0) {
1612    __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1613    __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1614  }
1615  context()->Plug(rax);
1616}
1617
1618
1619void FullCodeGenerator::EmitAccessor(Expression* expression) {
1620  if (expression == NULL) {
1621    __ PushRoot(Heap::kNullValueRootIndex);
1622  } else {
1623    VisitForStackValue(expression);
1624  }
1625}
1626
1627
1628void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1629  Comment cmnt(masm_, "[ ObjectLiteral");
1630
1631  expr->BuildConstantProperties(isolate());
1632  Handle<FixedArray> constant_properties = expr->constant_properties();
1633  int flags = expr->fast_elements()
1634      ? ObjectLiteral::kFastElements
1635      : ObjectLiteral::kNoFlags;
1636  flags |= expr->has_function()
1637      ? ObjectLiteral::kHasFunction
1638      : ObjectLiteral::kNoFlags;
1639  int properties_count = constant_properties->length() / 2;
1640  if (expr->may_store_doubles() || expr->depth() > 1 ||
1641      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1642      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1643    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1644    __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1645    __ Push(Smi::FromInt(expr->literal_index()));
1646    __ Push(constant_properties);
1647    __ Push(Smi::FromInt(flags));
1648    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1649  } else {
1650    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1651    __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1652    __ Move(rbx, Smi::FromInt(expr->literal_index()));
1653    __ Move(rcx, constant_properties);
1654    __ Move(rdx, Smi::FromInt(flags));
1655    FastCloneShallowObjectStub stub(isolate(), properties_count);
1656    __ CallStub(&stub);
1657  }
1658
1659  // If result_saved is true the result is on top of the stack.  If
1660  // result_saved is false the result is in rax.
1661  bool result_saved = false;
1662
1663  // Mark all computed expressions that are bound to a key that
1664  // is shadowed by a later occurrence of the same key. For the
1665  // marked expressions, no store code is emitted.
1666  expr->CalculateEmitStore(zone());
1667
1668  AccessorTable accessor_table(zone());
1669  for (int i = 0; i < expr->properties()->length(); i++) {
1670    ObjectLiteral::Property* property = expr->properties()->at(i);
1671    if (property->IsCompileTimeValue()) continue;
1672
1673    Literal* key = property->key();
1674    Expression* value = property->value();
1675    if (!result_saved) {
1676      __ Push(rax);  // Save result on the stack
1677      result_saved = true;
1678    }
1679    switch (property->kind()) {
1680      case ObjectLiteral::Property::CONSTANT:
1681        UNREACHABLE();
1682      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1683        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1684        // Fall through.
1685      case ObjectLiteral::Property::COMPUTED:
1686        if (key->value()->IsInternalizedString()) {
1687          if (property->emit_store()) {
1688            VisitForAccumulatorValue(value);
1689            DCHECK(StoreDescriptor::ValueRegister().is(rax));
1690            __ Move(StoreDescriptor::NameRegister(), key->value());
1691            __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1692            CallStoreIC(key->LiteralFeedbackId());
1693            PrepareForBailoutForId(key->id(), NO_REGISTERS);
1694          } else {
1695            VisitForEffect(value);
1696          }
1697          break;
1698        }
1699        __ Push(Operand(rsp, 0));  // Duplicate receiver.
1700        VisitForStackValue(key);
1701        VisitForStackValue(value);
1702        if (property->emit_store()) {
1703          __ Push(Smi::FromInt(SLOPPY));  // Strict mode
1704          __ CallRuntime(Runtime::kSetProperty, 4);
1705        } else {
1706          __ Drop(3);
1707        }
1708        break;
1709      case ObjectLiteral::Property::PROTOTYPE:
1710        __ Push(Operand(rsp, 0));  // Duplicate receiver.
1711        VisitForStackValue(value);
1712        if (property->emit_store()) {
1713          __ CallRuntime(Runtime::kSetPrototype, 2);
1714        } else {
1715          __ Drop(2);
1716        }
1717        break;
1718      case ObjectLiteral::Property::GETTER:
1719        accessor_table.lookup(key)->second->getter = value;
1720        break;
1721      case ObjectLiteral::Property::SETTER:
1722        accessor_table.lookup(key)->second->setter = value;
1723        break;
1724    }
1725  }
1726
1727  // Emit code to define accessors, using only a single call to the runtime for
1728  // each pair of corresponding getters and setters.
1729  for (AccessorTable::Iterator it = accessor_table.begin();
1730       it != accessor_table.end();
1731       ++it) {
1732    __ Push(Operand(rsp, 0));  // Duplicate receiver.
1733    VisitForStackValue(it->first);
1734    EmitAccessor(it->second->getter);
1735    EmitAccessor(it->second->setter);
1736    __ Push(Smi::FromInt(NONE));
1737    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1738  }
1739
1740  if (expr->has_function()) {
1741    DCHECK(result_saved);
1742    __ Push(Operand(rsp, 0));
1743    __ CallRuntime(Runtime::kToFastProperties, 1);
1744  }
1745
1746  if (result_saved) {
1747    context()->PlugTOS();
1748  } else {
1749    context()->Plug(rax);
1750  }
1751}
1752
1753
1754void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1755  Comment cmnt(masm_, "[ ArrayLiteral");
1756
1757  expr->BuildConstantElements(isolate());
1758  int flags = expr->depth() == 1
1759      ? ArrayLiteral::kShallowElements
1760      : ArrayLiteral::kNoFlags;
1761
1762  ZoneList<Expression*>* subexprs = expr->values();
1763  int length = subexprs->length();
1764  Handle<FixedArray> constant_elements = expr->constant_elements();
1765  DCHECK_EQ(2, constant_elements->length());
1766  ElementsKind constant_elements_kind =
1767      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1768  bool has_constant_fast_elements =
1769      IsFastObjectElementsKind(constant_elements_kind);
1770  Handle<FixedArrayBase> constant_elements_values(
1771      FixedArrayBase::cast(constant_elements->get(1)));
1772
1773  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1774  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1775    // If the only customer of allocation sites is transitioning, then
1776    // we can turn it off if we don't have anywhere else to transition to.
1777    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1778  }
1779
1780  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1781    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1782    __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1783    __ Push(Smi::FromInt(expr->literal_index()));
1784    __ Push(constant_elements);
1785    __ Push(Smi::FromInt(flags));
1786    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1787  } else {
1788    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1789    __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1790    __ Move(rbx, Smi::FromInt(expr->literal_index()));
1791    __ Move(rcx, constant_elements);
1792    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1793    __ CallStub(&stub);
1794  }
1795
1796  bool result_saved = false;  // Is the result saved to the stack?
1797
1798  // Emit code to evaluate all the non-constant subexpressions and to store
1799  // them into the newly cloned array.
1800  for (int i = 0; i < length; i++) {
1801    Expression* subexpr = subexprs->at(i);
1802    // If the subexpression is a literal or a simple materialized literal it
1803    // is already set in the cloned array.
1804    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1805
1806    if (!result_saved) {
1807      __ Push(rax);  // array literal
1808      __ Push(Smi::FromInt(expr->literal_index()));
1809      result_saved = true;
1810    }
1811    VisitForAccumulatorValue(subexpr);
1812
1813    if (IsFastObjectElementsKind(constant_elements_kind)) {
1814      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1815      // cannot transition and don't need to call the runtime stub.
1816      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1817      __ movp(rbx, Operand(rsp, kPointerSize));  // Copy of array literal.
1818      __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1819      // Store the subexpression value in the array's elements.
1820      __ movp(FieldOperand(rbx, offset), result_register());
1821      // Update the write barrier for the array store.
1822      __ RecordWriteField(rbx, offset, result_register(), rcx,
1823                          kDontSaveFPRegs,
1824                          EMIT_REMEMBERED_SET,
1825                          INLINE_SMI_CHECK);
1826    } else {
1827      // Store the subexpression value in the array's elements.
1828      __ Move(rcx, Smi::FromInt(i));
1829      StoreArrayLiteralElementStub stub(isolate());
1830      __ CallStub(&stub);
1831    }
1832
1833    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1834  }
1835
1836  if (result_saved) {
1837    __ addp(rsp, Immediate(kPointerSize));  // literal index
1838    context()->PlugTOS();
1839  } else {
1840    context()->Plug(rax);
1841  }
1842}
1843
1844
1845void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1846  DCHECK(expr->target()->IsValidReferenceExpression());
1847
1848  Comment cmnt(masm_, "[ Assignment");
1849
1850  // Left-hand side can only be a property, a global or a (parameter or local)
1851  // slot.
1852  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1853  LhsKind assign_type = VARIABLE;
1854  Property* property = expr->target()->AsProperty();
1855  if (property != NULL) {
1856    assign_type = (property->key()->IsPropertyName())
1857        ? NAMED_PROPERTY
1858        : KEYED_PROPERTY;
1859  }
1860
1861  // Evaluate LHS expression.
1862  switch (assign_type) {
1863    case VARIABLE:
1864      // Nothing to do here.
1865      break;
1866    case NAMED_PROPERTY:
1867      if (expr->is_compound()) {
1868        // We need the receiver both on the stack and in the register.
1869        VisitForStackValue(property->obj());
1870        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1871      } else {
1872        VisitForStackValue(property->obj());
1873      }
1874      break;
1875    case KEYED_PROPERTY: {
1876      if (expr->is_compound()) {
1877        VisitForStackValue(property->obj());
1878        VisitForStackValue(property->key());
1879        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1880        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1881      } else {
1882        VisitForStackValue(property->obj());
1883        VisitForStackValue(property->key());
1884      }
1885      break;
1886    }
1887  }
1888
1889  // For compound assignments we need another deoptimization point after the
1890  // variable/property load.
1891  if (expr->is_compound()) {
1892    { AccumulatorValueContext context(this);
1893      switch (assign_type) {
1894        case VARIABLE:
1895          EmitVariableLoad(expr->target()->AsVariableProxy());
1896          PrepareForBailout(expr->target(), TOS_REG);
1897          break;
1898        case NAMED_PROPERTY:
1899          EmitNamedPropertyLoad(property);
1900          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1901          break;
1902        case KEYED_PROPERTY:
1903          EmitKeyedPropertyLoad(property);
1904          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1905          break;
1906      }
1907    }
1908
1909    Token::Value op = expr->binary_op();
1910    __ Push(rax);  // Left operand goes on the stack.
1911    VisitForAccumulatorValue(expr->value());
1912
1913    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1914        ? OVERWRITE_RIGHT
1915        : NO_OVERWRITE;
1916    SetSourcePosition(expr->position() + 1);
1917    AccumulatorValueContext context(this);
1918    if (ShouldInlineSmiCase(op)) {
1919      EmitInlineSmiBinaryOp(expr->binary_operation(),
1920                            op,
1921                            mode,
1922                            expr->target(),
1923                            expr->value());
1924    } else {
1925      EmitBinaryOp(expr->binary_operation(), op, mode);
1926    }
1927    // Deoptimization point in case the binary operation may have side effects.
1928    PrepareForBailout(expr->binary_operation(), TOS_REG);
1929  } else {
1930    VisitForAccumulatorValue(expr->value());
1931  }
1932
1933  // Record source position before possible IC call.
1934  SetSourcePosition(expr->position());
1935
1936  // Store the value.
1937  switch (assign_type) {
1938    case VARIABLE:
1939      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1940                             expr->op());
1941      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1942      context()->Plug(rax);
1943      break;
1944    case NAMED_PROPERTY:
1945      EmitNamedPropertyAssignment(expr);
1946      break;
1947    case KEYED_PROPERTY:
1948      EmitKeyedPropertyAssignment(expr);
1949      break;
1950  }
1951}
1952
1953
1954void FullCodeGenerator::VisitYield(Yield* expr) {
1955  Comment cmnt(masm_, "[ Yield");
1956  // Evaluate yielded value first; the initial iterator definition depends on
1957  // this.  It stays on the stack while we update the iterator.
1958  VisitForStackValue(expr->expression());
1959
1960  switch (expr->yield_kind()) {
1961    case Yield::kSuspend:
1962      // Pop value from top-of-stack slot; box result into result register.
1963      EmitCreateIteratorResult(false);
1964      __ Push(result_register());
1965      // Fall through.
1966    case Yield::kInitial: {
1967      Label suspend, continuation, post_runtime, resume;
1968
1969      __ jmp(&suspend);
1970
1971      __ bind(&continuation);
1972      __ jmp(&resume);
1973
1974      __ bind(&suspend);
1975      VisitForAccumulatorValue(expr->generator_object());
1976      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1977      __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
1978              Smi::FromInt(continuation.pos()));
1979      __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
1980      __ movp(rcx, rsi);
1981      __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
1982                          kDontSaveFPRegs);
1983      __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
1984      __ cmpp(rsp, rbx);
1985      __ j(equal, &post_runtime);
1986      __ Push(rax);  // generator object
1987      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1988      __ movp(context_register(),
1989              Operand(rbp, StandardFrameConstants::kContextOffset));
1990      __ bind(&post_runtime);
1991
1992      __ Pop(result_register());
1993      EmitReturnSequence();
1994
1995      __ bind(&resume);
1996      context()->Plug(result_register());
1997      break;
1998    }
1999
2000    case Yield::kFinal: {
2001      VisitForAccumulatorValue(expr->generator_object());
2002      __ Move(FieldOperand(result_register(),
2003                           JSGeneratorObject::kContinuationOffset),
2004              Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2005      // Pop value from top-of-stack slot, box result into result register.
2006      EmitCreateIteratorResult(true);
2007      EmitUnwindBeforeReturn();
2008      EmitReturnSequence();
2009      break;
2010    }
2011
2012    case Yield::kDelegating: {
2013      VisitForStackValue(expr->generator_object());
2014
2015      // Initial stack layout is as follows:
2016      // [sp + 1 * kPointerSize] iter
2017      // [sp + 0 * kPointerSize] g
2018
2019      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2020      Label l_next, l_call, l_loop;
2021      Register load_receiver = LoadDescriptor::ReceiverRegister();
2022      Register load_name = LoadDescriptor::NameRegister();
2023
2024      // Initial send value is undefined.
2025      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2026      __ jmp(&l_next);
2027
2028      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2029      __ bind(&l_catch);
2030      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2031      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
2032      __ Push(load_name);
2033      __ Push(Operand(rsp, 2 * kPointerSize));               // iter
2034      __ Push(rax);                                          // exception
2035      __ jmp(&l_call);
2036
2037      // try { received = %yield result }
2038      // Shuffle the received result above a try handler and yield it without
2039      // re-boxing.
2040      __ bind(&l_try);
2041      __ Pop(rax);                                       // result
2042      __ PushTryHandler(StackHandler::CATCH, expr->index());
2043      const int handler_size = StackHandlerConstants::kSize;
2044      __ Push(rax);                                      // result
2045      __ jmp(&l_suspend);
2046      __ bind(&l_continuation);
2047      __ jmp(&l_resume);
2048      __ bind(&l_suspend);
2049      const int generator_object_depth = kPointerSize + handler_size;
2050      __ movp(rax, Operand(rsp, generator_object_depth));
2051      __ Push(rax);                                      // g
2052      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2053      __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2054              Smi::FromInt(l_continuation.pos()));
2055      __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2056      __ movp(rcx, rsi);
2057      __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2058                          kDontSaveFPRegs);
2059      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2060      __ movp(context_register(),
2061              Operand(rbp, StandardFrameConstants::kContextOffset));
2062      __ Pop(rax);                                       // result
2063      EmitReturnSequence();
2064      __ bind(&l_resume);                                // received in rax
2065      __ PopTryHandler();
2066
2067      // receiver = iter; f = 'next'; arg = received;
2068      __ bind(&l_next);
2069
2070      __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2071      __ Push(load_name);                           // "next"
2072      __ Push(Operand(rsp, 2 * kPointerSize));      // iter
2073      __ Push(rax);                                 // received
2074
2075      // result = receiver[f](arg);
2076      __ bind(&l_call);
2077      __ movp(load_receiver, Operand(rsp, kPointerSize));
2078      if (FLAG_vector_ics) {
2079        __ Move(VectorLoadICDescriptor::SlotRegister(),
2080                Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
2081      }
2082      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2083      CallIC(ic, TypeFeedbackId::None());
2084      __ movp(rdi, rax);
2085      __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2086      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2087      __ CallStub(&stub);
2088
2089      __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2090      __ Drop(1);  // The function is still on the stack; drop it.
2091
2092      // if (!result.done) goto l_try;
2093      __ bind(&l_loop);
2094      __ Move(load_receiver, rax);
2095      __ Push(load_receiver);                               // save result
2096      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
2097      if (FLAG_vector_ics) {
2098        __ Move(VectorLoadICDescriptor::SlotRegister(),
2099                Smi::FromInt(expr->DoneFeedbackSlot()));
2100      }
2101      CallLoadIC(NOT_CONTEXTUAL);                           // rax=result.done
2102      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2103      CallIC(bool_ic);
2104      __ testp(result_register(), result_register());
2105      __ j(zero, &l_try);
2106
2107      // result.value
2108      __ Pop(load_receiver);                             // result
2109      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
2110      if (FLAG_vector_ics) {
2111        __ Move(VectorLoadICDescriptor::SlotRegister(),
2112                Smi::FromInt(expr->ValueFeedbackSlot()));
2113      }
2114      CallLoadIC(NOT_CONTEXTUAL);                        // result.value in rax
2115      context()->DropAndPlug(2, rax);                    // drop iter and g
2116      break;
2117    }
2118  }
2119}
2120
2121
2122void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2123    Expression *value,
2124    JSGeneratorObject::ResumeMode resume_mode) {
2125  // The value stays in rax, and is ultimately read by the resumed generator, as
2126  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2127  // is read to throw the value when the resumed generator is already closed.
2128  // rbx will hold the generator object until the activation has been resumed.
2129  VisitForStackValue(generator);
2130  VisitForAccumulatorValue(value);
2131  __ Pop(rbx);
2132
2133  // Check generator state.
2134  Label wrong_state, closed_state, done;
2135  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2136  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2137  __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2138                Smi::FromInt(0));
2139  __ j(equal, &closed_state);
2140  __ j(less, &wrong_state);
2141
2142  // Load suspended function and context.
2143  __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2144  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2145
2146  // Push receiver.
2147  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2148
2149  // Push holes for arguments to generator function.
2150  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2151  __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2152      SharedFunctionInfo::kFormalParameterCountOffset);
2153  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2154  Label push_argument_holes, push_frame;
2155  __ bind(&push_argument_holes);
2156  __ subp(rdx, Immediate(1));
2157  __ j(carry, &push_frame);
2158  __ Push(rcx);
2159  __ jmp(&push_argument_holes);
2160
2161  // Enter a new JavaScript frame, and initialize its slots as they were when
2162  // the generator was suspended.
2163  Label resume_frame;
2164  __ bind(&push_frame);
2165  __ call(&resume_frame);
2166  __ jmp(&done);
2167  __ bind(&resume_frame);
2168  __ pushq(rbp);  // Caller's frame pointer.
2169  __ movp(rbp, rsp);
2170  __ Push(rsi);  // Callee's context.
2171  __ Push(rdi);  // Callee's JS Function.
2172
2173  // Load the operand stack size.
2174  __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2175  __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2176  __ SmiToInteger32(rdx, rdx);
2177
2178  // If we are sending a value and there is no operand stack, we can jump back
2179  // in directly.
2180  if (resume_mode == JSGeneratorObject::NEXT) {
2181    Label slow_resume;
2182    __ cmpp(rdx, Immediate(0));
2183    __ j(not_zero, &slow_resume);
2184    __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2185    __ SmiToInteger64(rcx,
2186        FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2187    __ addp(rdx, rcx);
2188    __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2189            Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2190    __ jmp(rdx);
2191    __ bind(&slow_resume);
2192  }
2193
2194  // Otherwise, we push holes for the operand stack and call the runtime to fix
2195  // up the stack and the handlers.
2196  Label push_operand_holes, call_resume;
2197  __ bind(&push_operand_holes);
2198  __ subp(rdx, Immediate(1));
2199  __ j(carry, &call_resume);
2200  __ Push(rcx);
2201  __ jmp(&push_operand_holes);
2202  __ bind(&call_resume);
2203  __ Push(rbx);
2204  __ Push(result_register());
2205  __ Push(Smi::FromInt(resume_mode));
2206  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2207  // Not reached: the runtime call returns elsewhere.
2208  __ Abort(kGeneratorFailedToResume);
2209
2210  // Reach here when generator is closed.
2211  __ bind(&closed_state);
2212  if (resume_mode == JSGeneratorObject::NEXT) {
2213    // Return completed iterator result when generator is closed.
2214    __ PushRoot(Heap::kUndefinedValueRootIndex);
2215    // Pop value from top-of-stack slot; box result into result register.
2216    EmitCreateIteratorResult(true);
2217  } else {
2218    // Throw the provided value.
2219    __ Push(rax);
2220    __ CallRuntime(Runtime::kThrow, 1);
2221  }
2222  __ jmp(&done);
2223
2224  // Throw error if we attempt to operate on a running generator.
2225  __ bind(&wrong_state);
2226  __ Push(rbx);
2227  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2228
2229  __ bind(&done);
2230  context()->Plug(result_register());
2231}
2232
2233
2234void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2235  Label gc_required;
2236  Label allocated;
2237
2238  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2239
2240  __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
2241  __ jmp(&allocated);
2242
2243  __ bind(&gc_required);
2244  __ Push(Smi::FromInt(map->instance_size()));
2245  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2246  __ movp(context_register(),
2247          Operand(rbp, StandardFrameConstants::kContextOffset));
2248
2249  __ bind(&allocated);
2250  __ Move(rbx, map);
2251  __ Pop(rcx);
2252  __ Move(rdx, isolate()->factory()->ToBoolean(done));
2253  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2254  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2255  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2256          isolate()->factory()->empty_fixed_array());
2257  __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2258          isolate()->factory()->empty_fixed_array());
2259  __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2260          rcx);
2261  __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2262          rdx);
2263
2264  // Only the value field needs a write barrier, as the other values are in the
2265  // root set.
2266  __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2267                      rcx, rdx, kDontSaveFPRegs);
2268}
2269
2270
2271void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2272  SetSourcePosition(prop->position());
2273  Literal* key = prop->key()->AsLiteral();
2274  __ Move(LoadDescriptor::NameRegister(), key->value());
2275  if (FLAG_vector_ics) {
2276    __ Move(VectorLoadICDescriptor::SlotRegister(),
2277            Smi::FromInt(prop->PropertyFeedbackSlot()));
2278    CallLoadIC(NOT_CONTEXTUAL);
2279  } else {
2280    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2281  }
2282}
2283
2284
2285void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2286  SetSourcePosition(prop->position());
2287  Literal* key = prop->key()->AsLiteral();
2288  DCHECK(!key->value()->IsSmi());
2289  DCHECK(prop->IsSuperAccess());
2290
2291  SuperReference* super_ref = prop->obj()->AsSuperReference();
2292  EmitLoadHomeObject(super_ref);
2293  __ Push(rax);
2294  VisitForStackValue(super_ref->this_var());
2295  __ Push(key->value());
2296  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2297}
2298
2299
2300void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2301  SetSourcePosition(prop->position());
2302  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2303  if (FLAG_vector_ics) {
2304    __ Move(VectorLoadICDescriptor::SlotRegister(),
2305            Smi::FromInt(prop->PropertyFeedbackSlot()));
2306    CallIC(ic);
2307  } else {
2308    CallIC(ic, prop->PropertyFeedbackId());
2309  }
2310}
2311
2312
2313void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2314                                              Token::Value op,
2315                                              OverwriteMode mode,
2316                                              Expression* left,
2317                                              Expression* right) {
2318  // Do combined smi check of the operands. Left operand is on the
2319  // stack (popped into rdx). Right operand is in rax but moved into
2320  // rcx to make the shifts easier.
2321  Label done, stub_call, smi_case;
2322  __ Pop(rdx);
2323  __ movp(rcx, rax);
2324  __ orp(rax, rdx);
2325  JumpPatchSite patch_site(masm_);
2326  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2327
2328  __ bind(&stub_call);
2329  __ movp(rax, rcx);
2330  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2331  CallIC(code, expr->BinaryOperationFeedbackId());
2332  patch_site.EmitPatchInfo();
2333  __ jmp(&done, Label::kNear);
2334
2335  __ bind(&smi_case);
2336  switch (op) {
2337    case Token::SAR:
2338      __ SmiShiftArithmeticRight(rax, rdx, rcx);
2339      break;
2340    case Token::SHL:
2341      __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2342      break;
2343    case Token::SHR:
2344      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2345      break;
2346    case Token::ADD:
2347      __ SmiAdd(rax, rdx, rcx, &stub_call);
2348      break;
2349    case Token::SUB:
2350      __ SmiSub(rax, rdx, rcx, &stub_call);
2351      break;
2352    case Token::MUL:
2353      __ SmiMul(rax, rdx, rcx, &stub_call);
2354      break;
2355    case Token::BIT_OR:
2356      __ SmiOr(rax, rdx, rcx);
2357      break;
2358    case Token::BIT_AND:
2359      __ SmiAnd(rax, rdx, rcx);
2360      break;
2361    case Token::BIT_XOR:
2362      __ SmiXor(rax, rdx, rcx);
2363      break;
2364    default:
2365      UNREACHABLE();
2366      break;
2367  }
2368
2369  __ bind(&done);
2370  context()->Plug(rax);
2371}
2372
2373
2374void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2375                                     Token::Value op,
2376                                     OverwriteMode mode) {
2377  __ Pop(rdx);
2378  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2379  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2380  CallIC(code, expr->BinaryOperationFeedbackId());
2381  patch_site.EmitPatchInfo();
2382  context()->Plug(rax);
2383}
2384
2385
2386void FullCodeGenerator::EmitAssignment(Expression* expr) {
2387  DCHECK(expr->IsValidReferenceExpression());
2388
2389  // Left-hand side can only be a property, a global or a (parameter or local)
2390  // slot.
2391  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2392  LhsKind assign_type = VARIABLE;
2393  Property* prop = expr->AsProperty();
2394  if (prop != NULL) {
2395    assign_type = (prop->key()->IsPropertyName())
2396        ? NAMED_PROPERTY
2397        : KEYED_PROPERTY;
2398  }
2399
2400  switch (assign_type) {
2401    case VARIABLE: {
2402      Variable* var = expr->AsVariableProxy()->var();
2403      EffectContext context(this);
2404      EmitVariableAssignment(var, Token::ASSIGN);
2405      break;
2406    }
2407    case NAMED_PROPERTY: {
2408      __ Push(rax);  // Preserve value.
2409      VisitForAccumulatorValue(prop->obj());
2410      __ Move(StoreDescriptor::ReceiverRegister(), rax);
2411      __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
2412      __ Move(StoreDescriptor::NameRegister(),
2413              prop->key()->AsLiteral()->value());
2414      CallStoreIC();
2415      break;
2416    }
2417    case KEYED_PROPERTY: {
2418      __ Push(rax);  // Preserve value.
2419      VisitForStackValue(prop->obj());
2420      VisitForAccumulatorValue(prop->key());
2421      __ Move(StoreDescriptor::NameRegister(), rax);
2422      __ Pop(StoreDescriptor::ReceiverRegister());
2423      __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
2424      Handle<Code> ic =
2425          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2426      CallIC(ic);
2427      break;
2428    }
2429  }
2430  context()->Plug(rax);
2431}
2432
2433
2434void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2435    Variable* var, MemOperand location) {
2436  __ movp(location, rax);
2437  if (var->IsContextSlot()) {
2438    __ movp(rdx, rax);
2439    __ RecordWriteContextSlot(
2440        rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2441  }
2442}
2443
2444
2445void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2446                                               Token::Value op) {
2447  if (var->IsUnallocated()) {
2448    // Global var, const, or let.
2449    __ Move(StoreDescriptor::NameRegister(), var->name());
2450    __ movp(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2451    CallStoreIC();
2452
2453  } else if (op == Token::INIT_CONST_LEGACY) {
2454    // Const initializers need a write barrier.
2455    DCHECK(!var->IsParameter());  // No const parameters.
2456    if (var->IsLookupSlot()) {
2457      __ Push(rax);
2458      __ Push(rsi);
2459      __ Push(var->name());
2460      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2461    } else {
2462      DCHECK(var->IsStackLocal() || var->IsContextSlot());
2463      Label skip;
2464      MemOperand location = VarOperand(var, rcx);
2465      __ movp(rdx, location);
2466      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2467      __ j(not_equal, &skip);
2468      EmitStoreToStackLocalOrContextSlot(var, location);
2469      __ bind(&skip);
2470    }
2471
2472  } else if (var->mode() == LET && op != Token::INIT_LET) {
2473    // Non-initializing assignment to let variable needs a write barrier.
2474    DCHECK(!var->IsLookupSlot());
2475    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2476    Label assign;
2477    MemOperand location = VarOperand(var, rcx);
2478    __ movp(rdx, location);
2479    __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2480    __ j(not_equal, &assign, Label::kNear);
2481    __ Push(var->name());
2482    __ CallRuntime(Runtime::kThrowReferenceError, 1);
2483    __ bind(&assign);
2484    EmitStoreToStackLocalOrContextSlot(var, location);
2485
2486  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2487    if (var->IsLookupSlot()) {
2488      // Assignment to var.
2489      __ Push(rax);  // Value.
2490      __ Push(rsi);  // Context.
2491      __ Push(var->name());
2492      __ Push(Smi::FromInt(strict_mode()));
2493      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2494    } else {
2495      // Assignment to var or initializing assignment to let/const in harmony
2496      // mode.
2497      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2498      MemOperand location = VarOperand(var, rcx);
2499      if (generate_debug_code_ && op == Token::INIT_LET) {
2500        // Check for an uninitialized let binding.
2501        __ movp(rdx, location);
2502        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2503        __ Check(equal, kLetBindingReInitialization);
2504      }
2505      EmitStoreToStackLocalOrContextSlot(var, location);
2506    }
2507  }
2508  // Non-initializing assignments to consts are ignored.
2509}
2510
2511
2512void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2513  // Assignment to a property, using a named store IC.
2514  Property* prop = expr->target()->AsProperty();
2515  DCHECK(prop != NULL);
2516  DCHECK(prop->key()->IsLiteral());
2517
2518  // Record source code position before IC call.
2519  SetSourcePosition(expr->position());
2520  __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2521  __ Pop(StoreDescriptor::ReceiverRegister());
2522  CallStoreIC(expr->AssignmentFeedbackId());
2523
2524  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2525  context()->Plug(rax);
2526}
2527
2528
2529void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2530  // Assignment to a property, using a keyed store IC.
2531
2532  __ Pop(StoreDescriptor::NameRegister());  // Key.
2533  __ Pop(StoreDescriptor::ReceiverRegister());
2534  DCHECK(StoreDescriptor::ValueRegister().is(rax));
2535  // Record source code position before IC call.
2536  SetSourcePosition(expr->position());
2537  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2538  CallIC(ic, expr->AssignmentFeedbackId());
2539
2540  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2541  context()->Plug(rax);
2542}
2543
2544
2545void FullCodeGenerator::VisitProperty(Property* expr) {
2546  Comment cmnt(masm_, "[ Property");
2547  Expression* key = expr->key();
2548
2549  if (key->IsPropertyName()) {
2550    if (!expr->IsSuperAccess()) {
2551      VisitForAccumulatorValue(expr->obj());
2552      DCHECK(!rax.is(LoadDescriptor::ReceiverRegister()));
2553      __ movp(LoadDescriptor::ReceiverRegister(), rax);
2554      EmitNamedPropertyLoad(expr);
2555    } else {
2556      EmitNamedSuperPropertyLoad(expr);
2557    }
2558    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2559    context()->Plug(rax);
2560  } else {
2561    VisitForStackValue(expr->obj());
2562    VisitForAccumulatorValue(expr->key());
2563    __ Move(LoadDescriptor::NameRegister(), rax);
2564    __ Pop(LoadDescriptor::ReceiverRegister());
2565    EmitKeyedPropertyLoad(expr);
2566    context()->Plug(rax);
2567  }
2568}
2569
2570
2571void FullCodeGenerator::CallIC(Handle<Code> code,
2572                               TypeFeedbackId ast_id) {
2573  ic_total_count_++;
2574  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2575}
2576
2577
2578// Code common for calls using the IC.
2579void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2580  Expression* callee = expr->expression();
2581
2582  CallICState::CallType call_type =
2583      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2584  // Get the target function.
2585  if (call_type == CallICState::FUNCTION) {
2586    { StackValueContext context(this);
2587      EmitVariableLoad(callee->AsVariableProxy());
2588      PrepareForBailout(callee, NO_REGISTERS);
2589    }
2590    // Push undefined as receiver. This is patched in the method prologue if it
2591    // is a sloppy mode method.
2592    __ Push(isolate()->factory()->undefined_value());
2593  } else {
2594    // Load the function from the receiver.
2595    DCHECK(callee->IsProperty());
2596    DCHECK(!callee->AsProperty()->IsSuperAccess());
2597    __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2598    EmitNamedPropertyLoad(callee->AsProperty());
2599    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2600    // Push the target function under the receiver.
2601    __ Push(Operand(rsp, 0));
2602    __ movp(Operand(rsp, kPointerSize), rax);
2603  }
2604
2605  EmitCall(expr, call_type);
2606}
2607
2608
2609void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2610  Expression* callee = expr->expression();
2611  DCHECK(callee->IsProperty());
2612  Property* prop = callee->AsProperty();
2613  DCHECK(prop->IsSuperAccess());
2614
2615  SetSourcePosition(prop->position());
2616  Literal* key = prop->key()->AsLiteral();
2617  DCHECK(!key->value()->IsSmi());
2618  // Load the function from the receiver.
2619  SuperReference* super_ref = prop->obj()->AsSuperReference();
2620  EmitLoadHomeObject(super_ref);
2621  __ Push(rax);
2622  VisitForAccumulatorValue(super_ref->this_var());
2623  __ Push(rax);
2624  __ Push(Operand(rsp, kPointerSize));
2625  __ Push(rax);
2626  __ Push(key->value());
2627
2628  // Stack here:
2629  //  - home_object
2630  //  - this (receiver)
2631  //  - home_object <-- LoadFromSuper will pop here and below.
2632  //  - this (receiver)
2633  //  - key
2634  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2635
2636  // Replace home_object with target function.
2637  __ movp(Operand(rsp, kPointerSize), rax);
2638
2639  // Stack here:
2640  // - target function
2641  // - this (receiver)
2642  EmitCall(expr, CallICState::METHOD);
2643}
2644
2645
2646// Common code for calls using the IC.
2647void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2648                                                Expression* key) {
2649  // Load the key.
2650  VisitForAccumulatorValue(key);
2651
2652  Expression* callee = expr->expression();
2653
2654  // Load the function from the receiver.
2655  DCHECK(callee->IsProperty());
2656  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2657  __ Move(LoadDescriptor::NameRegister(), rax);
2658  EmitKeyedPropertyLoad(callee->AsProperty());
2659  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2660
2661  // Push the target function under the receiver.
2662  __ Push(Operand(rsp, 0));
2663  __ movp(Operand(rsp, kPointerSize), rax);
2664
2665  EmitCall(expr, CallICState::METHOD);
2666}
2667
2668
2669void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2670  // Load the arguments.
2671  ZoneList<Expression*>* args = expr->arguments();
2672  int arg_count = args->length();
2673  { PreservePositionScope scope(masm()->positions_recorder());
2674    for (int i = 0; i < arg_count; i++) {
2675      VisitForStackValue(args->at(i));
2676    }
2677  }
2678
2679  // Record source position of the IC call.
2680  SetSourcePosition(expr->position());
2681  Handle<Code> ic = CallIC::initialize_stub(
2682      isolate(), arg_count, call_type);
2683  __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
2684  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2685  // Don't assign a type feedback id to the IC, since type feedback is provided
2686  // by the vector above.
2687  CallIC(ic);
2688
2689  RecordJSReturnSite(expr);
2690
2691  // Restore context register.
2692  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2693  // Discard the function left on TOS.
2694  context()->DropAndPlug(1, rax);
2695}
2696
2697
2698void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2699  // Push copy of the first argument or undefined if it doesn't exist.
2700  if (arg_count > 0) {
2701    __ Push(Operand(rsp, arg_count * kPointerSize));
2702  } else {
2703    __ PushRoot(Heap::kUndefinedValueRootIndex);
2704  }
2705
2706  // Push the enclosing function.
2707  __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2708
2709  // Push the receiver of the enclosing function and do runtime call.
2710  StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2711  __ Push(args.GetReceiverOperand());
2712
2713  // Push the language mode.
2714  __ Push(Smi::FromInt(strict_mode()));
2715
2716  // Push the start position of the scope the calls resides in.
2717  __ Push(Smi::FromInt(scope()->start_position()));
2718
2719  // Do the runtime call.
2720  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
2721}
2722
2723
2724void FullCodeGenerator::VisitCall(Call* expr) {
2725#ifdef DEBUG
2726  // We want to verify that RecordJSReturnSite gets called on all paths
2727  // through this function.  Avoid early returns.
2728  expr->return_is_recorded_ = false;
2729#endif
2730
2731  Comment cmnt(masm_, "[ Call");
2732  Expression* callee = expr->expression();
2733  Call::CallType call_type = expr->GetCallType(isolate());
2734
2735  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2736    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2737    // to resolve the function we need to call and the receiver of the call.
2738    // Then we call the resolved function using the given arguments.
2739    ZoneList<Expression*>* args = expr->arguments();
2740    int arg_count = args->length();
2741    { PreservePositionScope pos_scope(masm()->positions_recorder());
2742      VisitForStackValue(callee);
2743      __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.
2744
2745      // Push the arguments.
2746      for (int i = 0; i < arg_count; i++) {
2747        VisitForStackValue(args->at(i));
2748      }
2749
2750      // Push a copy of the function (found below the arguments) and resolve
2751      // eval.
2752      __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2753      EmitResolvePossiblyDirectEval(arg_count);
2754
2755      // The runtime call returns a pair of values in rax (function) and
2756      // rdx (receiver). Touch up the stack with the right values.
2757      __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2758      __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2759    }
2760    // Record source position for debugger.
2761    SetSourcePosition(expr->position());
2762    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2763    __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2764    __ CallStub(&stub);
2765    RecordJSReturnSite(expr);
2766    // Restore context register.
2767    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2768    context()->DropAndPlug(1, rax);
2769  } else if (call_type == Call::GLOBAL_CALL) {
2770    EmitCallWithLoadIC(expr);
2771
2772  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2773    // Call to a lookup slot (dynamically introduced variable).
2774    VariableProxy* proxy = callee->AsVariableProxy();
2775    Label slow, done;
2776
2777    { PreservePositionScope scope(masm()->positions_recorder());
2778      // Generate code for loading from variables potentially shadowed by
2779      // eval-introduced variables.
2780      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2781    }
2782    __ bind(&slow);
2783    // Call the runtime to find the function to call (returned in rax) and
2784    // the object holding it (returned in rdx).
2785    __ Push(context_register());
2786    __ Push(proxy->name());
2787    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2788    __ Push(rax);  // Function.
2789    __ Push(rdx);  // Receiver.
2790
2791    // If fast case code has been generated, emit code to push the function
2792    // and receiver and have the slow path jump around this code.
2793    if (done.is_linked()) {
2794      Label call;
2795      __ jmp(&call, Label::kNear);
2796      __ bind(&done);
2797      // Push function.
2798      __ Push(rax);
2799      // The receiver is implicitly the global receiver. Indicate this by
2800      // passing the hole to the call function stub.
2801      __ PushRoot(Heap::kUndefinedValueRootIndex);
2802      __ bind(&call);
2803    }
2804
2805    // The receiver is either the global receiver or an object found by
2806    // LoadContextSlot.
2807    EmitCall(expr);
2808  } else if (call_type == Call::PROPERTY_CALL) {
2809    Property* property = callee->AsProperty();
2810    bool is_named_call = property->key()->IsPropertyName();
2811    // super.x() is handled in EmitCallWithLoadIC.
2812    if (property->IsSuperAccess() && is_named_call) {
2813      EmitSuperCallWithLoadIC(expr);
2814    } else {
2815      {
2816        PreservePositionScope scope(masm()->positions_recorder());
2817        VisitForStackValue(property->obj());
2818      }
2819      if (is_named_call) {
2820        EmitCallWithLoadIC(expr);
2821      } else {
2822        EmitKeyedCallWithLoadIC(expr, property->key());
2823      }
2824    }
2825  } else {
2826    DCHECK(call_type == Call::OTHER_CALL);
2827    // Call to an arbitrary expression not handled specially above.
2828    { PreservePositionScope scope(masm()->positions_recorder());
2829      VisitForStackValue(callee);
2830    }
2831    __ PushRoot(Heap::kUndefinedValueRootIndex);
2832    // Emit function call.
2833    EmitCall(expr);
2834  }
2835
2836#ifdef DEBUG
2837  // RecordJSReturnSite should have been called.
2838  DCHECK(expr->return_is_recorded_);
2839#endif
2840}
2841
2842
2843void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2844  Comment cmnt(masm_, "[ CallNew");
2845  // According to ECMA-262, section 11.2.2, page 44, the function
2846  // expression in new calls must be evaluated before the
2847  // arguments.
2848
2849  // Push constructor on the stack.  If it's not a function it's used as
2850  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2851  // ignored.
2852  VisitForStackValue(expr->expression());
2853
2854  // Push the arguments ("left-to-right") on the stack.
2855  ZoneList<Expression*>* args = expr->arguments();
2856  int arg_count = args->length();
2857  for (int i = 0; i < arg_count; i++) {
2858    VisitForStackValue(args->at(i));
2859  }
2860
2861  // Call the construct call builtin that handles allocation and
2862  // constructor invocation.
2863  SetSourcePosition(expr->position());
2864
2865  // Load function and argument count into rdi and rax.
2866  __ Set(rax, arg_count);
2867  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2868
2869  // Record call targets in unoptimized code, but not in the snapshot.
2870  if (FLAG_pretenuring_call_new) {
2871    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2872    DCHECK(expr->AllocationSiteFeedbackSlot() ==
2873           expr->CallNewFeedbackSlot() + 1);
2874  }
2875
2876  __ Move(rbx, FeedbackVector());
2877  __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2878
2879  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2880  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2881  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2882  context()->Plug(rax);
2883}
2884
2885
2886void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2887  ZoneList<Expression*>* args = expr->arguments();
2888  DCHECK(args->length() == 1);
2889
2890  VisitForAccumulatorValue(args->at(0));
2891
2892  Label materialize_true, materialize_false;
2893  Label* if_true = NULL;
2894  Label* if_false = NULL;
2895  Label* fall_through = NULL;
2896  context()->PrepareTest(&materialize_true, &materialize_false,
2897                         &if_true, &if_false, &fall_through);
2898
2899  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2900  __ JumpIfSmi(rax, if_true);
2901  __ jmp(if_false);
2902
2903  context()->Plug(if_true, if_false);
2904}
2905
2906
2907void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2908  ZoneList<Expression*>* args = expr->arguments();
2909  DCHECK(args->length() == 1);
2910
2911  VisitForAccumulatorValue(args->at(0));
2912
2913  Label materialize_true, materialize_false;
2914  Label* if_true = NULL;
2915  Label* if_false = NULL;
2916  Label* fall_through = NULL;
2917  context()->PrepareTest(&materialize_true, &materialize_false,
2918                         &if_true, &if_false, &fall_through);
2919
2920  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2921  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2922  Split(non_negative_smi, if_true, if_false, fall_through);
2923
2924  context()->Plug(if_true, if_false);
2925}
2926
2927
2928void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2929  ZoneList<Expression*>* args = expr->arguments();
2930  DCHECK(args->length() == 1);
2931
2932  VisitForAccumulatorValue(args->at(0));
2933
2934  Label materialize_true, materialize_false;
2935  Label* if_true = NULL;
2936  Label* if_false = NULL;
2937  Label* fall_through = NULL;
2938  context()->PrepareTest(&materialize_true, &materialize_false,
2939                         &if_true, &if_false, &fall_through);
2940
2941  __ JumpIfSmi(rax, if_false);
2942  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2943  __ j(equal, if_true);
2944  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2945  // Undetectable objects behave like undefined when tested with typeof.
2946  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2947           Immediate(1 << Map::kIsUndetectable));
2948  __ j(not_zero, if_false);
2949  __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2950  __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2951  __ j(below, if_false);
2952  __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2953  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2954  Split(below_equal, if_true, if_false, fall_through);
2955
2956  context()->Plug(if_true, if_false);
2957}
2958
2959
2960void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2961  ZoneList<Expression*>* args = expr->arguments();
2962  DCHECK(args->length() == 1);
2963
2964  VisitForAccumulatorValue(args->at(0));
2965
2966  Label materialize_true, materialize_false;
2967  Label* if_true = NULL;
2968  Label* if_false = NULL;
2969  Label* fall_through = NULL;
2970  context()->PrepareTest(&materialize_true, &materialize_false,
2971                         &if_true, &if_false, &fall_through);
2972
2973  __ JumpIfSmi(rax, if_false);
2974  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2975  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2976  Split(above_equal, if_true, if_false, fall_through);
2977
2978  context()->Plug(if_true, if_false);
2979}
2980
2981
2982void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2983  ZoneList<Expression*>* args = expr->arguments();
2984  DCHECK(args->length() == 1);
2985
2986  VisitForAccumulatorValue(args->at(0));
2987
2988  Label materialize_true, materialize_false;
2989  Label* if_true = NULL;
2990  Label* if_false = NULL;
2991  Label* fall_through = NULL;
2992  context()->PrepareTest(&materialize_true, &materialize_false,
2993                         &if_true, &if_false, &fall_through);
2994
2995  __ JumpIfSmi(rax, if_false);
2996  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2997  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2998           Immediate(1 << Map::kIsUndetectable));
2999  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3000  Split(not_zero, if_true, if_false, fall_through);
3001
3002  context()->Plug(if_true, if_false);
3003}
3004
3005
3006void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3007    CallRuntime* expr) {
3008  ZoneList<Expression*>* args = expr->arguments();
3009  DCHECK(args->length() == 1);
3010
3011  VisitForAccumulatorValue(args->at(0));
3012
3013  Label materialize_true, materialize_false, skip_lookup;
3014  Label* if_true = NULL;
3015  Label* if_false = NULL;
3016  Label* fall_through = NULL;
3017  context()->PrepareTest(&materialize_true, &materialize_false,
3018                         &if_true, &if_false, &fall_through);
3019
3020  __ AssertNotSmi(rax);
3021
3022  // Check whether this map has already been checked to be safe for default
3023  // valueOf.
3024  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3025  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3026           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3027  __ j(not_zero, &skip_lookup);
3028
3029  // Check for fast case object. Generate false result for slow case object.
3030  __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3031  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3032  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3033  __ j(equal, if_false);
3034
3035  // Look for valueOf string in the descriptor array, and indicate false if
3036  // found. Since we omit an enumeration index check, if it is added via a
3037  // transition that shares its descriptor array, this is a false positive.
3038  Label entry, loop, done;
3039
3040  // Skip loop if no descriptors are valid.
3041  __ NumberOfOwnDescriptors(rcx, rbx);
3042  __ cmpp(rcx, Immediate(0));
3043  __ j(equal, &done);
3044
3045  __ LoadInstanceDescriptors(rbx, r8);
3046  // rbx: descriptor array.
3047  // rcx: valid entries in the descriptor array.
3048  // Calculate the end of the descriptor array.
3049  __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3050  __ leap(rcx,
3051          Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
3052  // Calculate location of the first key name.
3053  __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3054  // Loop through all the keys in the descriptor array. If one of these is the
3055  // internalized string "valueOf" the result is false.
3056  __ jmp(&entry);
3057  __ bind(&loop);
3058  __ movp(rdx, FieldOperand(r8, 0));
3059  __ Cmp(rdx, isolate()->factory()->value_of_string());
3060  __ j(equal, if_false);
3061  __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3062  __ bind(&entry);
3063  __ cmpp(r8, rcx);
3064  __ j(not_equal, &loop);
3065
3066  __ bind(&done);
3067
3068  // Set the bit in the map to indicate that there is no local valueOf field.
3069  __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3070         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3071
3072  __ bind(&skip_lookup);
3073
3074  // If a valueOf property is not found on the object check that its
3075  // prototype is the un-modified String prototype. If not result is false.
3076  __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3077  __ testp(rcx, Immediate(kSmiTagMask));
3078  __ j(zero, if_false);
3079  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3080  __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3081  __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3082  __ cmpp(rcx,
3083          ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3084  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3085  Split(equal, if_true, if_false, fall_through);
3086
3087  context()->Plug(if_true, if_false);
3088}
3089
3090
3091void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3092  ZoneList<Expression*>* args = expr->arguments();
3093  DCHECK(args->length() == 1);
3094
3095  VisitForAccumulatorValue(args->at(0));
3096
3097  Label materialize_true, materialize_false;
3098  Label* if_true = NULL;
3099  Label* if_false = NULL;
3100  Label* fall_through = NULL;
3101  context()->PrepareTest(&materialize_true, &materialize_false,
3102                         &if_true, &if_false, &fall_through);
3103
3104  __ JumpIfSmi(rax, if_false);
3105  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3106  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3107  Split(equal, if_true, if_false, fall_through);
3108
3109  context()->Plug(if_true, if_false);
3110}
3111
3112
3113void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3114  ZoneList<Expression*>* args = expr->arguments();
3115  DCHECK(args->length() == 1);
3116
3117  VisitForAccumulatorValue(args->at(0));
3118
3119  Label materialize_true, materialize_false;
3120  Label* if_true = NULL;
3121  Label* if_false = NULL;
3122  Label* fall_through = NULL;
3123  context()->PrepareTest(&materialize_true, &materialize_false,
3124                         &if_true, &if_false, &fall_through);
3125
3126  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3127  __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
3128  __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3129          Immediate(0x1));
3130  __ j(no_overflow, if_false);
3131  __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3132          Immediate(0x00000000));
3133  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3134  Split(equal, if_true, if_false, fall_through);
3135
3136  context()->Plug(if_true, if_false);
3137}
3138
3139
3140void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3141  ZoneList<Expression*>* args = expr->arguments();
3142  DCHECK(args->length() == 1);
3143
3144  VisitForAccumulatorValue(args->at(0));
3145
3146  Label materialize_true, materialize_false;
3147  Label* if_true = NULL;
3148  Label* if_false = NULL;
3149  Label* fall_through = NULL;
3150  context()->PrepareTest(&materialize_true, &materialize_false,
3151                         &if_true, &if_false, &fall_through);
3152
3153  __ JumpIfSmi(rax, if_false);
3154  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3155  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3156  Split(equal, if_true, if_false, fall_through);
3157
3158  context()->Plug(if_true, if_false);
3159}
3160
3161
3162void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3163  ZoneList<Expression*>* args = expr->arguments();
3164  DCHECK(args->length() == 1);
3165
3166  VisitForAccumulatorValue(args->at(0));
3167
3168  Label materialize_true, materialize_false;
3169  Label* if_true = NULL;
3170  Label* if_false = NULL;
3171  Label* fall_through = NULL;
3172  context()->PrepareTest(&materialize_true, &materialize_false,
3173                         &if_true, &if_false, &fall_through);
3174
3175  __ JumpIfSmi(rax, if_false);
3176  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3177  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3178  Split(equal, if_true, if_false, fall_through);
3179
3180  context()->Plug(if_true, if_false);
3181}
3182
3183
3184
3185void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3186  DCHECK(expr->arguments()->length() == 0);
3187
3188  Label materialize_true, materialize_false;
3189  Label* if_true = NULL;
3190  Label* if_false = NULL;
3191  Label* fall_through = NULL;
3192  context()->PrepareTest(&materialize_true, &materialize_false,
3193                         &if_true, &if_false, &fall_through);
3194
3195  // Get the frame pointer for the calling frame.
3196  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3197
3198  // Skip the arguments adaptor frame if it exists.
3199  Label check_frame_marker;
3200  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3201         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3202  __ j(not_equal, &check_frame_marker);
3203  __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3204
3205  // Check the marker in the calling frame.
3206  __ bind(&check_frame_marker);
3207  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3208         Smi::FromInt(StackFrame::CONSTRUCT));
3209  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3210  Split(equal, if_true, if_false, fall_through);
3211
3212  context()->Plug(if_true, if_false);
3213}
3214
3215
3216void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3217  ZoneList<Expression*>* args = expr->arguments();
3218  DCHECK(args->length() == 2);
3219
3220  // Load the two objects into registers and perform the comparison.
3221  VisitForStackValue(args->at(0));
3222  VisitForAccumulatorValue(args->at(1));
3223
3224  Label materialize_true, materialize_false;
3225  Label* if_true = NULL;
3226  Label* if_false = NULL;
3227  Label* fall_through = NULL;
3228  context()->PrepareTest(&materialize_true, &materialize_false,
3229                         &if_true, &if_false, &fall_through);
3230
3231  __ Pop(rbx);
3232  __ cmpp(rax, rbx);
3233  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3234  Split(equal, if_true, if_false, fall_through);
3235
3236  context()->Plug(if_true, if_false);
3237}
3238
3239
3240void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3241  ZoneList<Expression*>* args = expr->arguments();
3242  DCHECK(args->length() == 1);
3243
3244  // ArgumentsAccessStub expects the key in rdx and the formal
3245  // parameter count in rax.
3246  VisitForAccumulatorValue(args->at(0));
3247  __ movp(rdx, rax);
3248  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3249  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3250  __ CallStub(&stub);
3251  context()->Plug(rax);
3252}
3253
3254
3255void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3256  DCHECK(expr->arguments()->length() == 0);
3257
3258  Label exit;
3259  // Get the number of formal parameters.
3260  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3261
3262  // Check if the calling frame is an arguments adaptor frame.
3263  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3264  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3265         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3266  __ j(not_equal, &exit, Label::kNear);
3267
3268  // Arguments adaptor case: Read the arguments length from the
3269  // adaptor frame.
3270  __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3271
3272  __ bind(&exit);
3273  __ AssertSmi(rax);
3274  context()->Plug(rax);
3275}
3276
3277
3278void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3279  ZoneList<Expression*>* args = expr->arguments();
3280  DCHECK(args->length() == 1);
3281  Label done, null, function, non_function_constructor;
3282
3283  VisitForAccumulatorValue(args->at(0));
3284
3285  // If the object is a smi, we return null.
3286  __ JumpIfSmi(rax, &null);
3287
3288  // Check that the object is a JS object but take special care of JS
3289  // functions to make sure they have 'Function' as their class.
3290  // Assume that there are only two callable types, and one of them is at
3291  // either end of the type range for JS object types. Saves extra comparisons.
3292  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3293  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3294  // Map is now in rax.
3295  __ j(below, &null);
3296  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3297                FIRST_SPEC_OBJECT_TYPE + 1);
3298  __ j(equal, &function);
3299
3300  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3301  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3302                LAST_SPEC_OBJECT_TYPE - 1);
3303  __ j(equal, &function);
3304  // Assume that there is no larger type.
3305  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3306
3307  // Check if the constructor in the map is a JS function.
3308  __ movp(rax, FieldOperand(rax, Map::kConstructorOffset));
3309  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3310  __ j(not_equal, &non_function_constructor);
3311
3312  // rax now contains the constructor function. Grab the
3313  // instance class name from there.
3314  __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3315  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3316  __ jmp(&done);
3317
3318  // Functions have class 'Function'.
3319  __ bind(&function);
3320  __ Move(rax, isolate()->factory()->Function_string());
3321  __ jmp(&done);
3322
3323  // Objects with a non-function constructor have class 'Object'.
3324  __ bind(&non_function_constructor);
3325  __ Move(rax, isolate()->factory()->Object_string());
3326  __ jmp(&done);
3327
3328  // Non-JS objects have class null.
3329  __ bind(&null);
3330  __ LoadRoot(rax, Heap::kNullValueRootIndex);
3331
3332  // All done.
3333  __ bind(&done);
3334
3335  context()->Plug(rax);
3336}
3337
3338
3339void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3340  // Load the arguments on the stack and call the stub.
3341  SubStringStub stub(isolate());
3342  ZoneList<Expression*>* args = expr->arguments();
3343  DCHECK(args->length() == 3);
3344  VisitForStackValue(args->at(0));
3345  VisitForStackValue(args->at(1));
3346  VisitForStackValue(args->at(2));
3347  __ CallStub(&stub);
3348  context()->Plug(rax);
3349}
3350
3351
3352void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3353  // Load the arguments on the stack and call the stub.
3354  RegExpExecStub stub(isolate());
3355  ZoneList<Expression*>* args = expr->arguments();
3356  DCHECK(args->length() == 4);
3357  VisitForStackValue(args->at(0));
3358  VisitForStackValue(args->at(1));
3359  VisitForStackValue(args->at(2));
3360  VisitForStackValue(args->at(3));
3361  __ CallStub(&stub);
3362  context()->Plug(rax);
3363}
3364
3365
3366void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3367  ZoneList<Expression*>* args = expr->arguments();
3368  DCHECK(args->length() == 1);
3369
3370  VisitForAccumulatorValue(args->at(0));  // Load the object.
3371
3372  Label done;
3373  // If the object is a smi return the object.
3374  __ JumpIfSmi(rax, &done);
3375  // If the object is not a value type, return the object.
3376  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3377  __ j(not_equal, &done);
3378  __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3379
3380  __ bind(&done);
3381  context()->Plug(rax);
3382}
3383
3384
3385void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3386  ZoneList<Expression*>* args = expr->arguments();
3387  DCHECK(args->length() == 2);
3388  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3389  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3390
3391  VisitForAccumulatorValue(args->at(0));  // Load the object.
3392
3393  Label runtime, done, not_date_object;
3394  Register object = rax;
3395  Register result = rax;
3396  Register scratch = rcx;
3397
3398  __ JumpIfSmi(object, &not_date_object);
3399  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3400  __ j(not_equal, &not_date_object);
3401
3402  if (index->value() == 0) {
3403    __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3404    __ jmp(&done);
3405  } else {
3406    if (index->value() < JSDate::kFirstUncachedField) {
3407      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3408      Operand stamp_operand = __ ExternalOperand(stamp);
3409      __ movp(scratch, stamp_operand);
3410      __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3411      __ j(not_equal, &runtime, Label::kNear);
3412      __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3413                                           kPointerSize * index->value()));
3414      __ jmp(&done);
3415    }
3416    __ bind(&runtime);
3417    __ PrepareCallCFunction(2);
3418    __ movp(arg_reg_1, object);
3419    __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3420    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3421    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3422    __ jmp(&done);
3423  }
3424
3425  __ bind(&not_date_object);
3426  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3427  __ bind(&done);
3428  context()->Plug(rax);
3429}
3430
3431
3432void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3433  ZoneList<Expression*>* args = expr->arguments();
3434  DCHECK_EQ(3, args->length());
3435
3436  Register string = rax;
3437  Register index = rbx;
3438  Register value = rcx;
3439
3440  VisitForStackValue(args->at(0));        // index
3441  VisitForStackValue(args->at(1));        // value
3442  VisitForAccumulatorValue(args->at(2));  // string
3443  __ Pop(value);
3444  __ Pop(index);
3445
3446  if (FLAG_debug_code) {
3447    __ Check(__ CheckSmi(value), kNonSmiValue);
3448    __ Check(__ CheckSmi(index), kNonSmiValue);
3449  }
3450
3451  __ SmiToInteger32(value, value);
3452  __ SmiToInteger32(index, index);
3453
3454  if (FLAG_debug_code) {
3455    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3456    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3457  }
3458
3459  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3460          value);
3461  context()->Plug(string);
3462}
3463
3464
3465void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3466  ZoneList<Expression*>* args = expr->arguments();
3467  DCHECK_EQ(3, args->length());
3468
3469  Register string = rax;
3470  Register index = rbx;
3471  Register value = rcx;
3472
3473  VisitForStackValue(args->at(0));        // index
3474  VisitForStackValue(args->at(1));        // value
3475  VisitForAccumulatorValue(args->at(2));  // string
3476  __ Pop(value);
3477  __ Pop(index);
3478
3479  if (FLAG_debug_code) {
3480    __ Check(__ CheckSmi(value), kNonSmiValue);
3481    __ Check(__ CheckSmi(index), kNonSmiValue);
3482  }
3483
3484  __ SmiToInteger32(value, value);
3485  __ SmiToInteger32(index, index);
3486
3487  if (FLAG_debug_code) {
3488    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3489    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3490  }
3491
3492  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3493          value);
3494  context()->Plug(rax);
3495}
3496
3497
3498void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3499  // Load the arguments on the stack and call the runtime function.
3500  ZoneList<Expression*>* args = expr->arguments();
3501  DCHECK(args->length() == 2);
3502  VisitForStackValue(args->at(0));
3503  VisitForStackValue(args->at(1));
3504  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3505  __ CallStub(&stub);
3506  context()->Plug(rax);
3507}
3508
3509
3510void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3511  ZoneList<Expression*>* args = expr->arguments();
3512  DCHECK(args->length() == 2);
3513
3514  VisitForStackValue(args->at(0));  // Load the object.
3515  VisitForAccumulatorValue(args->at(1));  // Load the value.
3516  __ Pop(rbx);  // rax = value. rbx = object.
3517
3518  Label done;
3519  // If the object is a smi, return the value.
3520  __ JumpIfSmi(rbx, &done);
3521
3522  // If the object is not a value type, return the value.
3523  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3524  __ j(not_equal, &done);
3525
3526  // Store the value.
3527  __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3528  // Update the write barrier.  Save the value as it will be
3529  // overwritten by the write barrier code and is needed afterward.
3530  __ movp(rdx, rax);
3531  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3532
3533  __ bind(&done);
3534  context()->Plug(rax);
3535}
3536
3537
3538void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3539  ZoneList<Expression*>* args = expr->arguments();
3540  DCHECK_EQ(args->length(), 1);
3541
3542  // Load the argument into rax and call the stub.
3543  VisitForAccumulatorValue(args->at(0));
3544
3545  NumberToStringStub stub(isolate());
3546  __ CallStub(&stub);
3547  context()->Plug(rax);
3548}
3549
3550
3551void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3552  ZoneList<Expression*>* args = expr->arguments();
3553  DCHECK(args->length() == 1);
3554
3555  VisitForAccumulatorValue(args->at(0));
3556
3557  Label done;
3558  StringCharFromCodeGenerator generator(rax, rbx);
3559  generator.GenerateFast(masm_);
3560  __ jmp(&done);
3561
3562  NopRuntimeCallHelper call_helper;
3563  generator.GenerateSlow(masm_, call_helper);
3564
3565  __ bind(&done);
3566  context()->Plug(rbx);
3567}
3568
3569
3570void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3571  ZoneList<Expression*>* args = expr->arguments();
3572  DCHECK(args->length() == 2);
3573
3574  VisitForStackValue(args->at(0));
3575  VisitForAccumulatorValue(args->at(1));
3576
3577  Register object = rbx;
3578  Register index = rax;
3579  Register result = rdx;
3580
3581  __ Pop(object);
3582
3583  Label need_conversion;
3584  Label index_out_of_range;
3585  Label done;
3586  StringCharCodeAtGenerator generator(object,
3587                                      index,
3588                                      result,
3589                                      &need_conversion,
3590                                      &need_conversion,
3591                                      &index_out_of_range,
3592                                      STRING_INDEX_IS_NUMBER);
3593  generator.GenerateFast(masm_);
3594  __ jmp(&done);
3595
3596  __ bind(&index_out_of_range);
3597  // When the index is out of range, the spec requires us to return
3598  // NaN.
3599  __ LoadRoot(result, Heap::kNanValueRootIndex);
3600  __ jmp(&done);
3601
3602  __ bind(&need_conversion);
3603  // Move the undefined value into the result register, which will
3604  // trigger conversion.
3605  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3606  __ jmp(&done);
3607
3608  NopRuntimeCallHelper call_helper;
3609  generator.GenerateSlow(masm_, call_helper);
3610
3611  __ bind(&done);
3612  context()->Plug(result);
3613}
3614
3615
3616void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3617  ZoneList<Expression*>* args = expr->arguments();
3618  DCHECK(args->length() == 2);
3619
3620  VisitForStackValue(args->at(0));
3621  VisitForAccumulatorValue(args->at(1));
3622
3623  Register object = rbx;
3624  Register index = rax;
3625  Register scratch = rdx;
3626  Register result = rax;
3627
3628  __ Pop(object);
3629
3630  Label need_conversion;
3631  Label index_out_of_range;
3632  Label done;
3633  StringCharAtGenerator generator(object,
3634                                  index,
3635                                  scratch,
3636                                  result,
3637                                  &need_conversion,
3638                                  &need_conversion,
3639                                  &index_out_of_range,
3640                                  STRING_INDEX_IS_NUMBER);
3641  generator.GenerateFast(masm_);
3642  __ jmp(&done);
3643
3644  __ bind(&index_out_of_range);
3645  // When the index is out of range, the spec requires us to return
3646  // the empty string.
3647  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3648  __ jmp(&done);
3649
3650  __ bind(&need_conversion);
3651  // Move smi zero into the result register, which will trigger
3652  // conversion.
3653  __ Move(result, Smi::FromInt(0));
3654  __ jmp(&done);
3655
3656  NopRuntimeCallHelper call_helper;
3657  generator.GenerateSlow(masm_, call_helper);
3658
3659  __ bind(&done);
3660  context()->Plug(result);
3661}
3662
3663
3664void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3665  ZoneList<Expression*>* args = expr->arguments();
3666  DCHECK_EQ(2, args->length());
3667  VisitForStackValue(args->at(0));
3668  VisitForAccumulatorValue(args->at(1));
3669
3670  __ Pop(rdx);
3671  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3672  __ CallStub(&stub);
3673  context()->Plug(rax);
3674}
3675
3676
3677void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3678  ZoneList<Expression*>* args = expr->arguments();
3679  DCHECK_EQ(2, args->length());
3680
3681  VisitForStackValue(args->at(0));
3682  VisitForStackValue(args->at(1));
3683
3684  StringCompareStub stub(isolate());
3685  __ CallStub(&stub);
3686  context()->Plug(rax);
3687}
3688
3689
3690void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3691  ZoneList<Expression*>* args = expr->arguments();
3692  DCHECK(args->length() >= 2);
3693
3694  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
3695  for (int i = 0; i < arg_count + 1; i++) {
3696    VisitForStackValue(args->at(i));
3697  }
3698  VisitForAccumulatorValue(args->last());  // Function.
3699
3700  Label runtime, done;
3701  // Check for non-function argument (including proxy).
3702  __ JumpIfSmi(rax, &runtime);
3703  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3704  __ j(not_equal, &runtime);
3705
3706  // InvokeFunction requires the function in rdi. Move it in there.
3707  __ movp(rdi, result_register());
3708  ParameterCount count(arg_count);
3709  __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
3710  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3711  __ jmp(&done);
3712
3713  __ bind(&runtime);
3714  __ Push(rax);
3715  __ CallRuntime(Runtime::kCall, args->length());
3716  __ bind(&done);
3717
3718  context()->Plug(rax);
3719}
3720
3721
3722void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3723  RegExpConstructResultStub stub(isolate());
3724  ZoneList<Expression*>* args = expr->arguments();
3725  DCHECK(args->length() == 3);
3726  VisitForStackValue(args->at(0));
3727  VisitForStackValue(args->at(1));
3728  VisitForAccumulatorValue(args->at(2));
3729  __ Pop(rbx);
3730  __ Pop(rcx);
3731  __ CallStub(&stub);
3732  context()->Plug(rax);
3733}
3734
3735
3736void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3737  ZoneList<Expression*>* args = expr->arguments();
3738  DCHECK_EQ(2, args->length());
3739
3740  DCHECK_NE(NULL, args->at(0)->AsLiteral());
3741  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3742
3743  Handle<FixedArray> jsfunction_result_caches(
3744      isolate()->native_context()->jsfunction_result_caches());
3745  if (jsfunction_result_caches->length() <= cache_id) {
3746    __ Abort(kAttemptToUseUndefinedCache);
3747    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3748    context()->Plug(rax);
3749    return;
3750  }
3751
3752  VisitForAccumulatorValue(args->at(1));
3753
3754  Register key = rax;
3755  Register cache = rbx;
3756  Register tmp = rcx;
3757  __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3758  __ movp(cache,
3759          FieldOperand(cache, GlobalObject::kNativeContextOffset));
3760  __ movp(cache,
3761          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3762  __ movp(cache,
3763          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3764
3765  Label done, not_found;
3766  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3767  __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3768  // tmp now holds finger offset as a smi.
3769  SmiIndex index =
3770      __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3771  __ cmpp(key, FieldOperand(cache,
3772                            index.reg,
3773                            index.scale,
3774                            FixedArray::kHeaderSize));
3775  __ j(not_equal, &not_found, Label::kNear);
3776  __ movp(rax, FieldOperand(cache,
3777                            index.reg,
3778                            index.scale,
3779                            FixedArray::kHeaderSize + kPointerSize));
3780  __ jmp(&done, Label::kNear);
3781
3782  __ bind(&not_found);
3783  // Call runtime to perform the lookup.
3784  __ Push(cache);
3785  __ Push(key);
3786  __ CallRuntime(Runtime::kGetFromCache, 2);
3787
3788  __ bind(&done);
3789  context()->Plug(rax);
3790}
3791
3792
3793void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3794  ZoneList<Expression*>* args = expr->arguments();
3795  DCHECK(args->length() == 1);
3796
3797  VisitForAccumulatorValue(args->at(0));
3798
3799  Label materialize_true, materialize_false;
3800  Label* if_true = NULL;
3801  Label* if_false = NULL;
3802  Label* fall_through = NULL;
3803  context()->PrepareTest(&materialize_true, &materialize_false,
3804                         &if_true, &if_false, &fall_through);
3805
3806  __ testl(FieldOperand(rax, String::kHashFieldOffset),
3807           Immediate(String::kContainsCachedArrayIndexMask));
3808  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3809  __ j(zero, if_true);
3810  __ jmp(if_false);
3811
3812  context()->Plug(if_true, if_false);
3813}
3814
3815
3816void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3817  ZoneList<Expression*>* args = expr->arguments();
3818  DCHECK(args->length() == 1);
3819  VisitForAccumulatorValue(args->at(0));
3820
3821  __ AssertString(rax);
3822
3823  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3824  DCHECK(String::kHashShift >= kSmiTagSize);
3825  __ IndexFromHash(rax, rax);
3826
3827  context()->Plug(rax);
3828}
3829
3830
3831void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3832  Label bailout, return_result, done, one_char_separator, long_separator,
3833      non_trivial_array, not_size_one_array, loop,
3834      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3835  ZoneList<Expression*>* args = expr->arguments();
3836  DCHECK(args->length() == 2);
3837  // We will leave the separator on the stack until the end of the function.
3838  VisitForStackValue(args->at(1));
3839  // Load this to rax (= array)
3840  VisitForAccumulatorValue(args->at(0));
3841  // All aliases of the same register have disjoint lifetimes.
3842  Register array = rax;
3843  Register elements = no_reg;  // Will be rax.
3844
3845  Register index = rdx;
3846
3847  Register string_length = rcx;
3848
3849  Register string = rsi;
3850
3851  Register scratch = rbx;
3852
3853  Register array_length = rdi;
3854  Register result_pos = no_reg;  // Will be rdi.
3855
3856  Operand separator_operand =    Operand(rsp, 2 * kPointerSize);
3857  Operand result_operand =       Operand(rsp, 1 * kPointerSize);
3858  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3859  // Separator operand is already pushed. Make room for the two
3860  // other stack fields, and clear the direction flag in anticipation
3861  // of calling CopyBytes.
3862  __ subp(rsp, Immediate(2 * kPointerSize));
3863  __ cld();
3864  // Check that the array is a JSArray
3865  __ JumpIfSmi(array, &bailout);
3866  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3867  __ j(not_equal, &bailout);
3868
3869  // Check that the array has fast elements.
3870  __ CheckFastElements(scratch, &bailout);
3871
3872  // Array has fast elements, so its length must be a smi.
3873  // If the array has length zero, return the empty string.
3874  __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3875  __ SmiCompare(array_length, Smi::FromInt(0));
3876  __ j(not_zero, &non_trivial_array);
3877  __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3878  __ jmp(&return_result);
3879
3880  // Save the array length on the stack.
3881  __ bind(&non_trivial_array);
3882  __ SmiToInteger32(array_length, array_length);
3883  __ movl(array_length_operand, array_length);
3884
3885  // Save the FixedArray containing array's elements.
3886  // End of array's live range.
3887  elements = array;
3888  __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3889  array = no_reg;
3890
3891
3892  // Check that all array elements are sequential one-byte strings, and
3893  // accumulate the sum of their lengths, as a smi-encoded value.
3894  __ Set(index, 0);
3895  __ Set(string_length, 0);
3896  // Loop condition: while (index < array_length).
3897  // Live loop registers: index(int32), array_length(int32), string(String*),
3898  //                      scratch, string_length(int32), elements(FixedArray*).
3899  if (generate_debug_code_) {
3900    __ cmpp(index, array_length);
3901    __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3902  }
3903  __ bind(&loop);
3904  __ movp(string, FieldOperand(elements,
3905                               index,
3906                               times_pointer_size,
3907                               FixedArray::kHeaderSize));
3908  __ JumpIfSmi(string, &bailout);
3909  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3910  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3911  __ andb(scratch, Immediate(
3912      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3913  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3914  __ j(not_equal, &bailout);
3915  __ AddSmiField(string_length,
3916                 FieldOperand(string, SeqOneByteString::kLengthOffset));
3917  __ j(overflow, &bailout);
3918  __ incl(index);
3919  __ cmpl(index, array_length);
3920  __ j(less, &loop);
3921
3922  // Live registers:
3923  // string_length: Sum of string lengths.
3924  // elements: FixedArray of strings.
3925  // index: Array length.
3926  // array_length: Array length.
3927
3928  // If array_length is 1, return elements[0], a string.
3929  __ cmpl(array_length, Immediate(1));
3930  __ j(not_equal, &not_size_one_array);
3931  __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3932  __ jmp(&return_result);
3933
3934  __ bind(&not_size_one_array);
3935
3936  // End of array_length live range.
3937  result_pos = array_length;
3938  array_length = no_reg;
3939
3940  // Live registers:
3941  // string_length: Sum of string lengths.
3942  // elements: FixedArray of strings.
3943  // index: Array length.
3944
3945  // Check that the separator is a sequential one-byte string.
3946  __ movp(string, separator_operand);
3947  __ JumpIfSmi(string, &bailout);
3948  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3949  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3950  __ andb(scratch, Immediate(
3951      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3952  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3953  __ j(not_equal, &bailout);
3954
3955  // Live registers:
3956  // string_length: Sum of string lengths.
3957  // elements: FixedArray of strings.
3958  // index: Array length.
3959  // string: Separator string.
3960
3961  // Add (separator length times (array_length - 1)) to string_length.
3962  __ SmiToInteger32(scratch,
3963                    FieldOperand(string, SeqOneByteString::kLengthOffset));
3964  __ decl(index);
3965  __ imull(scratch, index);
3966  __ j(overflow, &bailout);
3967  __ addl(string_length, scratch);
3968  __ j(overflow, &bailout);
3969
3970  // Live registers and stack values:
3971  //   string_length: Total length of result string.
3972  //   elements: FixedArray of strings.
3973  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
3974                           &bailout);
3975  __ movp(result_operand, result_pos);
3976  __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
3977
3978  __ movp(string, separator_operand);
3979  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
3980                Smi::FromInt(1));
3981  __ j(equal, &one_char_separator);
3982  __ j(greater, &long_separator);
3983
3984
3985  // Empty separator case:
3986  __ Set(index, 0);
3987  __ movl(scratch, array_length_operand);
3988  __ jmp(&loop_1_condition);
3989  // Loop condition: while (index < array_length).
3990  __ bind(&loop_1);
3991  // Each iteration of the loop concatenates one string to the result.
3992  // Live values in registers:
3993  //   index: which element of the elements array we are adding to the result.
3994  //   result_pos: the position to which we are currently copying characters.
3995  //   elements: the FixedArray of strings we are joining.
3996  //   scratch: array length.
3997
3998  // Get string = array[index].
3999  __ movp(string, FieldOperand(elements, index,
4000                               times_pointer_size,
4001                               FixedArray::kHeaderSize));
4002  __ SmiToInteger32(string_length,
4003                    FieldOperand(string, String::kLengthOffset));
4004  __ leap(string,
4005         FieldOperand(string, SeqOneByteString::kHeaderSize));
4006  __ CopyBytes(result_pos, string, string_length);
4007  __ incl(index);
4008  __ bind(&loop_1_condition);
4009  __ cmpl(index, scratch);
4010  __ j(less, &loop_1);  // Loop while (index < array_length).
4011  __ jmp(&done);
4012
4013  // Generic bailout code used from several places.
4014  __ bind(&bailout);
4015  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4016  __ jmp(&return_result);
4017
4018
4019  // One-character separator case
4020  __ bind(&one_char_separator);
4021  // Get the separator one-byte character value.
4022  // Register "string" holds the separator.
4023  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4024  __ Set(index, 0);
4025  // Jump into the loop after the code that copies the separator, so the first
4026  // element is not preceded by a separator
4027  __ jmp(&loop_2_entry);
4028  // Loop condition: while (index < length).
4029  __ bind(&loop_2);
4030  // Each iteration of the loop concatenates one string to the result.
4031  // Live values in registers:
4032  //   elements: The FixedArray of strings we are joining.
4033  //   index: which element of the elements array we are adding to the result.
4034  //   result_pos: the position to which we are currently copying characters.
4035  //   scratch: Separator character.
4036
4037  // Copy the separator character to the result.
4038  __ movb(Operand(result_pos, 0), scratch);
4039  __ incp(result_pos);
4040
4041  __ bind(&loop_2_entry);
4042  // Get string = array[index].
4043  __ movp(string, FieldOperand(elements, index,
4044                               times_pointer_size,
4045                               FixedArray::kHeaderSize));
4046  __ SmiToInteger32(string_length,
4047                    FieldOperand(string, String::kLengthOffset));
4048  __ leap(string,
4049         FieldOperand(string, SeqOneByteString::kHeaderSize));
4050  __ CopyBytes(result_pos, string, string_length);
4051  __ incl(index);
4052  __ cmpl(index, array_length_operand);
4053  __ j(less, &loop_2);  // End while (index < length).
4054  __ jmp(&done);
4055
4056
4057  // Long separator case (separator is more than one character).
4058  __ bind(&long_separator);
4059
4060  // Make elements point to end of elements array, and index
4061  // count from -array_length to zero, so we don't need to maintain
4062  // a loop limit.
4063  __ movl(index, array_length_operand);
4064  __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4065                                FixedArray::kHeaderSize));
4066  __ negq(index);
4067
4068  // Replace separator string with pointer to its first character, and
4069  // make scratch be its length.
4070  __ movp(string, separator_operand);
4071  __ SmiToInteger32(scratch,
4072                    FieldOperand(string, String::kLengthOffset));
4073  __ leap(string,
4074         FieldOperand(string, SeqOneByteString::kHeaderSize));
4075  __ movp(separator_operand, string);
4076
4077  // Jump into the loop after the code that copies the separator, so the first
4078  // element is not preceded by a separator
4079  __ jmp(&loop_3_entry);
4080  // Loop condition: while (index < length).
4081  __ bind(&loop_3);
4082  // Each iteration of the loop concatenates one string to the result.
4083  // Live values in registers:
4084  //   index: which element of the elements array we are adding to the result.
4085  //   result_pos: the position to which we are currently copying characters.
4086  //   scratch: Separator length.
4087  //   separator_operand (rsp[0x10]): Address of first char of separator.
4088
4089  // Copy the separator to the result.
4090  __ movp(string, separator_operand);
4091  __ movl(string_length, scratch);
4092  __ CopyBytes(result_pos, string, string_length, 2);
4093
4094  __ bind(&loop_3_entry);
4095  // Get string = array[index].
4096  __ movp(string, Operand(elements, index, times_pointer_size, 0));
4097  __ SmiToInteger32(string_length,
4098                    FieldOperand(string, String::kLengthOffset));
4099  __ leap(string,
4100         FieldOperand(string, SeqOneByteString::kHeaderSize));
4101  __ CopyBytes(result_pos, string, string_length);
4102  __ incq(index);
4103  __ j(not_equal, &loop_3);  // Loop while (index < 0).
4104
4105  __ bind(&done);
4106  __ movp(rax, result_operand);
4107
4108  __ bind(&return_result);
4109  // Drop temp values from the stack, and restore context register.
4110  __ addp(rsp, Immediate(3 * kPointerSize));
4111  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4112  context()->Plug(rax);
4113}
4114
4115
4116void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4117  DCHECK(expr->arguments()->length() == 0);
4118  ExternalReference debug_is_active =
4119      ExternalReference::debug_is_active_address(isolate());
4120  __ Move(kScratchRegister, debug_is_active);
4121  __ movzxbp(rax, Operand(kScratchRegister, 0));
4122  __ Integer32ToSmi(rax, rax);
4123  context()->Plug(rax);
4124}
4125
4126
4127void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4128  if (expr->function() != NULL &&
4129      expr->function()->intrinsic_type == Runtime::INLINE) {
4130    Comment cmnt(masm_, "[ InlineRuntimeCall");
4131    EmitInlineRuntimeCall(expr);
4132    return;
4133  }
4134
4135  Comment cmnt(masm_, "[ CallRuntime");
4136  ZoneList<Expression*>* args = expr->arguments();
4137  int arg_count = args->length();
4138
4139  if (expr->is_jsruntime()) {
4140    // Push the builtins object as receiver.
4141    __ movp(rax, GlobalObjectOperand());
4142    __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4143
4144    // Load the function from the receiver.
4145    __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4146    __ Move(LoadDescriptor::NameRegister(), expr->name());
4147    if (FLAG_vector_ics) {
4148      __ Move(VectorLoadICDescriptor::SlotRegister(),
4149              Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
4150      CallLoadIC(NOT_CONTEXTUAL);
4151    } else {
4152      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4153    }
4154
4155    // Push the target function under the receiver.
4156    __ Push(Operand(rsp, 0));
4157    __ movp(Operand(rsp, kPointerSize), rax);
4158
4159    // Push the arguments ("left-to-right").
4160    for (int i = 0; i < arg_count; i++) {
4161      VisitForStackValue(args->at(i));
4162    }
4163
4164    // Record source position of the IC call.
4165    SetSourcePosition(expr->position());
4166    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4167    __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4168    __ CallStub(&stub);
4169
4170    // Restore context register.
4171    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4172    context()->DropAndPlug(1, rax);
4173
4174  } else {
4175    // Push the arguments ("left-to-right").
4176    for (int i = 0; i < arg_count; i++) {
4177      VisitForStackValue(args->at(i));
4178    }
4179
4180    // Call the C runtime.
4181    __ CallRuntime(expr->function(), arg_count);
4182    context()->Plug(rax);
4183  }
4184}
4185
4186
4187void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4188  switch (expr->op()) {
4189    case Token::DELETE: {
4190      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4191      Property* property = expr->expression()->AsProperty();
4192      VariableProxy* proxy = expr->expression()->AsVariableProxy();
4193
4194      if (property != NULL) {
4195        VisitForStackValue(property->obj());
4196        VisitForStackValue(property->key());
4197        __ Push(Smi::FromInt(strict_mode()));
4198        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4199        context()->Plug(rax);
4200      } else if (proxy != NULL) {
4201        Variable* var = proxy->var();
4202        // Delete of an unqualified identifier is disallowed in strict mode
4203        // but "delete this" is allowed.
4204        DCHECK(strict_mode() == SLOPPY || var->is_this());
4205        if (var->IsUnallocated()) {
4206          __ Push(GlobalObjectOperand());
4207          __ Push(var->name());
4208          __ Push(Smi::FromInt(SLOPPY));
4209          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4210          context()->Plug(rax);
4211        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4212          // Result of deleting non-global variables is false.  'this' is
4213          // not really a variable, though we implement it as one.  The
4214          // subexpression does not have side effects.
4215          context()->Plug(var->is_this());
4216        } else {
4217          // Non-global variable.  Call the runtime to try to delete from the
4218          // context where the variable was introduced.
4219          __ Push(context_register());
4220          __ Push(var->name());
4221          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4222          context()->Plug(rax);
4223        }
4224      } else {
4225        // Result of deleting non-property, non-variable reference is true.
4226        // The subexpression may have side effects.
4227        VisitForEffect(expr->expression());
4228        context()->Plug(true);
4229      }
4230      break;
4231    }
4232
4233    case Token::VOID: {
4234      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4235      VisitForEffect(expr->expression());
4236      context()->Plug(Heap::kUndefinedValueRootIndex);
4237      break;
4238    }
4239
4240    case Token::NOT: {
4241      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4242      if (context()->IsEffect()) {
4243        // Unary NOT has no side effects so it's only necessary to visit the
4244        // subexpression.  Match the optimizing compiler by not branching.
4245        VisitForEffect(expr->expression());
4246      } else if (context()->IsTest()) {
4247        const TestContext* test = TestContext::cast(context());
4248        // The labels are swapped for the recursive call.
4249        VisitForControl(expr->expression(),
4250                        test->false_label(),
4251                        test->true_label(),
4252                        test->fall_through());
4253        context()->Plug(test->true_label(), test->false_label());
4254      } else {
4255        // We handle value contexts explicitly rather than simply visiting
4256        // for control and plugging the control flow into the context,
4257        // because we need to prepare a pair of extra administrative AST ids
4258        // for the optimizing compiler.
4259        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4260        Label materialize_true, materialize_false, done;
4261        VisitForControl(expr->expression(),
4262                        &materialize_false,
4263                        &materialize_true,
4264                        &materialize_true);
4265        __ bind(&materialize_true);
4266        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4267        if (context()->IsAccumulatorValue()) {
4268          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4269        } else {
4270          __ PushRoot(Heap::kTrueValueRootIndex);
4271        }
4272        __ jmp(&done, Label::kNear);
4273        __ bind(&materialize_false);
4274        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4275        if (context()->IsAccumulatorValue()) {
4276          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4277        } else {
4278          __ PushRoot(Heap::kFalseValueRootIndex);
4279        }
4280        __ bind(&done);
4281      }
4282      break;
4283    }
4284
4285    case Token::TYPEOF: {
4286      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4287      { StackValueContext context(this);
4288        VisitForTypeofValue(expr->expression());
4289      }
4290      __ CallRuntime(Runtime::kTypeof, 1);
4291      context()->Plug(rax);
4292      break;
4293    }
4294
4295    default:
4296      UNREACHABLE();
4297  }
4298}
4299
4300
4301void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4302  DCHECK(expr->expression()->IsValidReferenceExpression());
4303
4304  Comment cmnt(masm_, "[ CountOperation");
4305  SetSourcePosition(expr->position());
4306
4307  // Expression can only be a property, a global or a (parameter or local)
4308  // slot.
4309  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4310  LhsKind assign_type = VARIABLE;
4311  Property* prop = expr->expression()->AsProperty();
4312  // In case of a property we use the uninitialized expression context
4313  // of the key to detect a named property.
4314  if (prop != NULL) {
4315    assign_type =
4316        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4317  }
4318
4319  // Evaluate expression and get value.
4320  if (assign_type == VARIABLE) {
4321    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4322    AccumulatorValueContext context(this);
4323    EmitVariableLoad(expr->expression()->AsVariableProxy());
4324  } else {
4325    // Reserve space for result of postfix operation.
4326    if (expr->is_postfix() && !context()->IsEffect()) {
4327      __ Push(Smi::FromInt(0));
4328    }
4329    if (assign_type == NAMED_PROPERTY) {
4330      VisitForStackValue(prop->obj());
4331      __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4332      EmitNamedPropertyLoad(prop);
4333    } else {
4334      VisitForStackValue(prop->obj());
4335      VisitForStackValue(prop->key());
4336      // Leave receiver on stack
4337      __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
4338      // Copy of key, needed for later store.
4339      __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
4340      EmitKeyedPropertyLoad(prop);
4341    }
4342  }
4343
4344  // We need a second deoptimization point after loading the value
4345  // in case evaluating the property load my have a side effect.
4346  if (assign_type == VARIABLE) {
4347    PrepareForBailout(expr->expression(), TOS_REG);
4348  } else {
4349    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4350  }
4351
4352  // Inline smi case if we are in a loop.
4353  Label done, stub_call;
4354  JumpPatchSite patch_site(masm_);
4355  if (ShouldInlineSmiCase(expr->op())) {
4356    Label slow;
4357    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4358
4359    // Save result for postfix expressions.
4360    if (expr->is_postfix()) {
4361      if (!context()->IsEffect()) {
4362        // Save the result on the stack. If we have a named or keyed property
4363        // we store the result under the receiver that is currently on top
4364        // of the stack.
4365        switch (assign_type) {
4366          case VARIABLE:
4367            __ Push(rax);
4368            break;
4369          case NAMED_PROPERTY:
4370            __ movp(Operand(rsp, kPointerSize), rax);
4371            break;
4372          case KEYED_PROPERTY:
4373            __ movp(Operand(rsp, 2 * kPointerSize), rax);
4374            break;
4375        }
4376      }
4377    }
4378
4379    SmiOperationExecutionMode mode;
4380    mode.Add(PRESERVE_SOURCE_REGISTER);
4381    mode.Add(BAILOUT_ON_NO_OVERFLOW);
4382    if (expr->op() == Token::INC) {
4383      __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4384    } else {
4385      __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4386    }
4387    __ jmp(&stub_call, Label::kNear);
4388    __ bind(&slow);
4389  }
4390
4391  ToNumberStub convert_stub(isolate());
4392  __ CallStub(&convert_stub);
4393
4394  // Save result for postfix expressions.
4395  if (expr->is_postfix()) {
4396    if (!context()->IsEffect()) {
4397      // Save the result on the stack. If we have a named or keyed property
4398      // we store the result under the receiver that is currently on top
4399      // of the stack.
4400      switch (assign_type) {
4401        case VARIABLE:
4402          __ Push(rax);
4403          break;
4404        case NAMED_PROPERTY:
4405          __ movp(Operand(rsp, kPointerSize), rax);
4406          break;
4407        case KEYED_PROPERTY:
4408          __ movp(Operand(rsp, 2 * kPointerSize), rax);
4409          break;
4410      }
4411    }
4412  }
4413
4414  // Record position before stub call.
4415  SetSourcePosition(expr->position());
4416
4417  // Call stub for +1/-1.
4418  __ bind(&stub_call);
4419  __ movp(rdx, rax);
4420  __ Move(rax, Smi::FromInt(1));
4421  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4422                                              NO_OVERWRITE).code();
4423  CallIC(code, expr->CountBinOpFeedbackId());
4424  patch_site.EmitPatchInfo();
4425  __ bind(&done);
4426
4427  // Store the value returned in rax.
4428  switch (assign_type) {
4429    case VARIABLE:
4430      if (expr->is_postfix()) {
4431        // Perform the assignment as if via '='.
4432        { EffectContext context(this);
4433          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4434                                 Token::ASSIGN);
4435          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4436          context.Plug(rax);
4437        }
4438        // For all contexts except kEffect: We have the result on
4439        // top of the stack.
4440        if (!context()->IsEffect()) {
4441          context()->PlugTOS();
4442        }
4443      } else {
4444        // Perform the assignment as if via '='.
4445        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4446                               Token::ASSIGN);
4447        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4448        context()->Plug(rax);
4449      }
4450      break;
4451    case NAMED_PROPERTY: {
4452      __ Move(StoreDescriptor::NameRegister(),
4453              prop->key()->AsLiteral()->value());
4454      __ Pop(StoreDescriptor::ReceiverRegister());
4455      CallStoreIC(expr->CountStoreFeedbackId());
4456      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4457      if (expr->is_postfix()) {
4458        if (!context()->IsEffect()) {
4459          context()->PlugTOS();
4460        }
4461      } else {
4462        context()->Plug(rax);
4463      }
4464      break;
4465    }
4466    case KEYED_PROPERTY: {
4467      __ Pop(StoreDescriptor::NameRegister());
4468      __ Pop(StoreDescriptor::ReceiverRegister());
4469      Handle<Code> ic =
4470          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4471      CallIC(ic, expr->CountStoreFeedbackId());
4472      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4473      if (expr->is_postfix()) {
4474        if (!context()->IsEffect()) {
4475          context()->PlugTOS();
4476        }
4477      } else {
4478        context()->Plug(rax);
4479      }
4480      break;
4481    }
4482  }
4483}
4484
4485
4486void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4487  VariableProxy* proxy = expr->AsVariableProxy();
4488  DCHECK(!context()->IsEffect());
4489  DCHECK(!context()->IsTest());
4490
4491  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4492    Comment cmnt(masm_, "[ Global variable");
4493    __ Move(LoadDescriptor::NameRegister(), proxy->name());
4494    __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4495    if (FLAG_vector_ics) {
4496      __ Move(VectorLoadICDescriptor::SlotRegister(),
4497              Smi::FromInt(proxy->VariableFeedbackSlot()));
4498    }
4499    // Use a regular load, not a contextual load, to avoid a reference
4500    // error.
4501    CallLoadIC(NOT_CONTEXTUAL);
4502    PrepareForBailout(expr, TOS_REG);
4503    context()->Plug(rax);
4504  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4505    Comment cmnt(masm_, "[ Lookup slot");
4506    Label done, slow;
4507
4508    // Generate code for loading from variables potentially shadowed
4509    // by eval-introduced variables.
4510    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4511
4512    __ bind(&slow);
4513    __ Push(rsi);
4514    __ Push(proxy->name());
4515    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4516    PrepareForBailout(expr, TOS_REG);
4517    __ bind(&done);
4518
4519    context()->Plug(rax);
4520  } else {
4521    // This expression cannot throw a reference error at the top level.
4522    VisitInDuplicateContext(expr);
4523  }
4524}
4525
4526
4527void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4528                                                 Expression* sub_expr,
4529                                                 Handle<String> check) {
4530  Label materialize_true, materialize_false;
4531  Label* if_true = NULL;
4532  Label* if_false = NULL;
4533  Label* fall_through = NULL;
4534  context()->PrepareTest(&materialize_true, &materialize_false,
4535                         &if_true, &if_false, &fall_through);
4536
4537  { AccumulatorValueContext context(this);
4538    VisitForTypeofValue(sub_expr);
4539  }
4540  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4541
4542  Factory* factory = isolate()->factory();
4543  if (String::Equals(check, factory->number_string())) {
4544    __ JumpIfSmi(rax, if_true);
4545    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4546    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4547    Split(equal, if_true, if_false, fall_through);
4548  } else if (String::Equals(check, factory->string_string())) {
4549    __ JumpIfSmi(rax, if_false);
4550    // Check for undetectable objects => false.
4551    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4552    __ j(above_equal, if_false);
4553    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4554             Immediate(1 << Map::kIsUndetectable));
4555    Split(zero, if_true, if_false, fall_through);
4556  } else if (String::Equals(check, factory->symbol_string())) {
4557    __ JumpIfSmi(rax, if_false);
4558    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4559    Split(equal, if_true, if_false, fall_through);
4560  } else if (String::Equals(check, factory->boolean_string())) {
4561    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4562    __ j(equal, if_true);
4563    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4564    Split(equal, if_true, if_false, fall_through);
4565  } else if (String::Equals(check, factory->undefined_string())) {
4566    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4567    __ j(equal, if_true);
4568    __ JumpIfSmi(rax, if_false);
4569    // Check for undetectable objects => true.
4570    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4571    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4572             Immediate(1 << Map::kIsUndetectable));
4573    Split(not_zero, if_true, if_false, fall_through);
4574  } else if (String::Equals(check, factory->function_string())) {
4575    __ JumpIfSmi(rax, if_false);
4576    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4577    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4578    __ j(equal, if_true);
4579    __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4580    Split(equal, if_true, if_false, fall_through);
4581  } else if (String::Equals(check, factory->object_string())) {
4582    __ JumpIfSmi(rax, if_false);
4583    __ CompareRoot(rax, Heap::kNullValueRootIndex);
4584    __ j(equal, if_true);
4585    __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4586    __ j(below, if_false);
4587    __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4588    __ j(above, if_false);
4589    // Check for undetectable objects => false.
4590    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4591             Immediate(1 << Map::kIsUndetectable));
4592    Split(zero, if_true, if_false, fall_through);
4593  } else {
4594    if (if_false != fall_through) __ jmp(if_false);
4595  }
4596  context()->Plug(if_true, if_false);
4597}
4598
4599
4600void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4601  Comment cmnt(masm_, "[ CompareOperation");
4602  SetSourcePosition(expr->position());
4603
4604  // First we try a fast inlined version of the compare when one of
4605  // the operands is a literal.
4606  if (TryLiteralCompare(expr)) return;
4607
4608  // Always perform the comparison for its control flow.  Pack the result
4609  // into the expression's context after the comparison is performed.
4610  Label materialize_true, materialize_false;
4611  Label* if_true = NULL;
4612  Label* if_false = NULL;
4613  Label* fall_through = NULL;
4614  context()->PrepareTest(&materialize_true, &materialize_false,
4615                         &if_true, &if_false, &fall_through);
4616
4617  Token::Value op = expr->op();
4618  VisitForStackValue(expr->left());
4619  switch (op) {
4620    case Token::IN:
4621      VisitForStackValue(expr->right());
4622      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4623      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4624      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4625      Split(equal, if_true, if_false, fall_through);
4626      break;
4627
4628    case Token::INSTANCEOF: {
4629      VisitForStackValue(expr->right());
4630      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4631      __ CallStub(&stub);
4632      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4633      __ testp(rax, rax);
4634       // The stub returns 0 for true.
4635      Split(zero, if_true, if_false, fall_through);
4636      break;
4637    }
4638
4639    default: {
4640      VisitForAccumulatorValue(expr->right());
4641      Condition cc = CompareIC::ComputeCondition(op);
4642      __ Pop(rdx);
4643
4644      bool inline_smi_code = ShouldInlineSmiCase(op);
4645      JumpPatchSite patch_site(masm_);
4646      if (inline_smi_code) {
4647        Label slow_case;
4648        __ movp(rcx, rdx);
4649        __ orp(rcx, rax);
4650        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4651        __ cmpp(rdx, rax);
4652        Split(cc, if_true, if_false, NULL);
4653        __ bind(&slow_case);
4654      }
4655
4656      // Record position and call the compare IC.
4657      SetSourcePosition(expr->position());
4658      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4659      CallIC(ic, expr->CompareOperationFeedbackId());
4660      patch_site.EmitPatchInfo();
4661
4662      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4663      __ testp(rax, rax);
4664      Split(cc, if_true, if_false, fall_through);
4665    }
4666  }
4667
4668  // Convert the result of the comparison into one expected for this
4669  // expression's context.
4670  context()->Plug(if_true, if_false);
4671}
4672
4673
4674void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4675                                              Expression* sub_expr,
4676                                              NilValue nil) {
4677  Label materialize_true, materialize_false;
4678  Label* if_true = NULL;
4679  Label* if_false = NULL;
4680  Label* fall_through = NULL;
4681  context()->PrepareTest(&materialize_true, &materialize_false,
4682                         &if_true, &if_false, &fall_through);
4683
4684  VisitForAccumulatorValue(sub_expr);
4685  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4686  if (expr->op() == Token::EQ_STRICT) {
4687    Heap::RootListIndex nil_value = nil == kNullValue ?
4688        Heap::kNullValueRootIndex :
4689        Heap::kUndefinedValueRootIndex;
4690    __ CompareRoot(rax, nil_value);
4691    Split(equal, if_true, if_false, fall_through);
4692  } else {
4693    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4694    CallIC(ic, expr->CompareOperationFeedbackId());
4695    __ testp(rax, rax);
4696    Split(not_zero, if_true, if_false, fall_through);
4697  }
4698  context()->Plug(if_true, if_false);
4699}
4700
4701
4702void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4703  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4704  context()->Plug(rax);
4705}
4706
4707
4708Register FullCodeGenerator::result_register() {
4709  return rax;
4710}
4711
4712
4713Register FullCodeGenerator::context_register() {
4714  return rsi;
4715}
4716
4717
4718void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4719  DCHECK(IsAligned(frame_offset, kPointerSize));
4720  __ movp(Operand(rbp, frame_offset), value);
4721}
4722
4723
4724void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4725  __ movp(dst, ContextOperand(rsi, context_index));
4726}
4727
4728
4729void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4730  Scope* declaration_scope = scope()->DeclarationScope();
4731  if (declaration_scope->is_global_scope() ||
4732      declaration_scope->is_module_scope()) {
4733    // Contexts nested in the native context have a canonical empty function
4734    // as their closure, not the anonymous closure containing the global
4735    // code.  Pass a smi sentinel and let the runtime look up the empty
4736    // function.
4737    __ Push(Smi::FromInt(0));
4738  } else if (declaration_scope->is_eval_scope()) {
4739    // Contexts created by a call to eval have the same closure as the
4740    // context calling eval, not the anonymous closure containing the eval
4741    // code.  Fetch it from the context.
4742    __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4743  } else {
4744    DCHECK(declaration_scope->is_function_scope());
4745    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4746  }
4747}
4748
4749
4750// ----------------------------------------------------------------------------
4751// Non-local control flow support.
4752
4753
4754void FullCodeGenerator::EnterFinallyBlock() {
4755  DCHECK(!result_register().is(rdx));
4756  DCHECK(!result_register().is(rcx));
4757  // Cook return address on top of stack (smi encoded Code* delta)
4758  __ PopReturnAddressTo(rdx);
4759  __ Move(rcx, masm_->CodeObject());
4760  __ subp(rdx, rcx);
4761  __ Integer32ToSmi(rdx, rdx);
4762  __ Push(rdx);
4763
4764  // Store result register while executing finally block.
4765  __ Push(result_register());
4766
4767  // Store pending message while executing finally block.
4768  ExternalReference pending_message_obj =
4769      ExternalReference::address_of_pending_message_obj(isolate());
4770  __ Load(rdx, pending_message_obj);
4771  __ Push(rdx);
4772
4773  ExternalReference has_pending_message =
4774      ExternalReference::address_of_has_pending_message(isolate());
4775  __ Load(rdx, has_pending_message);
4776  __ Integer32ToSmi(rdx, rdx);
4777  __ Push(rdx);
4778
4779  ExternalReference pending_message_script =
4780      ExternalReference::address_of_pending_message_script(isolate());
4781  __ Load(rdx, pending_message_script);
4782  __ Push(rdx);
4783}
4784
4785
4786void FullCodeGenerator::ExitFinallyBlock() {
4787  DCHECK(!result_register().is(rdx));
4788  DCHECK(!result_register().is(rcx));
4789  // Restore pending message from stack.
4790  __ Pop(rdx);
4791  ExternalReference pending_message_script =
4792      ExternalReference::address_of_pending_message_script(isolate());
4793  __ Store(pending_message_script, rdx);
4794
4795  __ Pop(rdx);
4796  __ SmiToInteger32(rdx, rdx);
4797  ExternalReference has_pending_message =
4798      ExternalReference::address_of_has_pending_message(isolate());
4799  __ Store(has_pending_message, rdx);
4800
4801  __ Pop(rdx);
4802  ExternalReference pending_message_obj =
4803      ExternalReference::address_of_pending_message_obj(isolate());
4804  __ Store(pending_message_obj, rdx);
4805
4806  // Restore result register from stack.
4807  __ Pop(result_register());
4808
4809  // Uncook return address.
4810  __ Pop(rdx);
4811  __ SmiToInteger32(rdx, rdx);
4812  __ Move(rcx, masm_->CodeObject());
4813  __ addp(rdx, rcx);
4814  __ jmp(rdx);
4815}
4816
4817
4818#undef __
4819
4820#define __ ACCESS_MASM(masm())
4821
4822FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4823    int* stack_depth,
4824    int* context_length) {
4825  // The macros used here must preserve the result register.
4826
4827  // Because the handler block contains the context of the finally
4828  // code, we can restore it directly from there for the finally code
4829  // rather than iteratively unwinding contexts via their previous
4830  // links.
4831  __ Drop(*stack_depth);  // Down to the handler block.
4832  if (*context_length > 0) {
4833    // Restore the context to its dedicated register and the stack.
4834    __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4835    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4836  }
4837  __ PopTryHandler();
4838  __ call(finally_entry_);
4839
4840  *stack_depth = 0;
4841  *context_length = 0;
4842  return previous_;
4843}
4844
4845
4846#undef __
4847
4848
4849static const byte kJnsInstruction = 0x79;
4850static const byte kNopByteOne = 0x66;
4851static const byte kNopByteTwo = 0x90;
4852#ifdef DEBUG
4853static const byte kCallInstruction = 0xe8;
4854#endif
4855
4856
4857void BackEdgeTable::PatchAt(Code* unoptimized_code,
4858                            Address pc,
4859                            BackEdgeState target_state,
4860                            Code* replacement_code) {
4861  Address call_target_address = pc - kIntSize;
4862  Address jns_instr_address = call_target_address - 3;
4863  Address jns_offset_address = call_target_address - 2;
4864
4865  switch (target_state) {
4866    case INTERRUPT:
4867      //     sub <profiling_counter>, <delta>  ;; Not changed
4868      //     jns ok
4869      //     call <interrupt stub>
4870      //   ok:
4871      *jns_instr_address = kJnsInstruction;
4872      *jns_offset_address = kJnsOffset;
4873      break;
4874    case ON_STACK_REPLACEMENT:
4875    case OSR_AFTER_STACK_CHECK:
4876      //     sub <profiling_counter>, <delta>  ;; Not changed
4877      //     nop
4878      //     nop
4879      //     call <on-stack replacment>
4880      //   ok:
4881      *jns_instr_address = kNopByteOne;
4882      *jns_offset_address = kNopByteTwo;
4883      break;
4884  }
4885
4886  Assembler::set_target_address_at(call_target_address,
4887                                   unoptimized_code,
4888                                   replacement_code->entry());
4889  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4890      unoptimized_code, call_target_address, replacement_code);
4891}
4892
4893
4894BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4895    Isolate* isolate,
4896    Code* unoptimized_code,
4897    Address pc) {
4898  Address call_target_address = pc - kIntSize;
4899  Address jns_instr_address = call_target_address - 3;
4900  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4901
4902  if (*jns_instr_address == kJnsInstruction) {
4903    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4904    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4905              Assembler::target_address_at(call_target_address,
4906                                           unoptimized_code));
4907    return INTERRUPT;
4908  }
4909
4910  DCHECK_EQ(kNopByteOne, *jns_instr_address);
4911  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4912
4913  if (Assembler::target_address_at(call_target_address,
4914                                   unoptimized_code) ==
4915      isolate->builtins()->OnStackReplacement()->entry()) {
4916    return ON_STACK_REPLACEMENT;
4917  }
4918
4919  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4920            Assembler::target_address_at(call_target_address,
4921                                         unoptimized_code));
4922  return OSR_AFTER_STACK_CHECK;
4923}
4924
4925
4926} }  // namespace v8::internal
4927
4928#endif  // V8_TARGET_ARCH_X64
4929