// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/full-codegen/full-codegen.h"
#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/ic/ic.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())
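// By V8 convention, '__' emits through the current MacroAssembler, so the
// codegen below reads like assembly.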

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
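    // On x86, 'test' always clears the carry flag, so the jc/jnc emitted by
    // EmitJump below have a fixed outcome until the IC patcher rewrites them.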
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
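      // This test is a runtime no-op; its 8-bit immediate records the
      // distance back to the patch site so the inline smi-check patcher can
      // locate and rewrite the jump emitted above.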
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x87.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ecx, FieldOperand(ecx, LiteralsArray::kFeedbackVectorOffset));
    __ add(FieldOperand(
               ecx, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                        TypeFeedbackVector::kHeaderSize),
           Immediate(Smi::FromInt(1)));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
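      // Initialize locals in unrolled batches of kMaxPushes: the counted
      // loop below covers whole batches, and the remainder is emitted
      // straight-line afterwards.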
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

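  // Track whether edi still holds the closure; the stub and runtime calls
  // below may clobber it.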
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
        FastNewFunctionContextStub stub(isolate());
        __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
               Immediate(slots));
        __ CallStub(&stub);
        // Result of FastNewFunctionContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved on the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
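    // Index -1 denotes the receiver ('this'); real parameters start at 0.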
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is unused and we allocate a context, |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this-function, which is used
  // in derived constructors with super calls.
  Variable* this_function_var = info->scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = info->scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate a rest parameter array.
  Variable* rest_param = info->scope()->rest_parameter();
  if (rest_param != nullptr) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // The arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::kZero));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
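  // Note: SafeMove (unlike plain Move) appears to avoid embedding the raw
  // smi immediate in the instruction stream (a JIT-spraying mitigation),
  // which is presumably why smis take a separate path here.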
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, variable->mode());
      DCHECK(!variable->binding_needs_init());
      __ push(Immediate(variable->name()));
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ EmitLoadTypeFeedbackVector(eax);
  __ Push(eax);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default clause is not a test; remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
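      // A value is a smi iff its low tag bit is clear, so edx|eax is
      // smi-tagged exactly when both operands are smis.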
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
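  // The loop keeps five values on the stack (top first): the current index,
  // the length, the enum cache (or fixed array of keys), the expected map
  // (or a smi marker for the slow path), and the enumerable object itself.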

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so they will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::kZero));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::kZero));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::kZero));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register eax.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
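  // The index in eax is a smi (value << 1 on ia32), so times_2 scaling
  // yields a byte offset of index * kPointerSize into the fixed array.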
  __ mov(eax, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Load into edx the expected map from the stack, or a smi marker in the
  // permanent slow case.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ebx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ebx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // eax contains the key, and the receiver in ebx is the second argument to
  // ForInFilter. ForInFilter returns the name-converted key, or undefined if
  // the receiver doesn't have the key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register eax.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
  for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
    if (!s->NeedsContext()) continue;
    if (s->calls_sloppy_eval()) {
      // Check that extension is "the hole".
      __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                       Heap::kTheHoleValueRootIndex, slow);
    }
    // Load next context in chain.
    __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
    // Walk the rest of the chain without clobbering esi.
    context = temp;
    to_check--;
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->NeedsContext()) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->binding_needs_init()) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
    } else {
      __ jmp(done);
    }
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(property->GetSlot(0), key->value());
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only consumer of allocation sites is transitioning, tracking
    // can be turned off when there is nothing left to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
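  // For example, in  var a = [1, foo(), 3];  the constants 1 and 3 are
  // already present in the boilerplate copy, so only foo() is evaluated
  // and stored by the loop below.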
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
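  // For example, x += y loads x, computes x + y, and only then stores the
  // result, so the load itself needs its own deoptimization point.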
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(eax);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
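  // Illustrative source reaching this code:
  //   function* g() { var sent = yield 42; return sent; }
  // The operand of the yield (42) is evaluated and pushed first; the
  // generator object itself is only loaded in the suspend path below.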
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, eax holds the generator object.
  __ RecordGeneratorContinuation();
  __ mov(ebx, FieldOperand(eax, JSGeneratorObject::kResumeModeOffset));
  __ mov(eax, FieldOperand(eax, JSGeneratorObject::kInputOrDebugPosOffset));
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
  __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::kReturn)));
  __ j(less, &resume);
  __ Push(result_register());
  __ j(greater, &exception);
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
                                              : Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(continuation.pos())));
  __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
  __ mov(ecx, esi);
  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                      kDontSaveFPRegs);
  __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
  __ cmp(esp, ebx);
  __ j(equal, &post_runtime);
  __ push(eax);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ mov(eax, ebp);
    __ sub(eax, esp);
    __ cmp(eax, Immediate(expected_diff));
    __ Assert(equal, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate, Label::kNear);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  OperandStackDepthDecrement(1);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
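  // On ia32 a smi is the integer shifted left by one, with a zero tag bit:
  // e.g. 2 and 3 are tagged as 4 and 6. Or-ing the operands therefore
  // tests both tags at once: 4 | 6 = 6 keeps the tag bit clear, while any
  // heap object pointer would contribute a set low bit.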
  Label smi_case, done, stub_call;
  PopOperand(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No overflow check of the result is necessary.
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
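      // cmp computes eax - 0xc0000000 = eax + 0x40000000 (mod 2^32); its
      // sign bit is clear exactly when eax lies in the 31-bit smi range
      // [-2^30, 2^30): e.g. 2^30 - 1 passes, while 2^30 does not.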
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
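      // The product is zero here; a smi cannot represent -0, so fall back
      // to the stub if either operand was negative (e.g. -2 * 0).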
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ClassLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      PushOperand(Operand(esp, kPointerSize));  // constructor
    } else {
      PushOperand(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static 'prototype' property is read-only. The non-computed property
    // name case is handled in the parser. Since this is the only place where
    // we need to check for an own read-only property, we special-case it here
    // rather than performing the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(eax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ClassLiteral::Property::METHOD:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ClassLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ClassLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      case ClassLiteral::Property::FIELD:
        UNREACHABLE();
        break;
    }
  }
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // Unbound; signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; eax: home_object
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      CallKeyedStoreIC(slot);
      break;
    }
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
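    // ecx is assumed to hold the context here: callers obtain 'location'
    // via VarOperand(var, ecx), which loads the context for context slots.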
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, ecx);
    // Perform an initialization check for lexically declared variables.
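    // e.g. in  { x = 1; let x; }  the assignment runs in the temporal dead
    // zone: the slot still holds the hole, so a ReferenceError is thrown.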
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(Immediate(var->name()));
      __ Push(eax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // eax : value
  // stack : receiver ('this'), home_object, key

  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  CallKeyedStoreIC(expr->AssignmentSlot());
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  PushOperand(key->value());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  VisitForStackValue(prop->key());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Move(eax, Immediate(arg_count));
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, eax);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Push the source position of the eval call.
  __ push(Immediate(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
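  // For example, in  function f(s) { return eval(s); }  the name 'eval'
  // may or may not refer to the built-in at runtime, so the actual target
  // is resolved just before the call.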
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  SetCallPosition(expr);
  Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
                                          expr->tail_call_mode())
                          .code();
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Move(eax, Immediate(arg_count));
  __ call(code, RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, eax);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
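  // e.g. for  new C(a, b)  the expression C is evaluated and pushed here,
  // followed by a and b, before the construct builtin is invoked.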
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(eax);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
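  // Illustratively, for  class B extends A { constructor() { super(); } }
  // the target is A: it is read off the prototype of the current
  // function's map, since a derived constructor inherits from its base.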
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
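  // e.g. "abc".charCodeAt(5) evaluates to NaN rather than throwing.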
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
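        // e.g. 'delete foo' is a SyntaxError in strict mode code, while
        // 'delete this' is permitted and evaluates to true.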
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(ebx, eax);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::kZero);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));                       // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
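        // Stack layouts at this point (top of stack first), with the
        // result slot reserved above:
        //   NAMED_PROPERTY:        receiver, result-slot
        //   NAMED_SUPER_PROPERTY:  home_object, this, result-slot
        //   KEYED_PROPERTY:        key, receiver, result-slot
        //   KEYED_SUPER_PROPERTY:  key, home_object, this, result-slot
        // The offsets below address the result slot past these leftovers.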
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ mov(Operand(esp, 3 * kPointerSize), eax);
            break;
        }
      }
    }

    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
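    // Smis on this 32-bit port are tagged as (value << 1), so adding the
    // raw word Smi::FromInt(1) bumps the payload by one, and overflow of
    // the 31-bit payload surfaces as x86 signed overflow (the OF flag).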
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
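      // The stack layouts are the same as in the inline smi case above;
      // see the layout comment there.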
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ mov(Operand(esp, 3 * kPointerSize), eax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
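  // On ia32 the BinaryOpIC convention passes the left operand in edx and
  // the right operand in eax, so the old value becomes the left operand
  // of "value + 1" / "value - 1".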
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
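    // typeof null is "object", so null must not fall through to the
    // undetectable check below.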
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
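    // Masking both bits and comparing against kIsCallable alone ensures
    // that callable but undetectable objects (e.g. document.all) report
    // "undefined" rather than "function".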
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
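        // Or'ing the operands checks both for smi-ness at once: the low
        // (tag) bit of the result is clear only if both tag bits are.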
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
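  // For "===" only the exact nil value compares true.  For sloppy "=="
  // the comparison also holds for the other nil and for undetectable
  // objects, which the map bit test below covers (the null and undefined
  // oddballs have the undetectable bit set in their maps).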
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
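  // Each deferred command is a break, continue, return or rethrow that
  // had to run this finally block first; it was recorded with a unique
  // Smi token, and the token popped here selects which pending action
  // to resume.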
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
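// 0x79 is the one-byte opcode of "jns rel8" and 0x11 its encoded jump
// distance; 0x66 0x90 (an operand-size prefix followed by nop) is the
// canonical two-byte nop that overwrites the jns when patching in the
// on-stack replacement call.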
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;
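  // The patched site is "jns/nop-pair; call rel32" (0xe8 plus a 4-byte
  // offset): pc points just past the call, so the 32-bit operand begins
  // at pc - kIntSize, the 0xe8 opcode byte sits one byte before it, and
  // the two jns bytes precede the opcode.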

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87