1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_IA32
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ia32/frames-ia32.h"
14#include "src/ic/ic.h"
15#include "src/parsing/parser.h"
16
17namespace v8 {
18namespace internal {
19
20#define __ ACCESS_MASM(masm_)
21
22
23class JumpPatchSite BASE_EMBEDDED {
24 public:
25  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
26#ifdef DEBUG
27    info_emitted_ = false;
28#endif
29  }
30
31  ~JumpPatchSite() {
32    DCHECK(patch_site_.is_bound() == info_emitted_);
33  }
34
35  void EmitJumpIfNotSmi(Register reg,
36                        Label* target,
37                        Label::Distance distance = Label::kFar) {
38    __ test(reg, Immediate(kSmiTagMask));
39    EmitJump(not_carry, target, distance);  // Always taken before patched.
40  }
41
42  void EmitJumpIfSmi(Register reg,
43                     Label* target,
44                     Label::Distance distance = Label::kFar) {
45    __ test(reg, Immediate(kSmiTagMask));
46    EmitJump(carry, target, distance);  // Never taken before patched.
47  }
48
49  void EmitPatchInfo() {
50    if (patch_site_.is_bound()) {
51      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
52      DCHECK(is_uint8(delta_to_patch_site));
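      // The 8-bit immediate of this 'test eax' instruction records how far
      // back the inlined smi-check jump is, so that the IC patching code can
      // locate and rewrite it.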
53      __ test(eax, Immediate(delta_to_patch_site));
54#ifdef DEBUG
55      info_emitted_ = true;
56#endif
57    } else {
58      __ nop();  // Signals no inlined code.
59    }
60  }
61
62 private:
63  // jc will be patched with jz, jnc will become jnz.
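  // Until then the outcome is statically known: 'test' clears CF, so at the
  // call sites above the jc is never taken and the jnc is always taken.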
64  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
65    DCHECK(!patch_site_.is_bound() && !info_emitted_);
66    DCHECK(cc == carry || cc == not_carry);
67    __ bind(&patch_site_);
68    __ j(cc, target, distance);
69  }
70
71  MacroAssembler* masm_;
72  Label patch_site_;
73#ifdef DEBUG
74  bool info_emitted_;
75#endif
76};
77
78
79// Generate code for a JS function.  On entry to the function the receiver
80// and arguments have been pushed on the stack left to right, with the
81// return address on top of them.  The actual argument count matches the
82// formal parameter count expected by the function.
83//
84// The live registers are:
85//   o edi: the JS function object being called (i.e. ourselves)
86//   o edx: the new target value
87//   o esi: our context
88//   o ebp: our caller's frame pointer
89//   o esp: stack pointer (pointing to return address)
90//
91// The function builds a JS frame.  Please see JavaScriptFrameConstants in
92// frames-ia32.h for its layout.
93void FullCodeGenerator::Generate() {
94  CompilationInfo* info = info_;
95  profiling_counter_ = isolate()->factory()->NewCell(
96      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
97  SetFunctionPosition(literal());
98  Comment cmnt(masm_, "[ function compiled by full code generator");
99
100  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
101
102#ifdef DEBUG
103  if (strlen(FLAG_stop_at) > 0 &&
104      literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
105    __ int3();
106  }
107#endif
108
109  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
110    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
111    __ mov(ecx, Operand(esp, receiver_offset));
112    __ AssertNotSmi(ecx);
113    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
114    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
115  }
116
117  // Open a frame scope to indicate that there is a frame on the stack.  The
118  // MANUAL indicates that the scope shouldn't actually generate code to set up
119  // the frame (that is done below).
120  FrameScope frame_scope(masm_, StackFrame::MANUAL);
121
122  info->set_prologue_offset(masm_->pc_offset());
123  __ Prologue(info->GeneratePreagedPrologue());
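  // The prologue sets up the standard JS frame: it pushes ebp, sets ebp to
  // esp, and then pushes esi (the context) and edi (the function).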
124
125  { Comment cmnt(masm_, "[ Allocate locals");
126    int locals_count = info->scope()->num_stack_slots();
127    // Generators allocate locals, if any, in context slots.
128    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
129    if (locals_count == 1) {
130      __ push(Immediate(isolate()->factory()->undefined_value()));
131    } else if (locals_count > 1) {
132      if (locals_count >= 128) {
133        Label ok;
134        __ mov(ecx, esp);
135        __ sub(ecx, Immediate(locals_count * kPointerSize));
136        ExternalReference stack_limit =
137            ExternalReference::address_of_real_stack_limit(isolate());
138        __ cmp(ecx, Operand::StaticVariable(stack_limit));
139        __ j(above_equal, &ok, Label::kNear);
140        __ CallRuntime(Runtime::kThrowStackOverflow);
141        __ bind(&ok);
142      }
143      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
144      const int kMaxPushes = 32;
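      // Initialize the locals in batches of kMaxPushes inside a loop to keep
      // the generated code size bounded; the remainder is pushed below.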
145      if (locals_count >= kMaxPushes) {
146        int loop_iterations = locals_count / kMaxPushes;
147        __ mov(ecx, loop_iterations);
148        Label loop_header;
149        __ bind(&loop_header);
150        // Do pushes.
151        for (int i = 0; i < kMaxPushes; i++) {
152          __ push(eax);
153        }
154        __ dec(ecx);
155        __ j(not_zero, &loop_header, Label::kNear);
156      }
157      int remaining = locals_count % kMaxPushes;
158      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
160        __ push(eax);
161      }
162    }
163  }
164
165  bool function_in_register = true;
166
167  // Possibly allocate a local context.
168  if (info->scope()->num_heap_slots() > 0) {
169    Comment cmnt(masm_, "[ Allocate context");
170    bool need_write_barrier = true;
171    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
172    // Argument to NewContext is the function, which is still in edi.
173    if (info->scope()->is_script_scope()) {
174      __ push(edi);
175      __ Push(info->scope()->GetScopeInfo(info->isolate()));
176      __ CallRuntime(Runtime::kNewScriptContext);
177      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, so clobbering it is safe.
179      DCHECK_NULL(info->scope()->new_target_var());
180    } else {
181      if (info->scope()->new_target_var() != nullptr) {
182        __ push(edx);  // Preserve new target.
183      }
184      if (slots <= FastNewContextStub::kMaximumSlots) {
185        FastNewContextStub stub(isolate(), slots);
186        __ CallStub(&stub);
187        // Result of FastNewContextStub is always in new space.
188        need_write_barrier = false;
189      } else {
190        __ push(edi);
191        __ CallRuntime(Runtime::kNewFunctionContext);
192      }
193      if (info->scope()->new_target_var() != nullptr) {
194        __ pop(edx);  // Restore new target.
195      }
196    }
197    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved on the stack and kept live in esi.
200    __ mov(esi, eax);
201    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
202
203    // Copy parameters into context if necessary.
204    int num_parameters = info->scope()->num_parameters();
205    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
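    // Parameter index -1 denotes the receiver ('this').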
206    for (int i = first_parameter; i < num_parameters; i++) {
207      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
208      if (var->IsContextSlot()) {
209        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
210            (num_parameters - 1 - i) * kPointerSize;
211        // Load parameter from stack.
212        __ mov(eax, Operand(ebp, parameter_offset));
213        // Store it in the context.
214        int context_offset = Context::SlotOffset(var->index());
215        __ mov(Operand(esi, context_offset), eax);
216        // Update the write barrier. This clobbers eax and ebx.
217        if (need_write_barrier) {
218          __ RecordWriteContextSlot(esi,
219                                    context_offset,
220                                    eax,
221                                    ebx,
222                                    kDontSaveFPRegs);
223        } else if (FLAG_debug_code) {
224          Label done;
225          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
226          __ Abort(kExpectedNewSpaceObject);
227          __ bind(&done);
228        }
229      }
230    }
231  }
232
  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is unused and we allocate a context, the value of |function_in_register|
  // is still correct.
236  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
237
238  // Possibly set up a local binding to the this function which is used in
239  // derived constructors with super calls.
240  Variable* this_function_var = scope()->this_function_var();
241  if (this_function_var != nullptr) {
242    Comment cmnt(masm_, "[ This function");
243    if (!function_in_register) {
244      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
246    }
247    SetVar(this_function_var, edi, ebx, ecx);
248  }
249
250  // Possibly set up a local binding to the new target value.
251  Variable* new_target_var = scope()->new_target_var();
252  if (new_target_var != nullptr) {
253    Comment cmnt(masm_, "[ new.target");
254    SetVar(new_target_var, edx, ebx, ecx);
255  }
256
  // Possibly allocate the rest parameter array.
258  int rest_index;
259  Variable* rest_param = scope()->rest_parameter(&rest_index);
260  if (rest_param) {
261    Comment cmnt(masm_, "[ Allocate rest parameter array");
262
263    int num_parameters = info->scope()->num_parameters();
264    int offset = num_parameters * kPointerSize;
265
266    __ mov(RestParamAccessDescriptor::parameter_count(),
267           Immediate(Smi::FromInt(num_parameters)));
268    __ lea(RestParamAccessDescriptor::parameter_pointer(),
269           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
270    __ mov(RestParamAccessDescriptor::rest_parameter_index(),
271           Immediate(Smi::FromInt(rest_index)));
272    function_in_register = false;
273
274    RestParamAccessStub stub(isolate());
275    __ CallStub(&stub);
276    SetVar(rest_param, eax, ebx, edx);
277  }
278
279  Variable* arguments = scope()->arguments();
280  if (arguments != NULL) {
281    // Function uses arguments object.
282    Comment cmnt(masm_, "[ Allocate arguments object");
283    DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
284    if (!function_in_register) {
285      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
286    }
287    // Receiver is just before the parameters on the caller's stack.
288    int num_parameters = info->scope()->num_parameters();
289    int offset = num_parameters * kPointerSize;
290    __ mov(ArgumentsAccessNewDescriptor::parameter_count(),
291           Immediate(Smi::FromInt(num_parameters)));
292    __ lea(ArgumentsAccessNewDescriptor::parameter_pointer(),
293           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
294
295    // Arguments to ArgumentsAccessStub:
296    //   function, parameter pointer, parameter count.
297    // The stub will rewrite parameter pointer and parameter count if the
298    // previous stack frame was an arguments adapter frame.
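    // Strict-mode functions and functions with non-simple parameter lists get
    // an unmapped arguments object, i.e. one that does not alias the formal
    // parameters.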
299    bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
300    ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
301        is_unmapped, literal()->has_duplicate_parameters());
302    ArgumentsAccessStub stub(isolate(), type);
303    __ CallStub(&stub);
304
305    SetVar(arguments, eax, ebx, edx);
306  }
307
308  if (FLAG_trace) {
309    __ CallRuntime(Runtime::kTraceEnter);
310  }
311
312  // Visit the declarations and body unless there is an illegal
313  // redeclaration.
314  if (scope()->HasIllegalRedeclaration()) {
315    Comment cmnt(masm_, "[ Declarations");
316    VisitForEffect(scope()->GetIllegalRedeclaration());
317
318  } else {
319    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
320    { Comment cmnt(masm_, "[ Declarations");
321      VisitDeclarations(scope()->declarations());
322    }
323
324    // Assert that the declarations do not use ICs. Otherwise the debugger
325    // won't be able to redirect a PC at an IC to the correct IC in newly
326    // recompiled code.
327    DCHECK_EQ(0, ic_total_count_);
328
329    { Comment cmnt(masm_, "[ Stack check");
330      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
331      Label ok;
332      ExternalReference stack_limit
333          = ExternalReference::address_of_stack_limit(isolate());
334      __ cmp(esp, Operand::StaticVariable(stack_limit));
335      __ j(above_equal, &ok, Label::kNear);
336      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
337      __ bind(&ok);
338    }
339
340    { Comment cmnt(masm_, "[ Body");
341      DCHECK(loop_depth() == 0);
342      VisitStatements(literal()->body());
343      DCHECK(loop_depth() == 0);
344    }
345  }
346
347  // Always emit a 'return undefined' in case control fell off the end of
348  // the body.
349  { Comment cmnt(masm_, "[ return <undefined>;");
350    __ mov(eax, isolate()->factory()->undefined_value());
351    EmitReturnSequence();
352  }
353}
354
355
356void FullCodeGenerator::ClearAccumulator() {
357  __ Move(eax, Immediate(Smi::FromInt(0)));
358}
359
360
361void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
362  __ mov(ebx, Immediate(profiling_counter_));
363  __ sub(FieldOperand(ebx, Cell::kValueOffset),
364         Immediate(Smi::FromInt(delta)));
365}
366
367
368void FullCodeGenerator::EmitProfilingCounterReset() {
369  int reset_value = FLAG_interrupt_budget;
370  __ mov(ebx, Immediate(profiling_counter_));
371  __ mov(FieldOperand(ebx, Cell::kValueOffset),
372         Immediate(Smi::FromInt(reset_value)));
373}
374
375
376void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
377                                                Label* back_edge_target) {
378  Comment cmnt(masm_, "[ Back edge bookkeeping");
379  Label ok;
380
381  DCHECK(back_edge_target->is_bound());
382  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
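  // Weight the back edge by the size of the code it jumps over, so that
  // larger loop bodies drain the profiling counter faster.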
383  int weight = Min(kMaxBackEdgeWeight,
384                   Max(1, distance / kCodeSizeMultiplier));
385  EmitProfilingCounterDecrement(weight);
386  __ j(positive, &ok, Label::kNear);
387  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
388
389  // Record a mapping of this PC offset to the OSR id.  This is used to find
390  // the AST id from the unoptimized code in order to use it as a key into
391  // the deoptimization input data found in the optimized code.
392  RecordBackEdge(stmt->OsrEntryId());
393
394  EmitProfilingCounterReset();
395
396  __ bind(&ok);
397  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
398  // Record a mapping of the OSR id to this PC.  This is used if the OSR
399  // entry becomes the target of a bailout.  We don't expect it to be, but
400  // we want it to work if it is.
401  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
402}
403
404
405void FullCodeGenerator::EmitReturnSequence() {
406  Comment cmnt(masm_, "[ Return sequence");
407  if (return_label_.is_bound()) {
408    __ jmp(&return_label_);
409  } else {
    // Common return label.
411    __ bind(&return_label_);
412    if (FLAG_trace) {
413      __ push(eax);
414      __ CallRuntime(Runtime::kTraceExit);
415    }
416    // Pretend that the exit is a backwards jump to the entry.
417    int weight = 1;
418    if (info_->ShouldSelfOptimize()) {
419      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
420    } else {
421      int distance = masm_->pc_offset();
422      weight = Min(kMaxBackEdgeWeight,
423                   Max(1, distance / kCodeSizeMultiplier));
424    }
425    EmitProfilingCounterDecrement(weight);
426    Label ok;
427    __ j(positive, &ok, Label::kNear);
428    __ push(eax);
429    __ call(isolate()->builtins()->InterruptCheck(),
430            RelocInfo::CODE_TARGET);
431    __ pop(eax);
432    EmitProfilingCounterReset();
433    __ bind(&ok);
434
435    SetReturnPosition(literal());
436    __ leave();
437
438    int arg_count = info_->scope()->num_parameters() + 1;
439    int arguments_bytes = arg_count * kPointerSize;
440    __ Ret(arguments_bytes, ecx);
441  }
442}
443
444
445void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
446  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
447  MemOperand operand = codegen()->VarOperand(var, result_register());
448  // Memory operands can be pushed directly.
449  __ push(operand);
450}
451
452
453void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
454  UNREACHABLE();  // Not used on IA32.
455}
456
457
458void FullCodeGenerator::AccumulatorValueContext::Plug(
459    Heap::RootListIndex index) const {
460  UNREACHABLE();  // Not used on IA32.
461}
462
463
464void FullCodeGenerator::StackValueContext::Plug(
465    Heap::RootListIndex index) const {
466  UNREACHABLE();  // Not used on IA32.
467}
468
469
470void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
471  UNREACHABLE();  // Not used on IA32.
472}
473
474
475void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
476}
477
478
479void FullCodeGenerator::AccumulatorValueContext::Plug(
480    Handle<Object> lit) const {
481  if (lit->IsSmi()) {
482    __ SafeMove(result_register(), Immediate(lit));
483  } else {
484    __ Move(result_register(), Immediate(lit));
485  }
486}
487
488
489void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
490  if (lit->IsSmi()) {
491    __ SafePush(Immediate(lit));
492  } else {
493    __ push(Immediate(lit));
494  }
495}
496
497
498void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
499  codegen()->PrepareForBailoutBeforeSplit(condition(),
500                                          true,
501                                          true_label_,
502                                          false_label_);
503  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
504  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
505    if (false_label_ != fall_through_) __ jmp(false_label_);
506  } else if (lit->IsTrue() || lit->IsJSObject()) {
507    if (true_label_ != fall_through_) __ jmp(true_label_);
508  } else if (lit->IsString()) {
509    if (String::cast(*lit)->length() == 0) {
510      if (false_label_ != fall_through_) __ jmp(false_label_);
511    } else {
512      if (true_label_ != fall_through_) __ jmp(true_label_);
513    }
514  } else if (lit->IsSmi()) {
515    if (Smi::cast(*lit)->value() == 0) {
516      if (false_label_ != fall_through_) __ jmp(false_label_);
517    } else {
518      if (true_label_ != fall_through_) __ jmp(true_label_);
519    }
520  } else {
521    // For simplicity we always test the accumulator register.
522    __ mov(result_register(), lit);
523    codegen()->DoTest(this);
524  }
525}
526
527
528void FullCodeGenerator::EffectContext::DropAndPlug(int count,
529                                                   Register reg) const {
530  DCHECK(count > 0);
531  __ Drop(count);
532}
533
534
535void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
536    int count,
537    Register reg) const {
538  DCHECK(count > 0);
539  __ Drop(count);
540  __ Move(result_register(), reg);
541}
542
543
544void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
545                                                       Register reg) const {
546  DCHECK(count > 0);
547  if (count > 1) __ Drop(count - 1);
548  __ mov(Operand(esp, 0), reg);
549}
550
551
552void FullCodeGenerator::TestContext::DropAndPlug(int count,
553                                                 Register reg) const {
554  DCHECK(count > 0);
555  // For simplicity we always test the accumulator register.
556  __ Drop(count);
557  __ Move(result_register(), reg);
558  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
559  codegen()->DoTest(this);
560}
561
562
563void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
564                                            Label* materialize_false) const {
565  DCHECK(materialize_true == materialize_false);
566  __ bind(materialize_true);
567}
568
569
570void FullCodeGenerator::AccumulatorValueContext::Plug(
571    Label* materialize_true,
572    Label* materialize_false) const {
573  Label done;
574  __ bind(materialize_true);
575  __ mov(result_register(), isolate()->factory()->true_value());
576  __ jmp(&done, Label::kNear);
577  __ bind(materialize_false);
578  __ mov(result_register(), isolate()->factory()->false_value());
579  __ bind(&done);
580}
581
582
583void FullCodeGenerator::StackValueContext::Plug(
584    Label* materialize_true,
585    Label* materialize_false) const {
586  Label done;
587  __ bind(materialize_true);
588  __ push(Immediate(isolate()->factory()->true_value()));
589  __ jmp(&done, Label::kNear);
590  __ bind(materialize_false);
591  __ push(Immediate(isolate()->factory()->false_value()));
592  __ bind(&done);
593}
594
595
596void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
597                                          Label* materialize_false) const {
598  DCHECK(materialize_true == true_label_);
599  DCHECK(materialize_false == false_label_);
600}
601
602
603void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
604  Handle<Object> value = flag
605      ? isolate()->factory()->true_value()
606      : isolate()->factory()->false_value();
607  __ mov(result_register(), value);
608}
609
610
611void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
612  Handle<Object> value = flag
613      ? isolate()->factory()->true_value()
614      : isolate()->factory()->false_value();
615  __ push(Immediate(value));
616}
617
618
619void FullCodeGenerator::TestContext::Plug(bool flag) const {
620  codegen()->PrepareForBailoutBeforeSplit(condition(),
621                                          true,
622                                          true_label_,
623                                          false_label_);
624  if (flag) {
625    if (true_label_ != fall_through_) __ jmp(true_label_);
626  } else {
627    if (false_label_ != fall_through_) __ jmp(false_label_);
628  }
629}
630
631
632void FullCodeGenerator::DoTest(Expression* condition,
633                               Label* if_true,
634                               Label* if_false,
635                               Label* fall_through) {
636  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
637  CallIC(ic, condition->test_id());
638  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
639  Split(equal, if_true, if_false, fall_through);
640}
641
642
643void FullCodeGenerator::Split(Condition cc,
644                              Label* if_true,
645                              Label* if_false,
646                              Label* fall_through) {
647  if (if_false == fall_through) {
648    __ j(cc, if_true);
649  } else if (if_true == fall_through) {
650    __ j(NegateCondition(cc), if_false);
651  } else {
652    __ j(cc, if_true);
653    __ jmp(if_false);
654  }
655}
656
657
658MemOperand FullCodeGenerator::StackOperand(Variable* var) {
659  DCHECK(var->IsStackAllocated());
660  // Offset is negative because higher indexes are at lower addresses.
661  int offset = -var->index() * kPointerSize;
662  // Adjust by a (parameter or local) base offset.
663  if (var->IsParameter()) {
664    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
665  } else {
666    offset += JavaScriptFrameConstants::kLocal0Offset;
667  }
668  return Operand(ebp, offset);
669}
670
671
672MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
673  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
674  if (var->IsContextSlot()) {
675    int context_chain_length = scope()->ContextChainLength(var->scope());
676    __ LoadContext(scratch, context_chain_length);
677    return ContextOperand(scratch, var->index());
678  } else {
679    return StackOperand(var);
680  }
681}
682
683
684void FullCodeGenerator::GetVar(Register dest, Variable* var) {
685  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
686  MemOperand location = VarOperand(var, dest);
687  __ mov(dest, location);
688}
689
690
691void FullCodeGenerator::SetVar(Variable* var,
692                               Register src,
693                               Register scratch0,
694                               Register scratch1) {
695  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
696  DCHECK(!scratch0.is(src));
697  DCHECK(!scratch0.is(scratch1));
698  DCHECK(!scratch1.is(src));
699  MemOperand location = VarOperand(var, scratch0);
700  __ mov(location, src);
701
702  // Emit the write barrier code if the location is in the heap.
703  if (var->IsContextSlot()) {
704    int offset = Context::SlotOffset(var->index());
705    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
706    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
707  }
708}
709
710
711void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
712                                                     bool should_normalize,
713                                                     Label* if_true,
714                                                     Label* if_false) {
715  // Only prepare for bailouts before splits if we're in a test
716  // context. Otherwise, we let the Visit function deal with the
717  // preparation to avoid preparing with the same AST id twice.
718  if (!context()->IsTest()) return;
719
720  Label skip;
721  if (should_normalize) __ jmp(&skip, Label::kNear);
722  PrepareForBailout(expr, TOS_REG);
723  if (should_normalize) {
724    __ cmp(eax, isolate()->factory()->true_value());
725    Split(equal, if_true, if_false, NULL);
726    __ bind(&skip);
727  }
728}
729
730
731void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
732  // The variable in the declaration always resides in the current context.
733  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
734  if (generate_debug_code_) {
735    // Check that we're not inside a with or catch context.
736    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
737    __ cmp(ebx, isolate()->factory()->with_context_map());
738    __ Check(not_equal, kDeclarationInWithContext);
739    __ cmp(ebx, isolate()->factory()->catch_context_map());
740    __ Check(not_equal, kDeclarationInCatchContext);
741  }
742}
743
744
745void FullCodeGenerator::VisitVariableDeclaration(
746    VariableDeclaration* declaration) {
747  // If it was not possible to allocate the variable at compile time, we
748  // need to "declare" it at runtime to make sure it actually exists in the
749  // local context.
750  VariableProxy* proxy = declaration->proxy();
751  VariableMode mode = declaration->mode();
752  Variable* variable = proxy->var();
753  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
754  switch (variable->location()) {
755    case VariableLocation::GLOBAL:
756    case VariableLocation::UNALLOCATED:
757      globals_->Add(variable->name(), zone());
758      globals_->Add(variable->binding_needs_init()
759                        ? isolate()->factory()->the_hole_value()
760                        : isolate()->factory()->undefined_value(), zone());
761      break;
762
763    case VariableLocation::PARAMETER:
764    case VariableLocation::LOCAL:
765      if (hole_init) {
766        Comment cmnt(masm_, "[ VariableDeclaration");
767        __ mov(StackOperand(variable),
768               Immediate(isolate()->factory()->the_hole_value()));
769      }
770      break;
771
772    case VariableLocation::CONTEXT:
773      if (hole_init) {
774        Comment cmnt(masm_, "[ VariableDeclaration");
775        EmitDebugCheckDeclarationContext(variable);
776        __ mov(ContextOperand(esi, variable->index()),
777               Immediate(isolate()->factory()->the_hole_value()));
778        // No write barrier since the hole value is in old space.
779        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
780      }
781      break;
782
783    case VariableLocation::LOOKUP: {
784      Comment cmnt(masm_, "[ VariableDeclaration");
785      __ push(Immediate(variable->name()));
786      // VariableDeclaration nodes are always introduced in one of four modes.
787      DCHECK(IsDeclaredVariableMode(mode));
788      // Push initial value, if any.
789      // Note: For variables we must not push an initial value (such as
790      // 'undefined') because we may have a (legal) redeclaration and we
791      // must not destroy the current value.
792      if (hole_init) {
793        __ push(Immediate(isolate()->factory()->the_hole_value()));
794      } else {
795        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
796      }
797      __ push(
798          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
799      __ CallRuntime(Runtime::kDeclareLookupSlot);
800      break;
801    }
802  }
803}
804
805
806void FullCodeGenerator::VisitFunctionDeclaration(
807    FunctionDeclaration* declaration) {
808  VariableProxy* proxy = declaration->proxy();
809  Variable* variable = proxy->var();
810  switch (variable->location()) {
811    case VariableLocation::GLOBAL:
812    case VariableLocation::UNALLOCATED: {
813      globals_->Add(variable->name(), zone());
814      Handle<SharedFunctionInfo> function =
815          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
816      // Check for stack-overflow exception.
817      if (function.is_null()) return SetStackOverflow();
818      globals_->Add(function, zone());
819      break;
820    }
821
822    case VariableLocation::PARAMETER:
823    case VariableLocation::LOCAL: {
824      Comment cmnt(masm_, "[ FunctionDeclaration");
825      VisitForAccumulatorValue(declaration->fun());
826      __ mov(StackOperand(variable), result_register());
827      break;
828    }
829
830    case VariableLocation::CONTEXT: {
831      Comment cmnt(masm_, "[ FunctionDeclaration");
832      EmitDebugCheckDeclarationContext(variable);
833      VisitForAccumulatorValue(declaration->fun());
834      __ mov(ContextOperand(esi, variable->index()), result_register());
835      // We know that we have written a function, which is not a smi.
836      __ RecordWriteContextSlot(esi,
837                                Context::SlotOffset(variable->index()),
838                                result_register(),
839                                ecx,
840                                kDontSaveFPRegs,
841                                EMIT_REMEMBERED_SET,
842                                OMIT_SMI_CHECK);
843      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
844      break;
845    }
846
847    case VariableLocation::LOOKUP: {
848      Comment cmnt(masm_, "[ FunctionDeclaration");
849      __ push(Immediate(variable->name()));
850      VisitForStackValue(declaration->fun());
851      __ push(
852          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
853      __ CallRuntime(Runtime::kDeclareLookupSlot);
854      break;
855    }
856  }
857}
858
859
860void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
861  // Call the runtime to declare the globals.
862  __ Push(pairs);
863  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
864  __ CallRuntime(Runtime::kDeclareGlobals);
865  // Return value is ignored.
866}
867
868
869void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
870  // Call the runtime to declare the modules.
871  __ Push(descriptions);
872  __ CallRuntime(Runtime::kDeclareModules);
873  // Return value is ignored.
874}
875
876
877void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
878  Comment cmnt(masm_, "[ SwitchStatement");
879  Breakable nested_statement(this, stmt);
880  SetStatementPosition(stmt);
881
882  // Keep the switch value on the stack until a case matches.
883  VisitForStackValue(stmt->tag());
884  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
885
886  ZoneList<CaseClause*>* clauses = stmt->cases();
887  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
888
889  Label next_test;  // Recycled for each test.
890  // Compile all the tests with branches to their bodies.
891  for (int i = 0; i < clauses->length(); i++) {
892    CaseClause* clause = clauses->at(i);
893    clause->body_target()->Unuse();
894
    // The default is not a test, but remember it as the final fall-through.
896    if (clause->is_default()) {
897      default_clause = clause;
898      continue;
899    }
900
901    Comment cmnt(masm_, "[ Case comparison");
902    __ bind(&next_test);
903    next_test.Unuse();
904
905    // Compile the label expression.
906    VisitForAccumulatorValue(clause->label());
907
908    // Perform the comparison as if via '==='.
909    __ mov(edx, Operand(esp, 0));  // Switch value.
910    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
911    JumpPatchSite patch_site(masm_);
912    if (inline_smi_code) {
913      Label slow_case;
914      __ mov(ecx, edx);
915      __ or_(ecx, eax);
916      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
917
918      __ cmp(edx, eax);
919      __ j(not_equal, &next_test);
920      __ Drop(1);  // Switch value is no longer needed.
921      __ jmp(clause->body_target());
922      __ bind(&slow_case);
923    }
924
925    SetExpressionPosition(clause);
926    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
927                                             strength(language_mode())).code();
928    CallIC(ic, clause->CompareId());
929    patch_site.EmitPatchInfo();
930
931    Label skip;
932    __ jmp(&skip, Label::kNear);
933    PrepareForBailout(clause, TOS_REG);
934    __ cmp(eax, isolate()->factory()->true_value());
935    __ j(not_equal, &next_test);
936    __ Drop(1);
937    __ jmp(clause->body_target());
938    __ bind(&skip);
939
940    __ test(eax, eax);
941    __ j(not_equal, &next_test);
942    __ Drop(1);  // Switch value is no longer needed.
943    __ jmp(clause->body_target());
944  }
945
946  // Discard the test value and jump to the default if present, otherwise to
947  // the end of the statement.
948  __ bind(&next_test);
949  __ Drop(1);  // Switch value is no longer needed.
950  if (default_clause == NULL) {
951    __ jmp(nested_statement.break_label());
952  } else {
953    __ jmp(default_clause->body_target());
954  }
955
956  // Compile all the case bodies.
957  for (int i = 0; i < clauses->length(); i++) {
958    Comment cmnt(masm_, "[ Case body");
959    CaseClause* clause = clauses->at(i);
960    __ bind(clause->body_target());
961    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
962    VisitStatements(clause->statements());
963  }
964
965  __ bind(nested_statement.break_label());
966  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
967}
968
969
970void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
971  Comment cmnt(masm_, "[ ForInStatement");
972  SetStatementPosition(stmt, SKIP_BREAK);
973
974  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
975
976  Label loop, exit;
977  ForIn loop_statement(this, stmt);
978  increment_loop_depth();
979
980  // Get the object to enumerate over. If the object is null or undefined, skip
981  // over the loop.  See ECMA-262 version 5, section 12.6.4.
982  SetExpressionAsStatementPosition(stmt->enumerable());
983  VisitForAccumulatorValue(stmt->enumerable());
984  __ cmp(eax, isolate()->factory()->undefined_value());
985  __ j(equal, &exit);
986  __ cmp(eax, isolate()->factory()->null_value());
987  __ j(equal, &exit);
988
989  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
990
991  // Convert the object to a JS object.
992  Label convert, done_convert;
993  __ JumpIfSmi(eax, &convert, Label::kNear);
994  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
995  __ j(above_equal, &done_convert, Label::kNear);
996  __ bind(&convert);
997  ToObjectStub stub(isolate());
998  __ CallStub(&stub);
999  __ bind(&done_convert);
1000  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1001  __ push(eax);
1002
1003  // Check for proxies.
1004  Label call_runtime, use_cache, fixed_array;
1005  __ CmpObjectType(eax, JS_PROXY_TYPE, ecx);
1006  __ j(equal, &call_runtime);
1007
1008  // Check cache validity in generated code. This is a fast case for
1009  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1010  // guarantee cache validity, call the runtime system to check cache
1011  // validity or get the property names in a fixed array.
1012  __ CheckEnumCache(&call_runtime);
1013
1014  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1015  __ jmp(&use_cache, Label::kNear);
1016
1017  // Get the set of properties to enumerate.
1018  __ bind(&call_runtime);
1019  __ push(eax);
1020  __ CallRuntime(Runtime::kGetPropertyNamesFast);
1021  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1022  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1023         isolate()->factory()->meta_map());
1024  __ j(not_equal, &fixed_array);
1027  // We got a map in register eax. Get the enumeration cache from it.
1028  Label no_descriptors;
1029  __ bind(&use_cache);
1030
1031  __ EnumLength(edx, eax);
1032  __ cmp(edx, Immediate(Smi::FromInt(0)));
1033  __ j(equal, &no_descriptors);
1034
1035  __ LoadInstanceDescriptors(eax, ecx);
1036  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1037  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1038
1039  // Set up the four remaining stack slots.
1040  __ push(eax);  // Map.
1041  __ push(ecx);  // Enumeration cache.
1042  __ push(edx);  // Number of valid entries for the map in the enum cache.
1043  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
1044  __ jmp(&loop);
1045
1046  __ bind(&no_descriptors);
1047  __ add(esp, Immediate(kPointerSize));
1048  __ jmp(&exit);
1049
1050  // We got a fixed array in register eax. Iterate through that.
1051  __ bind(&fixed_array);
1052
  // No need for a write barrier; we are storing a Smi in the feedback vector.
1054  __ EmitLoadTypeFeedbackVector(ebx);
1055  int vector_index = SmiFromSlot(slot)->value();
1056  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
1057         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates a slow check.
1059  __ push(eax);  // Array
1060  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1061  __ push(eax);  // Fixed array length (as smi).
1062  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
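  // In either case the loop state now occupies five stack slots (from the
  // top): the current index, the loop bound (array or enum-cache length), the
  // fixed array or enum cache, the expected map or Smi(1), and the enumerable
  // object.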
1063
1064  // Generate code for doing the condition check.
1065  __ bind(&loop);
1066  SetExpressionAsStatementPosition(stmt->each());
1067
1068  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
1069  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
1070  __ j(above_equal, loop_statement.break_label());
1071
1072  // Get the current entry of the array into register ebx.
1073  __ mov(ebx, Operand(esp, 2 * kPointerSize));
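  // eax holds a smi index; on ia32 a smi is the value shifted left by one, so
  // the times_2 scale factor converts it into the byte offset of the element.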
1074  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1075
  // Get the expected map from the stack into register edx (or, in the
  // permanent slow case, the Smi(1) sentinel).
1078  __ mov(edx, Operand(esp, 3 * kPointerSize));
1079
1080  // Check if the expected map still matches that of the enumerable.
1081  // If not, we may have to filter the key.
1082  Label update_each;
1083  __ mov(ecx, Operand(esp, 4 * kPointerSize));
1084  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1085  __ j(equal, &update_each, Label::kNear);
1086
1087  // Convert the entry to a string or null if it isn't a property
1088  // anymore. If the property has been removed while iterating, we
1089  // just skip it.
1090  __ push(ecx);  // Enumerable.
1091  __ push(ebx);  // Current entry.
1092  __ CallRuntime(Runtime::kForInFilter);
1093  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1094  __ cmp(eax, isolate()->factory()->undefined_value());
1095  __ j(equal, loop_statement.continue_label());
1096  __ mov(ebx, eax);
1097
1098  // Update the 'each' property or variable from the possibly filtered
1099  // entry in register ebx.
1100  __ bind(&update_each);
1101  __ mov(result_register(), ebx);
1102  // Perform the assignment as if via '='.
1103  { EffectContext context(this);
1104    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1105    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1106  }
1107
1108  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1109  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1110  // Generate code for the body of the loop.
1111  Visit(stmt->body());
1112
1113  // Generate code for going to the next element by incrementing the
1114  // index (smi) stored on top of the stack.
1115  __ bind(loop_statement.continue_label());
1116  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1117
1118  EmitBackEdgeBookkeeping(stmt, &loop);
1119  __ jmp(&loop);
1120
1121  // Remove the pointers stored on the stack.
1122  __ bind(loop_statement.break_label());
1123  __ add(esp, Immediate(5 * kPointerSize));
1124
1125  // Exit and decrement the loop depth.
1126  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1127  __ bind(&exit);
1128  decrement_loop_depth();
1129}
1130
1131
1132void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1133                                       bool pretenure) {
  // Use the fast-case closure allocation code that allocates in new
  // space for nested functions that don't need literal cloning. If
1136  // we're running with the --always-opt or the --prepare-always-opt
1137  // flag, we need to use the runtime function so that the new function
1138  // we are creating here gets a chance to have its code optimized and
1139  // doesn't just get a copy of the existing unoptimized code.
1140  if (!FLAG_always_opt &&
1141      !FLAG_prepare_always_opt &&
1142      !pretenure &&
1143      scope()->is_function_scope() &&
1144      info->num_literals() == 0) {
1145    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1146    __ mov(ebx, Immediate(info));
1147    __ CallStub(&stub);
1148  } else {
1149    __ push(Immediate(info));
1150    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1151                             : Runtime::kNewClosure);
1152  }
1153  context()->Plug(eax);
1154}
1155
1156
1157void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1158                                          FeedbackVectorSlot slot) {
1159  DCHECK(NeedsHomeObject(initializer));
1160  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1161  __ mov(StoreDescriptor::NameRegister(),
1162         Immediate(isolate()->factory()->home_object_symbol()));
1163  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
1164  EmitLoadStoreICSlot(slot);
1165  CallStoreIC();
1166}
1167
1168
1169void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1170                                                     int offset,
1171                                                     FeedbackVectorSlot slot) {
1172  DCHECK(NeedsHomeObject(initializer));
1173  __ mov(StoreDescriptor::ReceiverRegister(), eax);
1174  __ mov(StoreDescriptor::NameRegister(),
1175         Immediate(isolate()->factory()->home_object_symbol()));
1176  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
1177  EmitLoadStoreICSlot(slot);
1178  CallStoreIC();
1179}
1180
1181
1182void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1183                                                      TypeofMode typeof_mode,
1184                                                      Label* slow) {
1185  Register context = esi;
1186  Register temp = edx;
1187
1188  Scope* s = scope();
1189  while (s != NULL) {
1190    if (s->num_heap_slots() > 0) {
1191      if (s->calls_sloppy_eval()) {
1192        // Check that extension is "the hole".
1193        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1194                         Heap::kTheHoleValueRootIndex, slow);
1195      }
1196      // Load next context in chain.
1197      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1198      // Walk the rest of the chain without clobbering esi.
1199      context = temp;
1200    }
1201    // If no outer scope calls eval, we do not need to check more
1202    // context extensions.  If we have reached an eval scope, we check
1203    // all extensions from this point.
1204    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1205    s = s->outer_scope();
1206  }
1207
1208  if (s != NULL && s->is_eval_scope()) {
1209    // Loop up the context chain.  There is no frame effect so it is
1210    // safe to use raw labels here.
1211    Label next, fast;
1212    if (!context.is(temp)) {
1213      __ mov(temp, context);
1214    }
1215    __ bind(&next);
1216    // Terminate at native context.
1217    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1218           Immediate(isolate()->factory()->native_context_map()));
1219    __ j(equal, &fast, Label::kNear);
1220    // Check that extension is "the hole".
1221    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
1222                     Heap::kTheHoleValueRootIndex, slow);
1223    // Load next context in chain.
1224    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1225    __ jmp(&next);
1226    __ bind(&fast);
1227  }
1228
  // All extension objects were empty, so it is safe to use the normal global
  // load machinery.
1231  EmitGlobalVariableLoad(proxy, typeof_mode);
1232}
1233
1234
1235MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1236                                                                Label* slow) {
1237  DCHECK(var->IsContextSlot());
1238  Register context = esi;
1239  Register temp = ebx;
1240
1241  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1242    if (s->num_heap_slots() > 0) {
1243      if (s->calls_sloppy_eval()) {
1244        // Check that extension is "the hole".
1245        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1246                         Heap::kTheHoleValueRootIndex, slow);
1247      }
1248      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1249      // Walk the rest of the chain without clobbering esi.
1250      context = temp;
1251    }
1252  }
1253  // Check that last extension is "the hole".
1254  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1255                   Heap::kTheHoleValueRootIndex, slow);
1256
1257  // This function is used only for loads, not stores, so it's safe to
1258  // return an esi-based operand (the write barrier cannot be allowed to
1259  // destroy the esi register).
1260  return ContextOperand(context, var->index());
1261}
1262
1263
1264void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1265                                                  TypeofMode typeof_mode,
1266                                                  Label* slow, Label* done) {
1267  // Generate fast-case code for variables that might be shadowed by
1268  // eval-introduced variables.  Eval is used a lot without
1269  // introducing variables.  In those cases, we do not want to
1270  // perform a runtime call for all variables in the scope
1271  // containing the eval.
1272  Variable* var = proxy->var();
1273  if (var->mode() == DYNAMIC_GLOBAL) {
1274    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1275    __ jmp(done);
1276  } else if (var->mode() == DYNAMIC_LOCAL) {
1277    Variable* local = var->local_if_not_shadowed();
1278    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1279    if (local->mode() == LET || local->mode() == CONST ||
1280        local->mode() == CONST_LEGACY) {
1281      __ cmp(eax, isolate()->factory()->the_hole_value());
1282      __ j(not_equal, done);
1283      if (local->mode() == CONST_LEGACY) {
1284        __ mov(eax, isolate()->factory()->undefined_value());
1285      } else {  // LET || CONST
1286        __ push(Immediate(var->name()));
1287        __ CallRuntime(Runtime::kThrowReferenceError);
1288      }
1289    }
1290    __ jmp(done);
1291  }
1292}
1293
1294
1295void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1296                                               TypeofMode typeof_mode) {
1297  Variable* var = proxy->var();
1298  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1299         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1300  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
1301  __ mov(LoadDescriptor::ReceiverRegister(),
1302         ContextOperand(LoadDescriptor::ReceiverRegister(),
1303                        Context::EXTENSION_INDEX));
1304  __ mov(LoadDescriptor::NameRegister(), var->name());
1305  __ mov(LoadDescriptor::SlotRegister(),
1306         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
1307  CallLoadIC(typeof_mode);
1308}
1309
1310
1311void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1312                                         TypeofMode typeof_mode) {
1313  SetExpressionPosition(proxy);
1314  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1315  Variable* var = proxy->var();
1316
1317  // Three cases: global variables, lookup variables, and all other types of
1318  // variables.
1319  switch (var->location()) {
1320    case VariableLocation::GLOBAL:
1321    case VariableLocation::UNALLOCATED: {
1322      Comment cmnt(masm_, "[ Global variable");
1323      EmitGlobalVariableLoad(proxy, typeof_mode);
1324      context()->Plug(eax);
1325      break;
1326    }
1327
1328    case VariableLocation::PARAMETER:
1329    case VariableLocation::LOCAL:
1330    case VariableLocation::CONTEXT: {
1331      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1332      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1333                                               : "[ Stack variable");
1334
1335      if (NeedsHoleCheckForLoad(proxy)) {
1336        // Let and const need a read barrier.
1337        Label done;
1338        GetVar(eax, var);
1339        __ cmp(eax, isolate()->factory()->the_hole_value());
1340        __ j(not_equal, &done, Label::kNear);
1341        if (var->mode() == LET || var->mode() == CONST) {
1342          // Throw a reference error when using an uninitialized let/const
1343          // binding in harmony mode.
1344          __ push(Immediate(var->name()));
1345          __ CallRuntime(Runtime::kThrowReferenceError);
1346        } else {
1347          // Uninitialized legacy const bindings are unholed.
1348          DCHECK(var->mode() == CONST_LEGACY);
1349          __ mov(eax, isolate()->factory()->undefined_value());
1350        }
1351        __ bind(&done);
1352        context()->Plug(eax);
1353        break;
1354      }
1355      context()->Plug(var);
1356      break;
1357    }
1358
1359    case VariableLocation::LOOKUP: {
1360      Comment cmnt(masm_, "[ Lookup variable");
1361      Label done, slow;
1362      // Generate code for loading from variables potentially shadowed
1363      // by eval-introduced variables.
1364      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1365      __ bind(&slow);
1366      __ push(esi);  // Context.
1367      __ push(Immediate(var->name()));
1368      Runtime::FunctionId function_id =
1369          typeof_mode == NOT_INSIDE_TYPEOF
1370              ? Runtime::kLoadLookupSlot
1371              : Runtime::kLoadLookupSlotNoReferenceError;
1372      __ CallRuntime(function_id);
1373      __ bind(&done);
1374      context()->Plug(eax);
1375      break;
1376    }
1377  }
1378}
1379
1380
1381void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1382  Comment cmnt(masm_, "[ RegExpLiteral");
1383  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1384  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
1385  __ Move(ecx, Immediate(expr->pattern()));
1386  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
1387  FastCloneRegExpStub stub(isolate());
1388  __ CallStub(&stub);
1389  context()->Plug(eax);
1390}
1391
1392
1393void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1394  Expression* expression = (property == NULL) ? NULL : property->value();
1395  if (expression == NULL) {
1396    __ push(Immediate(isolate()->factory()->null_value()));
1397  } else {
1398    VisitForStackValue(expression);
1399    if (NeedsHomeObject(expression)) {
1400      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1401             property->kind() == ObjectLiteral::Property::SETTER);
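      // The home object (the object literal) is two slots below a getter on
      // the stack and three below a setter, since the getter has already
      // been pushed by the time the setter is emitted.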
1402      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1403      EmitSetHomeObject(expression, offset, property->GetSlot());
1404    }
1405  }
1406}
1407
1408
1409void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1410  Comment cmnt(masm_, "[ ObjectLiteral");
1411
1412  Handle<FixedArray> constant_properties = expr->constant_properties();
1413  int flags = expr->ComputeFlags();
1414  // If any of the keys would store to the elements array, then we shouldn't
1415  // allow it.
1416  if (MustCreateObjectLiteralWithRuntime(expr)) {
1417    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1418    __ push(Immediate(Smi::FromInt(expr->literal_index())));
1419    __ push(Immediate(constant_properties));
1420    __ push(Immediate(Smi::FromInt(flags)));
1421    __ CallRuntime(Runtime::kCreateObjectLiteral);
1422  } else {
1423    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1424    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1425    __ mov(ecx, Immediate(constant_properties));
1426    __ mov(edx, Immediate(Smi::FromInt(flags)));
1427    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1428    __ CallStub(&stub);
1429  }
1430  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1431
1432  // If result_saved is true the result is on top of the stack.  If
1433  // result_saved is false the result is in eax.
1434  bool result_saved = false;
1435
1436  AccessorTable accessor_table(zone());
1437  int property_index = 0;
1438  for (; property_index < expr->properties()->length(); property_index++) {
1439    ObjectLiteral::Property* property = expr->properties()->at(property_index);
1440    if (property->is_computed_name()) break;
1441    if (property->IsCompileTimeValue()) continue;
1442
1443    Literal* key = property->key()->AsLiteral();
1444    Expression* value = property->value();
1445    if (!result_saved) {
1446      __ push(eax);  // Save result on the stack
1447      result_saved = true;
1448    }
1449    switch (property->kind()) {
1450      case ObjectLiteral::Property::CONSTANT:
1451        UNREACHABLE();
1452      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1453        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1454        // Fall through.
1455      case ObjectLiteral::Property::COMPUTED:
1456        // It is safe to use [[Put]] here because the boilerplate already
1457        // contains computed properties with an uninitialized value.
1458        if (key->value()->IsInternalizedString()) {
1459          if (property->emit_store()) {
1460            VisitForAccumulatorValue(value);
1461            DCHECK(StoreDescriptor::ValueRegister().is(eax));
1462            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1463            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1464            EmitLoadStoreICSlot(property->GetSlot(0));
1465            CallStoreIC();
1466            PrepareForBailoutForId(key->id(), NO_REGISTERS);
1467            if (NeedsHomeObject(value)) {
1468              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1469            }
1470          } else {
1471            VisitForEffect(value);
1472          }
1473          break;
1474        }
1475        __ push(Operand(esp, 0));  // Duplicate receiver.
1476        VisitForStackValue(key);
1477        VisitForStackValue(value);
1478        if (property->emit_store()) {
1479          if (NeedsHomeObject(value)) {
1480            EmitSetHomeObject(value, 2, property->GetSlot());
1481          }
1482          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode
1483          __ CallRuntime(Runtime::kSetProperty);
1484        } else {
1485          __ Drop(3);
1486        }
1487        break;
1488      case ObjectLiteral::Property::PROTOTYPE:
1489        __ push(Operand(esp, 0));  // Duplicate receiver.
1490        VisitForStackValue(value);
1491        DCHECK(property->emit_store());
1492        __ CallRuntime(Runtime::kInternalSetPrototype);
1493        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1494                               NO_REGISTERS);
1495        break;
1496      case ObjectLiteral::Property::GETTER:
1497        if (property->emit_store()) {
1498          accessor_table.lookup(key)->second->getter = property;
1499        }
1500        break;
1501      case ObjectLiteral::Property::SETTER:
1502        if (property->emit_store()) {
1503          accessor_table.lookup(key)->second->setter = property;
1504        }
1505        break;
1506    }
1507  }
1508
1509  // Emit code to define accessors, using only a single call to the runtime for
1510  // each pair of corresponding getters and setters.
1511  for (AccessorTable::Iterator it = accessor_table.begin();
1512       it != accessor_table.end();
1513       ++it) {
1514    __ push(Operand(esp, 0));  // Duplicate receiver.
1515    VisitForStackValue(it->first);
1516
1517    EmitAccessor(it->second->getter);
1518    EmitAccessor(it->second->setter);
1519
1520    __ push(Immediate(Smi::FromInt(NONE)));
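    // The runtime call's arguments are now on the stack, bottom to top:
    // receiver, name, getter (or null), setter (or null), attributes.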
1521    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
1522  }
1523
1524  // Object literals have two parts. The "static" part on the left contains no
1525  // computed property names, and so we can compute its map ahead of time; see
1526  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1527  // starts with the first computed property name, and continues with all
1528  // properties to its right.  All the code from above initializes the static
1529  // component of the object literal, and arranges for the map of the result to
1530  // reflect the static order in which the keys appear. For the dynamic
1531  // properties, we compile them into a series of "SetOwnProperty" runtime
1532  // calls. This will preserve insertion order.
1533  for (; property_index < expr->properties()->length(); property_index++) {
1534    ObjectLiteral::Property* property = expr->properties()->at(property_index);
1535
1536    Expression* value = property->value();
1537    if (!result_saved) {
1538      __ push(eax);  // Save result on the stack
1539      result_saved = true;
1540    }
1541
1542    __ push(Operand(esp, 0));  // Duplicate receiver.
1543
1544    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1545      DCHECK(!property->is_computed_name());
1546      VisitForStackValue(value);
1547      DCHECK(property->emit_store());
1548      __ CallRuntime(Runtime::kInternalSetPrototype);
1549      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1550                             NO_REGISTERS);
1551    } else {
1552      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1553      VisitForStackValue(value);
1554      if (NeedsHomeObject(value)) {
1555        EmitSetHomeObject(value, 2, property->GetSlot());
1556      }
1557
1558      switch (property->kind()) {
1559        case ObjectLiteral::Property::CONSTANT:
1560        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1561        case ObjectLiteral::Property::COMPUTED:
1562          if (property->emit_store()) {
1563            __ push(Immediate(Smi::FromInt(NONE)));
1564            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
1565          } else {
1566            __ Drop(3);
1567          }
1568          break;
1569
1570        case ObjectLiteral::Property::PROTOTYPE:
1571          UNREACHABLE();
1572          break;
1573
1574        case ObjectLiteral::Property::GETTER:
1575          __ push(Immediate(Smi::FromInt(NONE)));
1576          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
1577          break;
1578
1579        case ObjectLiteral::Property::SETTER:
1580          __ push(Immediate(Smi::FromInt(NONE)));
1581          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
1582          break;
1583      }
1584    }
1585  }
1586
1587  if (expr->has_function()) {
1588    DCHECK(result_saved);
1589    __ push(Operand(esp, 0));
1590    __ CallRuntime(Runtime::kToFastProperties);
1591  }
1592
1593  if (result_saved) {
1594    context()->PlugTOS();
1595  } else {
1596    context()->Plug(eax);
1597  }
1598}
1599
1600
1601void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1602  Comment cmnt(masm_, "[ ArrayLiteral");
1603
1604  Handle<FixedArray> constant_elements = expr->constant_elements();
1605  bool has_constant_fast_elements =
1606      IsFastObjectElementsKind(expr->constant_elements_kind());
1607
1608  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1609  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is elements-kind transitioning,
    // then we can turn allocation site tracking off, since fast object
    // elements leave nowhere else to transition to.
1612    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1613  }
1614
1615  if (MustCreateArrayLiteralWithRuntime(expr)) {
1616    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1617    __ push(Immediate(Smi::FromInt(expr->literal_index())));
1618    __ push(Immediate(constant_elements));
1619    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1620    __ CallRuntime(Runtime::kCreateArrayLiteral);
1621  } else {
1622    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1623    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1624    __ mov(ecx, Immediate(constant_elements));
1625    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1626    __ CallStub(&stub);
1627  }
1628  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1629
1630  bool result_saved = false;  // Is the result saved to the stack?
1631  ZoneList<Expression*>* subexprs = expr->values();
1632  int length = subexprs->length();
1633
1634  // Emit code to evaluate all the non-constant subexpressions and to store
1635  // them into the newly cloned array.
1636  int array_index = 0;
1637  for (; array_index < length; array_index++) {
1638    Expression* subexpr = subexprs->at(array_index);
1639    if (subexpr->IsSpread()) break;
1640
1641    // If the subexpression is a literal or a simple materialized literal it
1642    // is already set in the cloned array.
1643    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1644
1645    if (!result_saved) {
1646      __ push(eax);  // array literal.
1647      result_saved = true;
1648    }
1649    VisitForAccumulatorValue(subexpr);
1650
1651    __ mov(StoreDescriptor::NameRegister(),
1652           Immediate(Smi::FromInt(array_index)));
1653    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1654    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1655    Handle<Code> ic =
1656        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1657    CallIC(ic);
1658    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1659  }
1660
  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, which has a literal index and is
  // handled above. The second part starts with the first spread expression
  // (inclusive); these elements get appended to the array. Note that the
  // number of elements an iterable produces is unknown ahead of time.
1666  if (array_index < length && result_saved) {
1667    __ Pop(eax);
1668    result_saved = false;
1669  }
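  // From here on eax holds the array. Each iteration pushes it, evaluates the
  // next element (or spread expression), and calls out to code that appends to
  // the array, leaving the (possibly grown) array in eax for the next
  // iteration.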
1670  for (; array_index < length; array_index++) {
1671    Expression* subexpr = subexprs->at(array_index);
1672
1673    __ Push(eax);
1674    if (subexpr->IsSpread()) {
1675      VisitForStackValue(subexpr->AsSpread()->expression());
1676      __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1677                       CALL_FUNCTION);
1678    } else {
1679      VisitForStackValue(subexpr);
1680      __ CallRuntime(Runtime::kAppendElement);
1681    }
1682
1683    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1684  }
1685
1686  if (result_saved) {
1687    context()->PlugTOS();
1688  } else {
1689    context()->Plug(eax);
1690  }
1691}
1692
1693
1694void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1695  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1696
1697  Comment cmnt(masm_, "[ Assignment");
1698  SetExpressionPosition(expr, INSERT_BREAK);
1699
1700  Property* property = expr->target()->AsProperty();
1701  LhsKind assign_type = Property::GetAssignType(property);
1702
1703  // Evaluate LHS expression.
1704  switch (assign_type) {
1705    case VARIABLE:
1706      // Nothing to do here.
1707      break;
1708    case NAMED_SUPER_PROPERTY:
1709      VisitForStackValue(
1710          property->obj()->AsSuperPropertyReference()->this_var());
1711      VisitForAccumulatorValue(
1712          property->obj()->AsSuperPropertyReference()->home_object());
1713      __ push(result_register());
1714      if (expr->is_compound()) {
1715        __ push(MemOperand(esp, kPointerSize));
1716        __ push(result_register());
1717      }
1718      break;
1719    case NAMED_PROPERTY:
1720      if (expr->is_compound()) {
1721        // We need the receiver both on the stack and in the register.
1722        VisitForStackValue(property->obj());
1723        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1724      } else {
1725        VisitForStackValue(property->obj());
1726      }
1727      break;
1728    case KEYED_SUPER_PROPERTY:
1729      VisitForStackValue(
1730          property->obj()->AsSuperPropertyReference()->this_var());
1731      VisitForStackValue(
1732          property->obj()->AsSuperPropertyReference()->home_object());
1733      VisitForAccumulatorValue(property->key());
1734      __ Push(result_register());
1735      if (expr->is_compound()) {
1736        __ push(MemOperand(esp, 2 * kPointerSize));
1737        __ push(MemOperand(esp, 2 * kPointerSize));
1738        __ push(result_register());
1739      }
1740      break;
1741    case KEYED_PROPERTY: {
1742      if (expr->is_compound()) {
1743        VisitForStackValue(property->obj());
1744        VisitForStackValue(property->key());
1745        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1746        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1747      } else {
1748        VisitForStackValue(property->obj());
1749        VisitForStackValue(property->key());
1750      }
1751      break;
1752    }
1753  }
1754
1755  // For compound assignments we need another deoptimization point after the
1756  // variable/property load.
1757  if (expr->is_compound()) {
1758    AccumulatorValueContext result_context(this);
1759    { AccumulatorValueContext left_operand_context(this);
1760      switch (assign_type) {
1761        case VARIABLE:
1762          EmitVariableLoad(expr->target()->AsVariableProxy());
1763          PrepareForBailout(expr->target(), TOS_REG);
1764          break;
1765        case NAMED_SUPER_PROPERTY:
1766          EmitNamedSuperPropertyLoad(property);
1767          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1768          break;
1769        case NAMED_PROPERTY:
1770          EmitNamedPropertyLoad(property);
1771          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1772          break;
1773        case KEYED_SUPER_PROPERTY:
1774          EmitKeyedSuperPropertyLoad(property);
1775          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1776          break;
1777        case KEYED_PROPERTY:
1778          EmitKeyedPropertyLoad(property);
1779          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1780          break;
1781      }
1782    }
1783
1784    Token::Value op = expr->binary_op();
1785    __ push(eax);  // Left operand goes on the stack.
1786    VisitForAccumulatorValue(expr->value());
1787
1788    if (ShouldInlineSmiCase(op)) {
1789      EmitInlineSmiBinaryOp(expr->binary_operation(),
1790                            op,
1791                            expr->target(),
1792                            expr->value());
1793    } else {
1794      EmitBinaryOp(expr->binary_operation(), op);
1795    }
1796
1797    // Deoptimization point in case the binary operation may have side effects.
1798    PrepareForBailout(expr->binary_operation(), TOS_REG);
1799  } else {
1800    VisitForAccumulatorValue(expr->value());
1801  }
1802
1803  SetExpressionPosition(expr);
1804
1805  // Store the value.
1806  switch (assign_type) {
1807    case VARIABLE:
1808      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1809                             expr->op(), expr->AssignmentSlot());
1810      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1811      context()->Plug(eax);
1812      break;
1813    case NAMED_PROPERTY:
1814      EmitNamedPropertyAssignment(expr);
1815      break;
1816    case NAMED_SUPER_PROPERTY:
1817      EmitNamedSuperPropertyStore(property);
1818      context()->Plug(result_register());
1819      break;
1820    case KEYED_SUPER_PROPERTY:
1821      EmitKeyedSuperPropertyStore(property);
1822      context()->Plug(result_register());
1823      break;
1824    case KEYED_PROPERTY:
1825      EmitKeyedPropertyAssignment(expr);
1826      break;
1827  }
1828}
1829
1830
1831void FullCodeGenerator::VisitYield(Yield* expr) {
1832  Comment cmnt(masm_, "[ Yield");
1833  SetExpressionPosition(expr);
1834
1835  // Evaluate yielded value first; the initial iterator definition depends on
1836  // this.  It stays on the stack while we update the iterator.
1837  VisitForStackValue(expr->expression());
1838
1839  switch (expr->yield_kind()) {
1840    case Yield::kSuspend:
1841      // Pop value from top-of-stack slot; box result into result register.
1842      EmitCreateIteratorResult(false);
1843      __ push(result_register());
1844      // Fall through.
1845    case Yield::kInitial: {
1846      Label suspend, continuation, post_runtime, resume;
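      // Control flow: jump to 'suspend', which records the offset of
      // 'continuation' in the generator object and returns to the caller.
      // When the generator is resumed (see EmitGeneratorResume), execution
      // jumps back to 'continuation' and falls through to 'resume'.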
1847
1848      __ jmp(&suspend);
1849      __ bind(&continuation);
1850      __ RecordGeneratorContinuation();
1851      __ jmp(&resume);
1852
1853      __ bind(&suspend);
1854      VisitForAccumulatorValue(expr->generator_object());
1855      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1856      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1857             Immediate(Smi::FromInt(continuation.pos())));
1858      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1859      __ mov(ecx, esi);
1860      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1861                          kDontSaveFPRegs);
1862      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1863      __ cmp(esp, ebx);
1864      __ j(equal, &post_runtime);
1865      __ push(eax);  // generator object
1866      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1867      __ mov(context_register(),
1868             Operand(ebp, StandardFrameConstants::kContextOffset));
1869      __ bind(&post_runtime);
1870      __ pop(result_register());
1871      EmitReturnSequence();
1872
1873      __ bind(&resume);
1874      context()->Plug(result_register());
1875      break;
1876    }
1877
1878    case Yield::kFinal: {
1879      VisitForAccumulatorValue(expr->generator_object());
1880      __ mov(FieldOperand(result_register(),
1881                          JSGeneratorObject::kContinuationOffset),
1882             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
1883      // Pop value from top-of-stack slot, box result into result register.
1884      EmitCreateIteratorResult(true);
1885      EmitUnwindBeforeReturn();
1886      EmitReturnSequence();
1887      break;
1888    }
1889
1890    case Yield::kDelegating: {
1891      VisitForStackValue(expr->generator_object());
1892
1893      // Initial stack layout is as follows:
1894      // [sp + 1 * kPointerSize] iter
1895      // [sp + 0 * kPointerSize] g
1896
1897      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1898      Label l_next, l_call, l_loop;
1899      Register load_receiver = LoadDescriptor::ReceiverRegister();
1900      Register load_name = LoadDescriptor::NameRegister();
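      // Roughly equivalent JS for the delegating yield compiled below (an
      // illustrative sketch, not the exact spec algorithm):
      //   let received = undefined;
      //   while (true) {
      //     let result = iter.next(received);  // or iter.throw(e) via l_catch
      //     if (result.done) break;            // checked at l_loop
      //     received = yield result;           // suspended at l_suspend
      //   }
      //   // the yield* expression evaluates to result.value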
1901
1902      // Initial send value is undefined.
1903      __ mov(eax, isolate()->factory()->undefined_value());
1904      __ jmp(&l_next);
1905
1906      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
1907      __ bind(&l_catch);
1908      __ mov(load_name, isolate()->factory()->throw_string());  // "throw"
1909      __ push(load_name);                                       // "throw"
1910      __ push(Operand(esp, 2 * kPointerSize));                  // iter
1911      __ push(eax);                                             // exception
1912      __ jmp(&l_call);
1913
1914      // try { received = %yield result }
1915      // Shuffle the received result above a try handler and yield it without
1916      // re-boxing.
1917      __ bind(&l_try);
1918      __ pop(eax);                                       // result
1919      int handler_index = NewHandlerTableEntry();
1920      EnterTryBlock(handler_index, &l_catch);
1921      const int try_block_size = TryCatch::kElementCount * kPointerSize;
1922      __ push(eax);                                      // result
1923
1924      __ jmp(&l_suspend);
1925      __ bind(&l_continuation);
1926      __ RecordGeneratorContinuation();
1927      __ jmp(&l_resume);
1928
1929      __ bind(&l_suspend);
1930      const int generator_object_depth = kPointerSize + try_block_size;
1931      __ mov(eax, Operand(esp, generator_object_depth));
1932      __ push(eax);                                      // g
1933      __ push(Immediate(Smi::FromInt(handler_index)));   // handler-index
1934      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
1935      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1936             Immediate(Smi::FromInt(l_continuation.pos())));
1937      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1938      __ mov(ecx, esi);
1939      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1940                          kDontSaveFPRegs);
1941      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
1942      __ mov(context_register(),
1943             Operand(ebp, StandardFrameConstants::kContextOffset));
1944      __ pop(eax);                                       // result
1945      EmitReturnSequence();
1946      __ bind(&l_resume);                                // received in eax
1947      ExitTryBlock(handler_index);
1948
1949      // receiver = iter; f = iter.next; arg = received;
1950      __ bind(&l_next);
1951
1952      __ mov(load_name, isolate()->factory()->next_string());
1953      __ push(load_name);                           // "next"
1954      __ push(Operand(esp, 2 * kPointerSize));      // iter
1955      __ push(eax);                                 // received
1956
1957      // result = receiver[f](arg);
1958      __ bind(&l_call);
1959      __ mov(load_receiver, Operand(esp, kPointerSize));
1960      __ mov(LoadDescriptor::SlotRegister(),
1961             Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
1962      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
1963      CallIC(ic, TypeFeedbackId::None());
1964      __ mov(edi, eax);
1965      __ mov(Operand(esp, 2 * kPointerSize), edi);
1966      SetCallPosition(expr);
1967      __ Set(eax, 1);
1968      __ Call(
1969          isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined),
1970          RelocInfo::CODE_TARGET);
1971
1972      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1973      __ Drop(1);  // The function is still on the stack; drop it.
1974
1975      // if (!result.done) goto l_try;
1976      __ bind(&l_loop);
1977      __ push(eax);                                      // save result
1978      __ Move(load_receiver, eax);                       // result
1979      __ mov(load_name,
1980             isolate()->factory()->done_string());       // "done"
1981      __ mov(LoadDescriptor::SlotRegister(),
1982             Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
1983      CallLoadIC(NOT_INSIDE_TYPEOF);  // result.done in eax
1984      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
1985      CallIC(bool_ic);
1986      __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
1987      __ j(not_equal, &l_try);
1988
1989      // result.value
1990      __ pop(load_receiver);                              // result
1991      __ mov(load_name,
1992             isolate()->factory()->value_string());       // "value"
1993      __ mov(LoadDescriptor::SlotRegister(),
1994             Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
1995      CallLoadIC(NOT_INSIDE_TYPEOF);                      // result.value in eax
1996      context()->DropAndPlug(2, eax);                     // drop iter and g
1997      break;
1998    }
1999  }
2000}
2001
2002
void FullCodeGenerator::EmitGeneratorResume(
    Expression* generator, Expression* value,
    JSGeneratorObject::ResumeMode resume_mode) {
2006  // The value stays in eax, and is ultimately read by the resumed generator, as
2007  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2008  // is read to throw the value when the resumed generator is already closed.
2009  // ebx will hold the generator object until the activation has been resumed.
2010  VisitForStackValue(generator);
2011  VisitForAccumulatorValue(value);
2012  __ pop(ebx);
2013
2014  // Load suspended function and context.
2015  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2016  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2017
2018  // Push receiver.
2019  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2020
2021  // Push holes for arguments to generator function.
2022  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2023  __ mov(edx,
2024         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2025  __ mov(ecx, isolate()->factory()->the_hole_value());
2026  Label push_argument_holes, push_frame;
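  // edx holds the formal parameter count as a Smi. Each iteration subtracts
  // Smi(1) and pushes one hole; the loop exits on carry, i.e. once the count
  // has gone below zero.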
2027  __ bind(&push_argument_holes);
2028  __ sub(edx, Immediate(Smi::FromInt(1)));
2029  __ j(carry, &push_frame);
2030  __ push(ecx);
2031  __ jmp(&push_argument_holes);
2032
2033  // Enter a new JavaScript frame, and initialize its slots as they were when
2034  // the generator was suspended.
2035  Label resume_frame, done;
2036  __ bind(&push_frame);
2037  __ call(&resume_frame);
2038  __ jmp(&done);
2039  __ bind(&resume_frame);
2040  __ push(ebp);  // Caller's frame pointer.
2041  __ mov(ebp, esp);
2042  __ push(esi);  // Callee's context.
2043  __ push(edi);  // Callee's JS Function.
2044
2045  // Load the operand stack size.
2046  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2047  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2048  __ SmiUntag(edx);
2049
2050  // If we are sending a value and there is no operand stack, we can jump back
2051  // in directly.
2052  if (resume_mode == JSGeneratorObject::NEXT) {
2053    Label slow_resume;
2054    __ cmp(edx, Immediate(0));
2055    __ j(not_zero, &slow_resume);
2056    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2057    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
2058    __ SmiUntag(ecx);
2059    __ add(edx, ecx);
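    // edx now points at the resume position inside the generator's code;
    // mark the generator as executing and jump straight back in.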
2060    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2061           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2062    __ jmp(edx);
2063    __ bind(&slow_resume);
2064  }
2065
2066  // Otherwise, we push holes for the operand stack and call the runtime to fix
2067  // up the stack and the handlers.
2068  Label push_operand_holes, call_resume;
2069  __ bind(&push_operand_holes);
2070  __ sub(edx, Immediate(1));
2071  __ j(carry, &call_resume);
2072  __ push(ecx);
2073  __ jmp(&push_operand_holes);
2074  __ bind(&call_resume);
2075  __ push(ebx);
2076  __ push(result_register());
2077  __ Push(Smi::FromInt(resume_mode));
2078  __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2079  // Not reached: the runtime call returns elsewhere.
2080  __ Abort(kGeneratorFailedToResume);
2081
2082  __ bind(&done);
2083  context()->Plug(result_register());
2084}
2085
2086
2087void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2088  Label allocate, done_allocate;
2089
2090  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, TAG_OBJECT);
2091  __ jmp(&done_allocate, Label::kNear);
2092
2093  __ bind(&allocate);
2094  __ Push(Smi::FromInt(JSIteratorResult::kSize));
2095  __ CallRuntime(Runtime::kAllocateInNewSpace);
2096
2097  __ bind(&done_allocate);
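  // eax now holds an uninitialized JSIteratorResult. Fill in its map, empty
  // properties and elements, the value popped from the stack, and the done
  // flag.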
2098  __ mov(ebx, NativeContextOperand());
2099  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2100  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2101  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2102         isolate()->factory()->empty_fixed_array());
2103  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2104         isolate()->factory()->empty_fixed_array());
2105  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
2106  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
2107         isolate()->factory()->ToBoolean(done));
2108  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2109}
2110
2111
2112void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2113  SetExpressionPosition(prop);
2114  Literal* key = prop->key()->AsLiteral();
2115  DCHECK(!key->value()->IsSmi());
2116  DCHECK(!prop->IsSuperAccess());
2117
2118  __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2119  __ mov(LoadDescriptor::SlotRegister(),
2120         Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2121  CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2122}
2123
2124
2125void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2126  // Stack: receiver, home_object.
2127  SetExpressionPosition(prop);
2128  Literal* key = prop->key()->AsLiteral();
2129  DCHECK(!key->value()->IsSmi());
2130  DCHECK(prop->IsSuperAccess());
2131
2132  __ push(Immediate(key->value()));
2133  __ push(Immediate(Smi::FromInt(language_mode())));
2134  __ CallRuntime(Runtime::kLoadFromSuper);
2135}
2136
2137
2138void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2139  SetExpressionPosition(prop);
2140  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2141  __ mov(LoadDescriptor::SlotRegister(),
2142         Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2143  CallIC(ic);
2144}
2145
2146
2147void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2148  // Stack: receiver, home_object, key.
2149  SetExpressionPosition(prop);
2150  __ push(Immediate(Smi::FromInt(language_mode())));
2151  __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2152}
2153
2154
2155void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2156                                              Token::Value op,
2157                                              Expression* left,
2158                                              Expression* right) {
2159  // Do combined smi check of the operands. Left operand is on the
2160  // stack. Right operand is in eax.
2161  Label smi_case, done, stub_call;
2162  __ pop(edx);
2163  __ mov(ecx, eax);
2164  __ or_(eax, edx);
2165  JumpPatchSite patch_site(masm_);
2166  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2167
2168  __ bind(&stub_call);
2169  __ mov(eax, ecx);
2170  Handle<Code> code =
2171      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2172  CallIC(code, expr->BinaryOperationFeedbackId());
2173  patch_site.EmitPatchInfo();
2174  __ jmp(&done, Label::kNear);
2175
2176  // Smi case.
2177  __ bind(&smi_case);
2178  __ mov(eax, edx);  // Copy left operand in case of a stub call.
2179
2180  switch (op) {
2181    case Token::SAR:
2182      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of the result are necessary.
      __ and_(eax, Immediate(~kSmiTagMask));  // Clear the tag bit, which the
                                              // shift may have set.
2185      break;
2186    case Token::SHL: {
2187      Label result_ok;
2188      __ SmiUntag(eax);
2189      __ SmiUntag(ecx);
2190      __ shl_cl(eax);
2191      // Check that the *signed* result fits in a smi.
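      // (The comparison against 0xc0000000 effectively adds 2^30 to the
      // result, so the sign flag is clear exactly when the value lies in the
      // smi range [-2^30, 2^30 - 1].)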
2192      __ cmp(eax, 0xc0000000);
2193      __ j(positive, &result_ok);
2194      __ SmiTag(ecx);
2195      __ jmp(&stub_call);
2196      __ bind(&result_ok);
2197      __ SmiTag(eax);
2198      break;
2199    }
2200    case Token::SHR: {
2201      Label result_ok;
2202      __ SmiUntag(eax);
2203      __ SmiUntag(ecx);
2204      __ shr_cl(eax);
2205      __ test(eax, Immediate(0xc0000000));
2206      __ j(zero, &result_ok);
2207      __ SmiTag(ecx);
2208      __ jmp(&stub_call);
2209      __ bind(&result_ok);
2210      __ SmiTag(eax);
2211      break;
2212    }
2213    case Token::ADD:
2214      __ add(eax, ecx);
2215      __ j(overflow, &stub_call);
2216      break;
2217    case Token::SUB:
2218      __ sub(eax, ecx);
2219      __ j(overflow, &stub_call);
2220      break;
2221    case Token::MUL: {
2222      __ SmiUntag(eax);
2223      __ imul(eax, ecx);
2224      __ j(overflow, &stub_call);
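      // A zero product must be -0 if either operand was negative, which a smi
      // cannot represent, so fall back to the stub in that case.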
2225      __ test(eax, eax);
2226      __ j(not_zero, &done, Label::kNear);
2227      __ mov(ebx, edx);
2228      __ or_(ebx, ecx);
2229      __ j(negative, &stub_call);
2230      break;
2231    }
2232    case Token::BIT_OR:
2233      __ or_(eax, ecx);
2234      break;
2235    case Token::BIT_AND:
2236      __ and_(eax, ecx);
2237      break;
2238    case Token::BIT_XOR:
2239      __ xor_(eax, ecx);
2240      break;
2241    default:
2242      UNREACHABLE();
2243  }
2244
2245  __ bind(&done);
2246  context()->Plug(eax);
2247}
2248
2249
2250void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2251  // Constructor is in eax.
2252  DCHECK(lit != NULL);
2253  __ push(eax);
2254
2255  // No access check is needed here since the constructor is created by the
2256  // class literal.
2257  Register scratch = ebx;
2258  __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
2259  __ Push(scratch);
2260
2261  for (int i = 0; i < lit->properties()->length(); i++) {
2262    ObjectLiteral::Property* property = lit->properties()->at(i);
2263    Expression* value = property->value();
2264
2265    if (property->is_static()) {
2266      __ push(Operand(esp, kPointerSize));  // constructor
2267    } else {
2268      __ push(Operand(esp, 0));  // prototype
2269    }
2270    EmitPropertyKey(property, lit->GetIdForProperty(i));
2271
    // The static prototype property is read-only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here so
    // that we do not have to perform the check for every property.
2276    if (property->is_static() && property->is_computed_name()) {
2277      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2278      __ push(eax);
2279    }
2280
2281    VisitForStackValue(value);
2282    if (NeedsHomeObject(value)) {
2283      EmitSetHomeObject(value, 2, property->GetSlot());
2284    }
2285
2286    switch (property->kind()) {
2287      case ObjectLiteral::Property::CONSTANT:
2288      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2289      case ObjectLiteral::Property::PROTOTYPE:
2290        UNREACHABLE();
2291      case ObjectLiteral::Property::COMPUTED:
2292        __ CallRuntime(Runtime::kDefineClassMethod);
2293        break;
2294
2295      case ObjectLiteral::Property::GETTER:
2296        __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2297        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
2298        break;
2299
2300      case ObjectLiteral::Property::SETTER:
2301        __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2302        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
2303        break;
2304    }
2305  }
2306
2307  // Set both the prototype and constructor to have fast properties, and also
2308  // freeze them in strong mode.
2309  __ CallRuntime(Runtime::kFinalizeClassDefinition);
2310}
2311
2312
2313void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2314  __ pop(edx);
2315  Handle<Code> code =
2316      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2317  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2318  CallIC(code, expr->BinaryOperationFeedbackId());
2319  patch_site.EmitPatchInfo();
2320  context()->Plug(eax);
2321}
2322
2323
2324void FullCodeGenerator::EmitAssignment(Expression* expr,
2325                                       FeedbackVectorSlot slot) {
2326  DCHECK(expr->IsValidReferenceExpressionOrThis());
2327
2328  Property* prop = expr->AsProperty();
2329  LhsKind assign_type = Property::GetAssignType(prop);
2330
2331  switch (assign_type) {
2332    case VARIABLE: {
2333      Variable* var = expr->AsVariableProxy()->var();
2334      EffectContext context(this);
2335      EmitVariableAssignment(var, Token::ASSIGN, slot);
2336      break;
2337    }
2338    case NAMED_PROPERTY: {
2339      __ push(eax);  // Preserve value.
2340      VisitForAccumulatorValue(prop->obj());
2341      __ Move(StoreDescriptor::ReceiverRegister(), eax);
2342      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
2343      __ mov(StoreDescriptor::NameRegister(),
2344             prop->key()->AsLiteral()->value());
2345      EmitLoadStoreICSlot(slot);
2346      CallStoreIC();
2347      break;
2348    }
2349    case NAMED_SUPER_PROPERTY: {
2350      __ push(eax);
2351      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2352      VisitForAccumulatorValue(
2353          prop->obj()->AsSuperPropertyReference()->home_object());
2354      // stack: value, this; eax: home_object
2355      Register scratch = ecx;
2356      Register scratch2 = edx;
2357      __ mov(scratch, result_register());               // home_object
2358      __ mov(eax, MemOperand(esp, kPointerSize));       // value
2359      __ mov(scratch2, MemOperand(esp, 0));             // this
2360      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
2361      __ mov(MemOperand(esp, 0), scratch);              // home_object
2362      // stack: this, home_object. eax: value
2363      EmitNamedSuperPropertyStore(prop);
2364      break;
2365    }
2366    case KEYED_SUPER_PROPERTY: {
2367      __ push(eax);
2368      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2369      VisitForStackValue(
2370          prop->obj()->AsSuperPropertyReference()->home_object());
2371      VisitForAccumulatorValue(prop->key());
2372      Register scratch = ecx;
2373      Register scratch2 = edx;
2374      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
2375      // stack: value, this, home_object; eax: key, edx: value
2376      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
2377      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2378      __ mov(scratch, MemOperand(esp, 0));  // home_object
2379      __ mov(MemOperand(esp, kPointerSize), scratch);
2380      __ mov(MemOperand(esp, 0), eax);
2381      __ mov(eax, scratch2);
2382      // stack: this, home_object, key; eax: value.
2383      EmitKeyedSuperPropertyStore(prop);
2384      break;
2385    }
2386    case KEYED_PROPERTY: {
2387      __ push(eax);  // Preserve value.
2388      VisitForStackValue(prop->obj());
2389      VisitForAccumulatorValue(prop->key());
2390      __ Move(StoreDescriptor::NameRegister(), eax);
2391      __ pop(StoreDescriptor::ReceiverRegister());  // Receiver.
2392      __ pop(StoreDescriptor::ValueRegister());     // Restore value.
2393      EmitLoadStoreICSlot(slot);
2394      Handle<Code> ic =
2395          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2396      CallIC(ic);
2397      break;
2398    }
2399  }
2400  context()->Plug(eax);
2401}
2402
2403
2404void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2405    Variable* var, MemOperand location) {
2406  __ mov(location, eax);
2407  if (var->IsContextSlot()) {
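    // The callers obtained 'location' via VarOperand(var, ecx), so for context
    // slots ecx holds the context object and can serve as the object register
    // for the write barrier below.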
2408    __ mov(edx, eax);
2409    int offset = Context::SlotOffset(var->index());
2410    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2411  }
2412}
2413
2414
2415void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2416                                               FeedbackVectorSlot slot) {
2417  if (var->IsUnallocated()) {
2418    // Global var, const, or let.
2419    __ mov(StoreDescriptor::NameRegister(), var->name());
2420    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
2421    __ mov(StoreDescriptor::ReceiverRegister(),
2422           ContextOperand(StoreDescriptor::ReceiverRegister(),
2423                          Context::EXTENSION_INDEX));
2424    EmitLoadStoreICSlot(slot);
2425    CallStoreIC();
2426
2427  } else if (var->mode() == LET && op != Token::INIT) {
2428    // Non-initializing assignment to let variable needs a write barrier.
2429    DCHECK(!var->IsLookupSlot());
2430    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2431    Label assign;
2432    MemOperand location = VarOperand(var, ecx);
2433    __ mov(edx, location);
2434    __ cmp(edx, isolate()->factory()->the_hole_value());
2435    __ j(not_equal, &assign, Label::kNear);
2436    __ push(Immediate(var->name()));
2437    __ CallRuntime(Runtime::kThrowReferenceError);
2438    __ bind(&assign);
2439    EmitStoreToStackLocalOrContextSlot(var, location);
2440
2441  } else if (var->mode() == CONST && op != Token::INIT) {
2442    // Assignment to const variable needs a write barrier.
2443    DCHECK(!var->IsLookupSlot());
2444    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2445    Label const_error;
2446    MemOperand location = VarOperand(var, ecx);
2447    __ mov(edx, location);
2448    __ cmp(edx, isolate()->factory()->the_hole_value());
2449    __ j(not_equal, &const_error, Label::kNear);
2450    __ push(Immediate(var->name()));
2451    __ CallRuntime(Runtime::kThrowReferenceError);
2452    __ bind(&const_error);
2453    __ CallRuntime(Runtime::kThrowConstAssignError);
2454
2455  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2456    // Initializing assignment to const {this} needs a write barrier.
2457    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2458    Label uninitialized_this;
2459    MemOperand location = VarOperand(var, ecx);
2460    __ mov(edx, location);
2461    __ cmp(edx, isolate()->factory()->the_hole_value());
2462    __ j(equal, &uninitialized_this);
2463    __ push(Immediate(var->name()));
2464    __ CallRuntime(Runtime::kThrowReferenceError);
2465    __ bind(&uninitialized_this);
2466    EmitStoreToStackLocalOrContextSlot(var, location);
2467
2468  } else if (!var->is_const_mode() ||
2469             (var->mode() == CONST && op == Token::INIT)) {
2470    if (var->IsLookupSlot()) {
2471      // Assignment to var.
2472      __ push(eax);  // Value.
2473      __ push(esi);  // Context.
2474      __ push(Immediate(var->name()));
2475      __ push(Immediate(Smi::FromInt(language_mode())));
2476      __ CallRuntime(Runtime::kStoreLookupSlot);
2477    } else {
2478      // Assignment to var or initializing assignment to let/const in harmony
2479      // mode.
2480      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2481      MemOperand location = VarOperand(var, ecx);
2482      if (generate_debug_code_ && var->mode() == LET && op == Token::INIT) {
2483        // Check for an uninitialized let binding.
2484        __ mov(edx, location);
2485        __ cmp(edx, isolate()->factory()->the_hole_value());
2486        __ Check(equal, kLetBindingReInitialization);
2487      }
2488      EmitStoreToStackLocalOrContextSlot(var, location);
2489    }
2490
2491  } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2492    // Const initializers need a write barrier.
2493    DCHECK(!var->IsParameter());  // No const parameters.
2494    if (var->IsLookupSlot()) {
2495      __ push(eax);
2496      __ push(esi);
2497      __ push(Immediate(var->name()));
2498      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2499    } else {
2500      DCHECK(var->IsStackLocal() || var->IsContextSlot());
2501      Label skip;
2502      MemOperand location = VarOperand(var, ecx);
2503      __ mov(edx, location);
2504      __ cmp(edx, isolate()->factory()->the_hole_value());
2505      __ j(not_equal, &skip, Label::kNear);
2506      EmitStoreToStackLocalOrContextSlot(var, location);
2507      __ bind(&skip);
2508    }
2509
2510  } else {
2511    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2512    if (is_strict(language_mode())) {
2513      __ CallRuntime(Runtime::kThrowConstAssignError);
2514    }
2515    // Silently ignore store in sloppy mode.
2516  }
2517}
2518
2519
2520void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2521  // Assignment to a property, using a named store IC.
2522  // eax    : value
2523  // esp[0] : receiver
2524  Property* prop = expr->target()->AsProperty();
2525  DCHECK(prop != NULL);
2526  DCHECK(prop->key()->IsLiteral());
2527
2528  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2529  __ pop(StoreDescriptor::ReceiverRegister());
2530  EmitLoadStoreICSlot(expr->AssignmentSlot());
2531  CallStoreIC();
2532  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2533  context()->Plug(eax);
2534}
2535
2536
2537void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2538  // Assignment to named property of super.
2539  // eax : value
2540  // stack : receiver ('this'), home_object
2541  DCHECK(prop != NULL);
2542  Literal* key = prop->key()->AsLiteral();
2543  DCHECK(key != NULL);
2544
2545  __ push(Immediate(key->value()));
2546  __ push(eax);
2547  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2548                                             : Runtime::kStoreToSuper_Sloppy));
2549}
2550
2551
2552void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
2554  // eax : value
2555  // stack : receiver ('this'), home_object, key
2556
2557  __ push(eax);
2558  __ CallRuntime((is_strict(language_mode())
2559                      ? Runtime::kStoreKeyedToSuper_Strict
2560                      : Runtime::kStoreKeyedToSuper_Sloppy));
2561}
2562
2563
2564void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2565  // Assignment to a property, using a keyed store IC.
2566  // eax               : value
2567  // esp[0]            : key
2568  // esp[kPointerSize] : receiver
2569
2570  __ pop(StoreDescriptor::NameRegister());  // Key.
2571  __ pop(StoreDescriptor::ReceiverRegister());
2572  DCHECK(StoreDescriptor::ValueRegister().is(eax));
2573  Handle<Code> ic =
2574      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2575  EmitLoadStoreICSlot(expr->AssignmentSlot());
2576  CallIC(ic);
2577  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2578  context()->Plug(eax);
2579}
2580
2581
2582void FullCodeGenerator::VisitProperty(Property* expr) {
2583  Comment cmnt(masm_, "[ Property");
2584  SetExpressionPosition(expr);
2585
2586  Expression* key = expr->key();
2587
2588  if (key->IsPropertyName()) {
2589    if (!expr->IsSuperAccess()) {
2590      VisitForAccumulatorValue(expr->obj());
2591      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2592      EmitNamedPropertyLoad(expr);
2593    } else {
2594      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2595      VisitForStackValue(
2596          expr->obj()->AsSuperPropertyReference()->home_object());
2597      EmitNamedSuperPropertyLoad(expr);
2598    }
2599  } else {
2600    if (!expr->IsSuperAccess()) {
2601      VisitForStackValue(expr->obj());
2602      VisitForAccumulatorValue(expr->key());
2603      __ pop(LoadDescriptor::ReceiverRegister());                  // Object.
2604      __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
2605      EmitKeyedPropertyLoad(expr);
2606    } else {
2607      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2608      VisitForStackValue(
2609          expr->obj()->AsSuperPropertyReference()->home_object());
2610      VisitForStackValue(expr->key());
2611      EmitKeyedSuperPropertyLoad(expr);
2612    }
2613  }
2614  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2615  context()->Plug(eax);
2616}
2617
2618
2619void FullCodeGenerator::CallIC(Handle<Code> code,
2620                               TypeFeedbackId ast_id) {
2621  ic_total_count_++;
2622  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2623}
2624
2625
2626// Code common for calls using the IC.
2627void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2628  Expression* callee = expr->expression();
2629
2630  // Get the target function.
2631  ConvertReceiverMode convert_mode;
2632  if (callee->IsVariableProxy()) {
2633    { StackValueContext context(this);
2634      EmitVariableLoad(callee->AsVariableProxy());
2635      PrepareForBailout(callee, NO_REGISTERS);
2636    }
2637    // Push undefined as receiver. This is patched in the method prologue if it
2638    // is a sloppy mode method.
2639    __ push(Immediate(isolate()->factory()->undefined_value()));
2640    convert_mode = ConvertReceiverMode::kNullOrUndefined;
2641  } else {
2642    // Load the function from the receiver.
2643    DCHECK(callee->IsProperty());
2644    DCHECK(!callee->AsProperty()->IsSuperAccess());
2645    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2646    EmitNamedPropertyLoad(callee->AsProperty());
2647    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2648    // Push the target function under the receiver.
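    // (Duplicate the receiver on top, then overwrite the lower copy with the
    // function in eax, leaving the receiver on top with the function directly
    // beneath it.)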
2649    __ push(Operand(esp, 0));
2650    __ mov(Operand(esp, kPointerSize), eax);
2651    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2652  }
2653
2654  EmitCall(expr, convert_mode);
2655}
2656
2657
2658void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2659  SetExpressionPosition(expr);
2660  Expression* callee = expr->expression();
2661  DCHECK(callee->IsProperty());
2662  Property* prop = callee->AsProperty();
2663  DCHECK(prop->IsSuperAccess());
2664
2665  Literal* key = prop->key()->AsLiteral();
2666  DCHECK(!key->value()->IsSmi());
2667  // Load the function from the receiver.
2668  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2669  VisitForStackValue(super_ref->home_object());
2670  VisitForAccumulatorValue(super_ref->this_var());
2671  __ push(eax);
2672  __ push(eax);
2673  __ push(Operand(esp, kPointerSize * 2));
2674  __ push(Immediate(key->value()));
2675  __ push(Immediate(Smi::FromInt(language_mode())));
2676  // Stack here:
2677  //  - home_object
2678  //  - this (receiver)
2679  //  - this (receiver) <-- LoadFromSuper will pop here and below.
2680  //  - home_object
2681  //  - key
2682  //  - language_mode
2683  __ CallRuntime(Runtime::kLoadFromSuper);
2684
2685  // Replace home_object with target function.
2686  __ mov(Operand(esp, kPointerSize), eax);
2687
2688  // Stack here:
2689  // - target function
2690  // - this (receiver)
2691  EmitCall(expr);
2692}
2693
2694
2695// Code common for calls using the IC.
2696void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2697                                                Expression* key) {
2698  // Load the key.
2699  VisitForAccumulatorValue(key);
2700
2701  Expression* callee = expr->expression();
2702
2703  // Load the function from the receiver.
2704  DCHECK(callee->IsProperty());
2705  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2706  __ mov(LoadDescriptor::NameRegister(), eax);
2707  EmitKeyedPropertyLoad(callee->AsProperty());
2708  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2709
2710  // Push the target function under the receiver.
2711  __ push(Operand(esp, 0));
2712  __ mov(Operand(esp, kPointerSize), eax);
2713
2714  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2715}
2716
2717
2718void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2719  Expression* callee = expr->expression();
2720  DCHECK(callee->IsProperty());
2721  Property* prop = callee->AsProperty();
2722  DCHECK(prop->IsSuperAccess());
2723
2724  SetExpressionPosition(prop);
2725  // Load the function from the receiver.
2726  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2727  VisitForStackValue(super_ref->home_object());
2728  VisitForAccumulatorValue(super_ref->this_var());
2729  __ push(eax);
2730  __ push(eax);
2731  __ push(Operand(esp, kPointerSize * 2));
2732  VisitForStackValue(prop->key());
2733  __ push(Immediate(Smi::FromInt(language_mode())));
2734  // Stack here:
2735  //  - home_object
2736  //  - this (receiver)
2737  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2738  //  - home_object
2739  //  - key
2740  //  - language_mode
2741  __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2742
2743  // Replace home_object with target function.
2744  __ mov(Operand(esp, kPointerSize), eax);
2745
2746  // Stack here:
2747  // - target function
2748  // - this (receiver)
2749  EmitCall(expr);
2750}
2751
2752
2753void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2754  // Load the arguments.
2755  ZoneList<Expression*>* args = expr->arguments();
2756  int arg_count = args->length();
2757  for (int i = 0; i < arg_count; i++) {
2758    VisitForStackValue(args->at(i));
2759  }
2760
2761  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2762  SetCallPosition(expr);
2763  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
2764  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2765  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2766  // Don't assign a type feedback id to the IC, since type feedback is provided
2767  // by the vector above.
2768  CallIC(ic);
2769
2770  RecordJSReturnSite(expr);
2771
2772  // Restore context register.
2773  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2774
2775  context()->DropAndPlug(1, eax);
2776}
2777
2778
2779void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2780  // Push copy of the first argument or undefined if it doesn't exist.
2781  if (arg_count > 0) {
2782    __ push(Operand(esp, arg_count * kPointerSize));
2783  } else {
2784    __ push(Immediate(isolate()->factory()->undefined_value()));
2785  }
2786
2787  // Push the enclosing function.
2788  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2789
2790  // Push the language mode.
2791  __ push(Immediate(Smi::FromInt(language_mode())));
2792
  // Push the start position of the scope the call resides in.
2794  __ push(Immediate(Smi::FromInt(scope()->start_position())));
2795
2796  // Do the runtime call.
2797  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2798}
2799
2800
2801// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2802void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2803  VariableProxy* callee = expr->expression()->AsVariableProxy();
2804  if (callee->var()->IsLookupSlot()) {
2805    Label slow, done;
2806    SetExpressionPosition(callee);
2807    // Generate code for loading from variables potentially shadowed by
2808    // eval-introduced variables.
2809    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2810
2811    __ bind(&slow);
2812    // Call the runtime to find the function to call (returned in eax) and
2813    // the object holding it (returned in edx).
2814    __ push(context_register());
2815    __ push(Immediate(callee->name()));
2816    __ CallRuntime(Runtime::kLoadLookupSlot);
2817    __ push(eax);  // Function.
2818    __ push(edx);  // Receiver.
2819    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2820
2821    // If fast case code has been generated, emit code to push the function
2822    // and receiver and have the slow path jump around this code.
2823    if (done.is_linked()) {
2824      Label call;
2825      __ jmp(&call, Label::kNear);
2826      __ bind(&done);
2827      // Push function.
2828      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
2831      __ push(Immediate(isolate()->factory()->undefined_value()));
2832      __ bind(&call);
2833    }
2834  } else {
2835    VisitForStackValue(callee);
2836    // refEnv.WithBaseObject()
2837    __ push(Immediate(isolate()->factory()->undefined_value()));
2838  }
2839}
2840
2841
2842void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
2846  ZoneList<Expression*>* args = expr->arguments();
2847  int arg_count = args->length();
2848
2849  PushCalleeAndWithBaseObject(expr);
2850
2851  // Push the arguments.
2852  for (int i = 0; i < arg_count; i++) {
2853    VisitForStackValue(args->at(i));
2854  }
2855
2856  // Push a copy of the function (found below the arguments) and
2857  // resolve eval.
2858  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2859  EmitResolvePossiblyDirectEval(arg_count);
2860
2861  // Touch up the stack with the resolved function.
2862  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2863
2864  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2865
2866  SetCallPosition(expr);
2867  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2868  __ Set(eax, arg_count);
2869  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2870  RecordJSReturnSite(expr);
2871  // Restore context register.
2872  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2873  context()->DropAndPlug(1, eax);
2874}
2875
2876
2877void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2878  Comment cmnt(masm_, "[ CallNew");
2879  // According to ECMA-262, section 11.2.2, page 44, the function
2880  // expression in new calls must be evaluated before the
2881  // arguments.
2882
  // Push the constructor on the stack.  If it is not a function, the
  // construct stub called below takes care of raising the proper error.
2886  DCHECK(!expr->expression()->IsSuperPropertyReference());
2887  VisitForStackValue(expr->expression());
2888
2889  // Push the arguments ("left-to-right") on the stack.
2890  ZoneList<Expression*>* args = expr->arguments();
2891  int arg_count = args->length();
2892  for (int i = 0; i < arg_count; i++) {
2893    VisitForStackValue(args->at(i));
2894  }
2895
2896  // Call the construct call builtin that handles allocation and
2897  // constructor invocation.
2898  SetConstructCallPosition(expr);
2899
2900  // Load function and argument count into edi and eax.
2901  __ Move(eax, Immediate(arg_count));
2902  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2903
2904  // Record call targets in unoptimized code.
2905  __ EmitLoadTypeFeedbackVector(ebx);
2906  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
2907
2908  CallConstructStub stub(isolate());
2909  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
2910  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2911  // Restore context register.
2912  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2913  context()->Plug(eax);
2914}
2915
2916
2917void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2918  SuperCallReference* super_call_ref =
2919      expr->expression()->AsSuperCallReference();
2920  DCHECK_NOT_NULL(super_call_ref);
2921
2922  // Push the super constructor target on the stack (may be null,
2923  // but the Construct builtin can deal with that properly).
2924  VisitForAccumulatorValue(super_call_ref->this_function_var());
2925  __ AssertFunction(result_register());
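  // The super constructor is the [[Prototype]] of the active function, which
  // is read through the function's map below.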
2926  __ mov(result_register(),
2927         FieldOperand(result_register(), HeapObject::kMapOffset));
2928  __ Push(FieldOperand(result_register(), Map::kPrototypeOffset));
2929
2930  // Push the arguments ("left-to-right") on the stack.
2931  ZoneList<Expression*>* args = expr->arguments();
2932  int arg_count = args->length();
2933  for (int i = 0; i < arg_count; i++) {
2934    VisitForStackValue(args->at(i));
2935  }
2936
2937  // Call the construct call builtin that handles allocation and
2938  // constructor invocation.
2939  SetConstructCallPosition(expr);
2940
2941  // Load new target into edx.
2942  VisitForAccumulatorValue(super_call_ref->new_target_var());
2943  __ mov(edx, result_register());
2944
2945  // Load function and argument count into edi and eax.
2946  __ Move(eax, Immediate(arg_count));
2947  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2948
2949  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2950
2951  RecordJSReturnSite(expr);
2952
2953  // Restore context register.
2954  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2955  context()->Plug(eax);
2956}
2957
2958
2959void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2960  ZoneList<Expression*>* args = expr->arguments();
2961  DCHECK(args->length() == 1);
2962
2963  VisitForAccumulatorValue(args->at(0));
2964
2965  Label materialize_true, materialize_false;
2966  Label* if_true = NULL;
2967  Label* if_false = NULL;
2968  Label* fall_through = NULL;
2969  context()->PrepareTest(&materialize_true, &materialize_false,
2970                         &if_true, &if_false, &fall_through);
2971
2972  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
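  // Smis carry a zero low tag bit, so the zero flag is set exactly when eax
  // holds a smi.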
2973  __ test(eax, Immediate(kSmiTagMask));
2974  Split(zero, if_true, if_false, fall_through);
2975
2976  context()->Plug(if_true, if_false);
2977}
2978
2979
2980void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2981  ZoneList<Expression*>* args = expr->arguments();
2982  DCHECK(args->length() == 1);
2983
2984  VisitForAccumulatorValue(args->at(0));
2985
2986  Label materialize_true, materialize_false;
2987  Label* if_true = NULL;
2988  Label* if_false = NULL;
2989  Label* fall_through = NULL;
2990  context()->PrepareTest(&materialize_true, &materialize_false,
2991                         &if_true, &if_false, &fall_through);
2992
2993  __ JumpIfSmi(eax, if_false);
2994  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
2995  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2996  Split(above_equal, if_true, if_false, fall_through);
2997
2998  context()->Plug(if_true, if_false);
2999}
3000
3001
3002void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3003  ZoneList<Expression*>* args = expr->arguments();
3004  DCHECK(args->length() == 1);
3005
3006  VisitForAccumulatorValue(args->at(0));
3007
3008  Label materialize_true, materialize_false;
3009  Label* if_true = NULL;
3010  Label* if_false = NULL;
3011  Label* fall_through = NULL;
3012  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3013                         &if_false, &fall_through);
3014
3015  __ JumpIfSmi(eax, if_false);
3016  __ CmpObjectType(eax, SIMD128_VALUE_TYPE, ebx);
3017  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3018  Split(equal, if_true, if_false, fall_through);
3019
3020  context()->Plug(if_true, if_false);
3021}
3022
3023
3024void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3025  ZoneList<Expression*>* args = expr->arguments();
3026  DCHECK(args->length() == 1);
3027
3028  VisitForAccumulatorValue(args->at(0));
3029
3030  Label materialize_true, materialize_false;
3031  Label* if_true = NULL;
3032  Label* if_false = NULL;
3033  Label* fall_through = NULL;
3034  context()->PrepareTest(&materialize_true, &materialize_false,
3035                         &if_true, &if_false, &fall_through);
3036
3037  __ JumpIfSmi(eax, if_false);
3038  __ CmpObjectType(eax, FIRST_FUNCTION_TYPE, ebx);
3039  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3040  Split(above_equal, if_true, if_false, fall_through);
3041
3042  context()->Plug(if_true, if_false);
3043}
3044
3045
3046void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3047  ZoneList<Expression*>* args = expr->arguments();
3048  DCHECK(args->length() == 1);
3049
3050  VisitForAccumulatorValue(args->at(0));
3051
3052  Label materialize_true, materialize_false;
3053  Label* if_true = NULL;
3054  Label* if_false = NULL;
3055  Label* fall_through = NULL;
3056  context()->PrepareTest(&materialize_true, &materialize_false,
3057                         &if_true, &if_false, &fall_through);
3058
3059  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3060  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
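  // -0.0 has the IEEE 754 bit pattern 0x8000000000000000, i.e. the exponent
  // half of the HeapNumber is 0x80000000 and the mantissa half is 0.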
3061  // Check if the exponent half is 0x80000000. Comparing against 1 and
3062  // checking for overflow is the shortest possible encoding.
3063  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3064  __ j(no_overflow, if_false);
3065  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3066  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3067  Split(equal, if_true, if_false, fall_through);
3068
3069  context()->Plug(if_true, if_false);
3070}
3071
3072
3073void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3074  ZoneList<Expression*>* args = expr->arguments();
3075  DCHECK(args->length() == 1);
3076
3077  VisitForAccumulatorValue(args->at(0));
3078
3079  Label materialize_true, materialize_false;
3080  Label* if_true = NULL;
3081  Label* if_false = NULL;
3082  Label* fall_through = NULL;
3083  context()->PrepareTest(&materialize_true, &materialize_false,
3084                         &if_true, &if_false, &fall_through);
3085
3086  __ JumpIfSmi(eax, if_false);
3087  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3088  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3089  Split(equal, if_true, if_false, fall_through);
3090
3091  context()->Plug(if_true, if_false);
3092}
3093
3094
3095void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3096  ZoneList<Expression*>* args = expr->arguments();
3097  DCHECK(args->length() == 1);
3098
3099  VisitForAccumulatorValue(args->at(0));
3100
3101  Label materialize_true, materialize_false;
3102  Label* if_true = NULL;
3103  Label* if_false = NULL;
3104  Label* fall_through = NULL;
3105  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3106                         &if_false, &fall_through);
3107
3108  __ JumpIfSmi(eax, if_false);
3109  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
3110  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3111  Split(equal, if_true, if_false, fall_through);
3112
3113  context()->Plug(if_true, if_false);
3114}
3115
3116
3117void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3118  ZoneList<Expression*>* args = expr->arguments();
3119  DCHECK(args->length() == 1);
3120
3121  VisitForAccumulatorValue(args->at(0));
3122
3123  Label materialize_true, materialize_false;
3124  Label* if_true = NULL;
3125  Label* if_false = NULL;
3126  Label* fall_through = NULL;
3127  context()->PrepareTest(&materialize_true, &materialize_false,
3128                         &if_true, &if_false, &fall_through);
3129
3130  __ JumpIfSmi(eax, if_false);
3131  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3132  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3133  Split(equal, if_true, if_false, fall_through);
3134
3135  context()->Plug(if_true, if_false);
3136}
3137
3138
3139void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3140  ZoneList<Expression*>* args = expr->arguments();
3141  DCHECK(args->length() == 1);
3142
3143  VisitForAccumulatorValue(args->at(0));
3144
3145  Label materialize_true, materialize_false;
3146  Label* if_true = NULL;
3147  Label* if_false = NULL;
3148  Label* fall_through = NULL;
3149  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3150                         &if_false, &fall_through);
3151
3152  __ JumpIfSmi(eax, if_false);
3153  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
3154  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3155  Split(equal, if_true, if_false, fall_through);
3156
3157  context()->Plug(if_true, if_false);
3158}
3159
3160
3161void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3162  ZoneList<Expression*>* args = expr->arguments();
3163  DCHECK(args->length() == 2);
3164
3165  // Load the two objects into registers and perform the comparison.
3166  VisitForStackValue(args->at(0));
3167  VisitForAccumulatorValue(args->at(1));
3168
3169  Label materialize_true, materialize_false;
3170  Label* if_true = NULL;
3171  Label* if_false = NULL;
3172  Label* fall_through = NULL;
3173  context()->PrepareTest(&materialize_true, &materialize_false,
3174                         &if_true, &if_false, &fall_through);
3175
3176  __ pop(ebx);
3177  __ cmp(eax, ebx);
3178  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3179  Split(equal, if_true, if_false, fall_through);
3180
3181  context()->Plug(if_true, if_false);
3182}
3183
3184
3185void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3186  ZoneList<Expression*>* args = expr->arguments();
3187  DCHECK(args->length() == 1);
3188
3189  // ArgumentsAccessStub expects the key in edx and the formal
3190  // parameter count in eax.
3191  VisitForAccumulatorValue(args->at(0));
3192  __ mov(edx, eax);
3193  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3194  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3195  __ CallStub(&stub);
3196  context()->Plug(eax);
3197}
3198
3199
3200void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3201  DCHECK(expr->arguments()->length() == 0);
3202
3203  Label exit;
3204  // Get the number of formal parameters.
3205  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3206
3207  // Check if the calling frame is an arguments adaptor frame.
3208  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3209  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3210         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3211  __ j(not_equal, &exit);
3212
3213  // Arguments adaptor case: Read the arguments length from the
3214  // adaptor frame.
3215  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3216
3217  __ bind(&exit);
3218  __ AssertSmi(eax);
3219  context()->Plug(eax);
3220}
3221
3222
3223void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3224  ZoneList<Expression*>* args = expr->arguments();
3225  DCHECK(args->length() == 1);
3226  Label done, null, function, non_function_constructor;
3227
3228  VisitForAccumulatorValue(args->at(0));
3229
3230  // If the object is not a JSReceiver, we return null.
3231  __ JumpIfSmi(eax, &null, Label::kNear);
3232  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3233  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
3234  __ j(below, &null, Label::kNear);
3235
3236  // Return 'Function' for JSFunction objects.
3237  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
3238  __ j(equal, &function, Label::kNear);
3239
3240  // Check if the constructor in the map is a JS function.
3241  __ GetMapConstructor(eax, eax, ebx);
3242  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3243  __ j(not_equal, &non_function_constructor, Label::kNear);
3244
3245  // eax now contains the constructor function. Grab the
3246  // instance class name from there.
3247  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3248  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3249  __ jmp(&done, Label::kNear);
3250
3251  // Non-JS objects have class null.
3252  __ bind(&null);
3253  __ mov(eax, isolate()->factory()->null_value());
3254  __ jmp(&done, Label::kNear);
3255
3256  // Functions have class 'Function'.
3257  __ bind(&function);
3258  __ mov(eax, isolate()->factory()->Function_string());
3259  __ jmp(&done, Label::kNear);
3260
3261  // Objects with a non-function constructor have class 'Object'.
3262  __ bind(&non_function_constructor);
3263  __ mov(eax, isolate()->factory()->Object_string());
3264
3265  // All done.
3266  __ bind(&done);
3267
3268  context()->Plug(eax);
3269}
3270
3271
3272void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3273  ZoneList<Expression*>* args = expr->arguments();
3274  DCHECK(args->length() == 1);
3275
3276  VisitForAccumulatorValue(args->at(0));  // Load the object.
3277
3278  Label done;
3279  // If the object is a smi return the object.
3280  __ JumpIfSmi(eax, &done, Label::kNear);
3281  // If the object is not a value type, return the object.
3282  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3283  __ j(not_equal, &done, Label::kNear);
3284  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3285
3286  __ bind(&done);
3287  context()->Plug(eax);
3288}
3289
3290
3291void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3292  ZoneList<Expression*>* args = expr->arguments();
3293  DCHECK_EQ(1, args->length());
3294
3295  VisitForAccumulatorValue(args->at(0));
3296
3297  Label materialize_true, materialize_false;
3298  Label* if_true = nullptr;
3299  Label* if_false = nullptr;
3300  Label* fall_through = nullptr;
3301  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3302                         &if_false, &fall_through);
3303
3304  __ JumpIfSmi(eax, if_false);
3305  __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
3306  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3307  Split(equal, if_true, if_false, fall_through);
3308
3309  context()->Plug(if_true, if_false);
3310}
3311
3312
3313void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3314  ZoneList<Expression*>* args = expr->arguments();
3315  DCHECK_EQ(3, args->length());
3316
3317  Register string = eax;
3318  Register index = ebx;
3319  Register value = ecx;
3320
3321  VisitForStackValue(args->at(0));        // index
3322  VisitForStackValue(args->at(1));        // value
3323  VisitForAccumulatorValue(args->at(2));  // string
3324
3325  __ pop(value);
3326  __ pop(index);
3327
3328  if (FLAG_debug_code) {
3329    __ test(value, Immediate(kSmiTagMask));
3330    __ Check(zero, kNonSmiValue);
3331    __ test(index, Immediate(kSmiTagMask));
3332    __ Check(zero, kNonSmiValue);
3333  }
3334
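  // Untag the character value and the index so they can be used as raw
  // integers in the one-byte store below.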
3335  __ SmiUntag(value);
3336  __ SmiUntag(index);
3337
3338  if (FLAG_debug_code) {
3339    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3340    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3341  }
3342
3343  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3344           value);
3345  context()->Plug(string);
3346}
3347
3348
3349void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3350  ZoneList<Expression*>* args = expr->arguments();
3351  DCHECK_EQ(3, args->length());
3352
3353  Register string = eax;
3354  Register index = ebx;
3355  Register value = ecx;
3356
3357  VisitForStackValue(args->at(0));        // index
3358  VisitForStackValue(args->at(1));        // value
3359  VisitForAccumulatorValue(args->at(2));  // string
3360  __ pop(value);
3361  __ pop(index);
3362
3363  if (FLAG_debug_code) {
3364    __ test(value, Immediate(kSmiTagMask));
3365    __ Check(zero, kNonSmiValue);
3366    __ test(index, Immediate(kSmiTagMask));
3367    __ Check(zero, kNonSmiValue);
3368    __ SmiUntag(index);
3369    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3370    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3371    __ SmiTag(index);
3372  }
3373
3374  __ SmiUntag(value);
  // No need to untag the smi index for two-byte addressing: the one-bit smi
  // tag shift matches the times-two scaling of two-byte characters.
3376  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3377           value);
3378  context()->Plug(string);
3379}
3380
3381
3382void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3383  ZoneList<Expression*>* args = expr->arguments();
3384  DCHECK(args->length() == 2);
3385
3386  VisitForStackValue(args->at(0));  // Load the object.
3387  VisitForAccumulatorValue(args->at(1));  // Load the value.
3388  __ pop(ebx);  // eax = value. ebx = object.
3389
3390  Label done;
3391  // If the object is a smi, return the value.
3392  __ JumpIfSmi(ebx, &done, Label::kNear);
3393
3394  // If the object is not a value type, return the value.
3395  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3396  __ j(not_equal, &done, Label::kNear);
3397
3398  // Store the value.
3399  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3400
3401  // Update the write barrier.  Save the value as it will be
3402  // overwritten by the write barrier code and is needed afterward.
3403  __ mov(edx, eax);
3404  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3405
3406  __ bind(&done);
3407  context()->Plug(eax);
3408}
3409
3410
3411void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3412  ZoneList<Expression*>* args = expr->arguments();
3413  DCHECK_EQ(1, args->length());
3414
3415  // Load the argument into eax and convert it.
3416  VisitForAccumulatorValue(args->at(0));
3417
3418  // Convert the object to an integer.
3419  Label done_convert;
3420  __ JumpIfSmi(eax, &done_convert, Label::kNear);
3421  __ Push(eax);
3422  __ CallRuntime(Runtime::kToInteger);
3423  __ bind(&done_convert);
3424  context()->Plug(eax);
3425}
3426
3427
3428void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3429  ZoneList<Expression*>* args = expr->arguments();
3430  DCHECK_EQ(1, args->length());
3431
3432  // Load the argument into eax and convert it.
3433  VisitForAccumulatorValue(args->at(0));
3434
3435  // Convert the object to a name.
3436  Label convert, done_convert;
3437  __ JumpIfSmi(eax, &convert, Label::kNear);
3438  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
3439  __ CmpObjectType(eax, LAST_NAME_TYPE, ecx);
3440  __ j(below_equal, &done_convert, Label::kNear);
3441  __ bind(&convert);
3442  __ Push(eax);
3443  __ CallRuntime(Runtime::kToName);
3444  __ bind(&done_convert);
3445  context()->Plug(eax);
3446}
3447
3448
3449void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3450  ZoneList<Expression*>* args = expr->arguments();
3451  DCHECK(args->length() == 1);
3452
3453  VisitForAccumulatorValue(args->at(0));
3454
3455  Label done;
3456  StringCharFromCodeGenerator generator(eax, ebx);
3457  generator.GenerateFast(masm_);
3458  __ jmp(&done);
3459
3460  NopRuntimeCallHelper call_helper;
3461  generator.GenerateSlow(masm_, call_helper);
3462
3463  __ bind(&done);
3464  context()->Plug(ebx);
3465}
3466
3467
3468void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3469  ZoneList<Expression*>* args = expr->arguments();
3470  DCHECK(args->length() == 2);
3471
3472  VisitForStackValue(args->at(0));
3473  VisitForAccumulatorValue(args->at(1));
3474
3475  Register object = ebx;
3476  Register index = eax;
3477  Register result = edx;
3478
3479  __ pop(object);
3480
3481  Label need_conversion;
3482  Label index_out_of_range;
3483  Label done;
3484  StringCharCodeAtGenerator generator(object,
3485                                      index,
3486                                      result,
3487                                      &need_conversion,
3488                                      &need_conversion,
3489                                      &index_out_of_range,
3490                                      STRING_INDEX_IS_NUMBER);
3491  generator.GenerateFast(masm_);
3492  __ jmp(&done);
3493
3494  __ bind(&index_out_of_range);
3495  // When the index is out of range, the spec requires us to return
3496  // NaN.
3497  __ Move(result, Immediate(isolate()->factory()->nan_value()));
3498  __ jmp(&done);
3499
3500  __ bind(&need_conversion);
3501  // Move the undefined value into the result register, which will
3502  // trigger conversion.
3503  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3504  __ jmp(&done);
3505
3506  NopRuntimeCallHelper call_helper;
3507  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3508
3509  __ bind(&done);
3510  context()->Plug(result);
3511}
3512
3513
3514void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3515  ZoneList<Expression*>* args = expr->arguments();
3516  DCHECK(args->length() == 2);
3517
3518  VisitForStackValue(args->at(0));
3519  VisitForAccumulatorValue(args->at(1));
3520
3521  Register object = ebx;
3522  Register index = eax;
3523  Register scratch = edx;
3524  Register result = eax;
3525
3526  __ pop(object);
3527
3528  Label need_conversion;
3529  Label index_out_of_range;
3530  Label done;
3531  StringCharAtGenerator generator(object,
3532                                  index,
3533                                  scratch,
3534                                  result,
3535                                  &need_conversion,
3536                                  &need_conversion,
3537                                  &index_out_of_range,
3538                                  STRING_INDEX_IS_NUMBER);
3539  generator.GenerateFast(masm_);
3540  __ jmp(&done);
3541
3542  __ bind(&index_out_of_range);
3543  // When the index is out of range, the spec requires us to return
3544  // the empty string.
3545  __ Move(result, Immediate(isolate()->factory()->empty_string()));
3546  __ jmp(&done);
3547
3548  __ bind(&need_conversion);
3549  // Move smi zero into the result register, which will trigger
3550  // conversion.
3551  __ Move(result, Immediate(Smi::FromInt(0)));
3552  __ jmp(&done);
3553
3554  NopRuntimeCallHelper call_helper;
3555  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3556
3557  __ bind(&done);
3558  context()->Plug(result);
3559}
3560
3561
3562void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3563  ZoneList<Expression*>* args = expr->arguments();
3564  DCHECK_LE(2, args->length());
3565  // Push target, receiver and arguments onto the stack.
3566  for (Expression* const arg : *args) {
3567    VisitForStackValue(arg);
3568  }
3569  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3570  // Move target to edi.
3571  int const argc = args->length() - 2;
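  // The target was pushed first, so it sits below the receiver and the argc
  // arguments.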
3572  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
3573  // Call the target.
3574  __ mov(eax, Immediate(argc));
3575  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3576  // Restore context register.
3577  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3578  // Discard the function left on TOS.
3579  context()->DropAndPlug(1, eax);
3580}
3581
3582
3583void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3584  ZoneList<Expression*>* args = expr->arguments();
3585  DCHECK(args->length() == 1);
3586
3587  VisitForAccumulatorValue(args->at(0));
3588
3589  __ AssertString(eax);
3590
3591  Label materialize_true, materialize_false;
3592  Label* if_true = NULL;
3593  Label* if_false = NULL;
3594  Label* fall_through = NULL;
3595  context()->PrepareTest(&materialize_true, &materialize_false,
3596                         &if_true, &if_false, &fall_through);
3597
3598  __ test(FieldOperand(eax, String::kHashFieldOffset),
3599          Immediate(String::kContainsCachedArrayIndexMask));
3600  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3601  Split(zero, if_true, if_false, fall_through);
3602
3603  context()->Plug(if_true, if_false);
3604}
3605
3606
3607void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3608  ZoneList<Expression*>* args = expr->arguments();
3609  DCHECK(args->length() == 1);
3610  VisitForAccumulatorValue(args->at(0));
3611
3612  __ AssertString(eax);
3613
3614  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3615  __ IndexFromHash(eax, eax);
3616
3617  context()->Plug(eax);
3618}
3619
3620
3621void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3622  ZoneList<Expression*>* args = expr->arguments();
3623  DCHECK_EQ(1, args->length());
3624  VisitForAccumulatorValue(args->at(0));
3625  __ AssertFunction(eax);
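  // The super constructor is the [[Prototype]] of the given function, read
  // through its map.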
3626  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
3627  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
3628  context()->Plug(eax);
3629}
3630
3631
3632void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3633  Label bailout, done, one_char_separator, long_separator,
3634      non_trivial_array, not_size_one_array, loop,
3635      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3636
3637  ZoneList<Expression*>* args = expr->arguments();
3638  DCHECK(args->length() == 2);
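  // Fast path for joining an array of sequential one-byte strings with a
  // one-byte separator.  Anything unexpected bails out by producing
  // undefined, so the caller can fall back to the generic join.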
3639  // We will leave the separator on the stack until the end of the function.
3640  VisitForStackValue(args->at(1));
  // Load the array into the accumulator (eax).
3642  VisitForAccumulatorValue(args->at(0));
3643  // All aliases of the same register have disjoint lifetimes.
3644  Register array = eax;
3645  Register elements = no_reg;  // Will be eax.
3646
3647  Register index = edx;
3648
3649  Register string_length = ecx;
3650
3651  Register string = esi;
3652
3653  Register scratch = ebx;
3654
3655  Register array_length = edi;
3656  Register result_pos = no_reg;  // Will be edi.
3657
3658  // Separator operand is already pushed.
3659  Operand separator_operand = Operand(esp, 2 * kPointerSize);
3660  Operand result_operand = Operand(esp, 1 * kPointerSize);
3661  Operand array_length_operand = Operand(esp, 0);
3662  __ sub(esp, Immediate(2 * kPointerSize));
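  // Stack layout from here on: [esp] is a slot for the untagged array length,
  // [esp + kPointerSize] a slot for the result string, and
  // [esp + 2 * kPointerSize] the separator pushed above.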
3663  __ cld();
  // Check that the array is a JSArray.
3665  __ JumpIfSmi(array, &bailout);
3666  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3667  __ j(not_equal, &bailout);
3668
3669  // Check that the array has fast elements.
3670  __ CheckFastElements(scratch, &bailout);
3671
3672  // If the array has length zero, return the empty string.
3673  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3674  __ SmiUntag(array_length);
3675  __ j(not_zero, &non_trivial_array);
3676  __ mov(result_operand, isolate()->factory()->empty_string());
3677  __ jmp(&done);
3678
3679  // Save the array length.
3680  __ bind(&non_trivial_array);
3681  __ mov(array_length_operand, array_length);
3682
3683  // Save the FixedArray containing array's elements.
3684  // End of array's live range.
3685  elements = array;
3686  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3687  array = no_reg;
3688
3689
3690  // Check that all array elements are sequential one-byte strings, and
3691  // accumulate the sum of their lengths, as a smi-encoded value.
3692  __ Move(index, Immediate(0));
3693  __ Move(string_length, Immediate(0));
3694  // Loop condition: while (index < length).
3695  // Live loop registers: index, array_length, string,
3696  //                      scratch, string_length, elements.
3697  if (generate_debug_code_) {
3698    __ cmp(index, array_length);
3699    __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3700  }
3701  __ bind(&loop);
3702  __ mov(string, FieldOperand(elements,
3703                              index,
3704                              times_pointer_size,
3705                              FixedArray::kHeaderSize));
3706  __ JumpIfSmi(string, &bailout);
3707  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3708  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3709  __ and_(scratch, Immediate(
3710      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3711  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3712  __ j(not_equal, &bailout);
3713  __ add(string_length,
3714         FieldOperand(string, SeqOneByteString::kLengthOffset));
3715  __ j(overflow, &bailout);
3716  __ add(index, Immediate(1));
3717  __ cmp(index, array_length);
3718  __ j(less, &loop);
3719
3720  // If array_length is 1, return elements[0], a string.
3721  __ cmp(array_length, 1);
3722  __ j(not_equal, &not_size_one_array);
3723  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3724  __ mov(result_operand, scratch);
3725  __ jmp(&done);
3726
3727  __ bind(&not_size_one_array);
3728
3729  // End of array_length live range.
3730  result_pos = array_length;
3731  array_length = no_reg;
3732
3733  // Live registers:
3734  // string_length: Sum of string lengths, as a smi.
3735  // elements: FixedArray of strings.
3736
3737  // Check that the separator is a flat one-byte string.
3738  __ mov(string, separator_operand);
3739  __ JumpIfSmi(string, &bailout);
3740  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3741  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3742  __ and_(scratch, Immediate(
3743      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3744  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3745  __ j(not_equal, &bailout);
3746
3747  // Add (separator length times array_length) - separator length
3748  // to string_length.
3749  __ mov(scratch, separator_operand);
3750  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
3751  __ sub(string_length, scratch);  // May be negative, temporarily.
3752  __ imul(scratch, array_length_operand);
3753  __ j(overflow, &bailout);
3754  __ add(string_length, scratch);
3755  __ j(overflow, &bailout);
3756
3757  __ shr(string_length, 1);
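  // string_length now holds the untagged total length of the result; the
  // shift above removed the smi tag.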
3758
3759  // Bailout for large object allocations.
3760  __ cmp(string_length, Page::kMaxRegularHeapObjectSize);
3761  __ j(greater, &bailout);
3762
3763  // Live registers and stack values:
3764  //   string_length
3765  //   elements
3766  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
3767                           &bailout);
3768  __ mov(result_operand, result_pos);
3769  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
3770
3771
3772  __ mov(string, separator_operand);
3773  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
3774         Immediate(Smi::FromInt(1)));
3775  __ j(equal, &one_char_separator);
3776  __ j(greater, &long_separator);
3777
3778
3779  // Empty separator case
3780  __ mov(index, Immediate(0));
3781  __ jmp(&loop_1_condition);
3782  // Loop condition: while (index < length).
3783  __ bind(&loop_1);
3784  // Each iteration of the loop concatenates one string to the result.
3785  // Live values in registers:
3786  //   index: which element of the elements array we are adding to the result.
3787  //   result_pos: the position to which we are currently copying characters.
3788  //   elements: the FixedArray of strings we are joining.
3789
3790  // Get string = array[index].
3791  __ mov(string, FieldOperand(elements, index,
3792                              times_pointer_size,
3793                              FixedArray::kHeaderSize));
3794  __ mov(string_length,
3795         FieldOperand(string, String::kLengthOffset));
3796  __ shr(string_length, 1);
3797  __ lea(string,
3798         FieldOperand(string, SeqOneByteString::kHeaderSize));
3799  __ CopyBytes(string, result_pos, string_length, scratch);
3800  __ add(index, Immediate(1));
3801  __ bind(&loop_1_condition);
3802  __ cmp(index, array_length_operand);
3803  __ j(less, &loop_1);  // End while (index < length).
3804  __ jmp(&done);
3805
3806
3807
3808  // One-character separator case
3809  __ bind(&one_char_separator);
3810  // Replace separator with its one-byte character value.
3811  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
3812  __ mov_b(separator_operand, scratch);
3813
3814  __ Move(index, Immediate(0));
3815  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
3817  __ jmp(&loop_2_entry);
3818  // Loop condition: while (index < length).
3819  __ bind(&loop_2);
3820  // Each iteration of the loop concatenates one string to the result.
3821  // Live values in registers:
3822  //   index: which element of the elements array we are adding to the result.
3823  //   result_pos: the position to which we are currently copying characters.
3824
3825  // Copy the separator character to the result.
3826  __ mov_b(scratch, separator_operand);
3827  __ mov_b(Operand(result_pos, 0), scratch);
3828  __ inc(result_pos);
3829
3830  __ bind(&loop_2_entry);
3831  // Get string = array[index].
3832  __ mov(string, FieldOperand(elements, index,
3833                              times_pointer_size,
3834                              FixedArray::kHeaderSize));
3835  __ mov(string_length,
3836         FieldOperand(string, String::kLengthOffset));
3837  __ shr(string_length, 1);
3838  __ lea(string,
3839         FieldOperand(string, SeqOneByteString::kHeaderSize));
3840  __ CopyBytes(string, result_pos, string_length, scratch);
3841  __ add(index, Immediate(1));
3842
3843  __ cmp(index, array_length_operand);
3844  __ j(less, &loop_2);  // End while (index < length).
3845  __ jmp(&done);
3846
3847
3848  // Long separator case (separator is more than one character).
3849  __ bind(&long_separator);
3850
3851  __ Move(index, Immediate(0));
3852  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
3854  __ jmp(&loop_3_entry);
3855  // Loop condition: while (index < length).
3856  __ bind(&loop_3);
3857  // Each iteration of the loop concatenates one string to the result.
3858  // Live values in registers:
3859  //   index: which element of the elements array we are adding to the result.
3860  //   result_pos: the position to which we are currently copying characters.
3861
3862  // Copy the separator to the result.
3863  __ mov(string, separator_operand);
3864  __ mov(string_length,
3865         FieldOperand(string, String::kLengthOffset));
3866  __ shr(string_length, 1);
3867  __ lea(string,
3868         FieldOperand(string, SeqOneByteString::kHeaderSize));
3869  __ CopyBytes(string, result_pos, string_length, scratch);
3870
3871  __ bind(&loop_3_entry);
3872  // Get string = array[index].
3873  __ mov(string, FieldOperand(elements, index,
3874                              times_pointer_size,
3875                              FixedArray::kHeaderSize));
3876  __ mov(string_length,
3877         FieldOperand(string, String::kLengthOffset));
3878  __ shr(string_length, 1);
3879  __ lea(string,
3880         FieldOperand(string, SeqOneByteString::kHeaderSize));
3881  __ CopyBytes(string, result_pos, string_length, scratch);
3882  __ add(index, Immediate(1));
3883
3884  __ cmp(index, array_length_operand);
3885  __ j(less, &loop_3);  // End while (index < length).
3886  __ jmp(&done);
3887
3888
3889  __ bind(&bailout);
3890  __ mov(result_operand, isolate()->factory()->undefined_value());
3891  __ bind(&done);
3892  __ mov(eax, result_operand);
3893  // Drop temp values from the stack, and restore context register.
3894  __ add(esp, Immediate(3 * kPointerSize));
3895
3896  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3897  context()->Plug(eax);
3898}
3899
3900
3901void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3902  DCHECK(expr->arguments()->length() == 0);
3903  ExternalReference debug_is_active =
3904      ExternalReference::debug_is_active_address(isolate());
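  // The flag is a single byte; load it and tag it as a smi so it can be
  // returned to JavaScript.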
3905  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
3906  __ SmiTag(eax);
3907  context()->Plug(eax);
3908}
3909
3910
3911void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3912  ZoneList<Expression*>* args = expr->arguments();
3913  DCHECK_EQ(2, args->length());
3914  VisitForStackValue(args->at(0));
3915  VisitForStackValue(args->at(1));
3916
3917  Label runtime, done;
3918
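  // Try to allocate the JSIteratorResult inline and initialize it in place,
  // popping the done and value arguments straight into their fields; fall
  // back to the runtime on allocation failure.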
3919  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, TAG_OBJECT);
3920  __ mov(ebx, NativeContextOperand());
3921  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
3922  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
3923  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
3924         isolate()->factory()->empty_fixed_array());
3925  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
3926         isolate()->factory()->empty_fixed_array());
3927  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
3928  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
3929  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3930  __ jmp(&done, Label::kNear);
3931
3932  __ bind(&runtime);
3933  __ CallRuntime(Runtime::kCreateIterResultObject);
3934
3935  __ bind(&done);
3936  context()->Plug(eax);
3937}
3938
3939
3940void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3941  // Push undefined as receiver.
3942  __ push(Immediate(isolate()->factory()->undefined_value()));
3943
3944  __ LoadGlobalFunction(expr->context_index(), eax);
3945}
3946
3947
3948void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3949  ZoneList<Expression*>* args = expr->arguments();
3950  int arg_count = args->length();
3951
3952  SetCallPosition(expr);
3953  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
3954  __ Set(eax, arg_count);
3955  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3956          RelocInfo::CODE_TARGET);
3957}
3958
3959
3960void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3961  ZoneList<Expression*>* args = expr->arguments();
3962  int arg_count = args->length();
3963
3964  if (expr->is_jsruntime()) {
3965    Comment cmnt(masm_, "[ CallRuntime");
3966    EmitLoadJSRuntimeFunction(expr);
3967
3968    // Push the target function under the receiver.
3969    __ push(Operand(esp, 0));
3970    __ mov(Operand(esp, kPointerSize), eax);
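    // The stack now holds the target function with the receiver (undefined)
    // on top; the arguments are pushed next.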
3971
3972    // Push the arguments ("left-to-right").
3973    for (int i = 0; i < arg_count; i++) {
3974      VisitForStackValue(args->at(i));
3975    }
3976
3977    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3978    EmitCallJSRuntimeFunction(expr);
3979
3980    // Restore context register.
3981    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3982    context()->DropAndPlug(1, eax);
3983
3984  } else {
3985    const Runtime::Function* function = expr->function();
3986    switch (function->function_id) {
3987#define CALL_INTRINSIC_GENERATOR(Name)     \
3988  case Runtime::kInline##Name: {           \
3989    Comment cmnt(masm_, "[ Inline" #Name); \
3990    return Emit##Name(expr);               \
3991  }
3992      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3993#undef CALL_INTRINSIC_GENERATOR
3994      default: {
3995        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3996        // Push the arguments ("left-to-right").
3997        for (int i = 0; i < arg_count; i++) {
3998          VisitForStackValue(args->at(i));
3999        }
4000
4001        // Call the C runtime function.
4002        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4003        __ CallRuntime(expr->function(), arg_count);
4004        context()->Plug(eax);
4005      }
4006    }
4007  }
4008}
4009
4010
4011void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4012  switch (expr->op()) {
4013    case Token::DELETE: {
4014      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4015      Property* property = expr->expression()->AsProperty();
4016      VariableProxy* proxy = expr->expression()->AsVariableProxy();
4017
4018      if (property != NULL) {
4019        VisitForStackValue(property->obj());
4020        VisitForStackValue(property->key());
4021        __ CallRuntime(is_strict(language_mode())
4022                           ? Runtime::kDeleteProperty_Strict
4023                           : Runtime::kDeleteProperty_Sloppy);
4024        context()->Plug(eax);
4025      } else if (proxy != NULL) {
4026        Variable* var = proxy->var();
4027        // Delete of an unqualified identifier is disallowed in strict mode but
4028        // "delete this" is allowed.
4029        bool is_this = var->HasThisName(isolate());
4030        DCHECK(is_sloppy(language_mode()) || is_this);
4031        if (var->IsUnallocatedOrGlobalSlot()) {
4032          __ mov(eax, NativeContextOperand());
4033          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
4034          __ push(Immediate(var->name()));
4035          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
4036          context()->Plug(eax);
4037        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4038          // Result of deleting non-global variables is false.  'this' is
4039          // not really a variable, though we implement it as one.  The
4040          // subexpression does not have side effects.
4041          context()->Plug(is_this);
4042        } else {
4043          // Non-global variable.  Call the runtime to try to delete from the
4044          // context where the variable was introduced.
4045          __ push(context_register());
4046          __ push(Immediate(var->name()));
4047          __ CallRuntime(Runtime::kDeleteLookupSlot);
4048          context()->Plug(eax);
4049        }
4050      } else {
4051        // Result of deleting non-property, non-variable reference is true.
4052        // The subexpression may have side effects.
4053        VisitForEffect(expr->expression());
4054        context()->Plug(true);
4055      }
4056      break;
4057    }
4058
4059    case Token::VOID: {
4060      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4061      VisitForEffect(expr->expression());
4062      context()->Plug(isolate()->factory()->undefined_value());
4063      break;
4064    }
4065
4066    case Token::NOT: {
4067      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4068      if (context()->IsEffect()) {
4069        // Unary NOT has no side effects so it's only necessary to visit the
4070        // subexpression.  Match the optimizing compiler by not branching.
4071        VisitForEffect(expr->expression());
4072      } else if (context()->IsTest()) {
4073        const TestContext* test = TestContext::cast(context());
4074        // The labels are swapped for the recursive call.
4075        VisitForControl(expr->expression(),
4076                        test->false_label(),
4077                        test->true_label(),
4078                        test->fall_through());
4079        context()->Plug(test->true_label(), test->false_label());
4080      } else {
4081        // We handle value contexts explicitly rather than simply visiting
4082        // for control and plugging the control flow into the context,
4083        // because we need to prepare a pair of extra administrative AST ids
4084        // for the optimizing compiler.
4085        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4086        Label materialize_true, materialize_false, done;
4087        VisitForControl(expr->expression(),
4088                        &materialize_false,
4089                        &materialize_true,
4090                        &materialize_true);
4091        __ bind(&materialize_true);
4092        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4093        if (context()->IsAccumulatorValue()) {
4094          __ mov(eax, isolate()->factory()->true_value());
4095        } else {
4096          __ Push(isolate()->factory()->true_value());
4097        }
4098        __ jmp(&done, Label::kNear);
4099        __ bind(&materialize_false);
4100        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4101        if (context()->IsAccumulatorValue()) {
4102          __ mov(eax, isolate()->factory()->false_value());
4103        } else {
4104          __ Push(isolate()->factory()->false_value());
4105        }
4106        __ bind(&done);
4107      }
4108      break;
4109    }
4110
4111    case Token::TYPEOF: {
4112      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4113      {
4114        AccumulatorValueContext context(this);
4115        VisitForTypeofValue(expr->expression());
4116      }
4117      __ mov(ebx, eax);
4118      TypeofStub typeof_stub(isolate());
4119      __ CallStub(&typeof_stub);
4120      context()->Plug(eax);
4121      break;
4122    }
4123
4124    default:
4125      UNREACHABLE();
4126  }
4127}
4128
4129
4130void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4131  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4132
4133  Comment cmnt(masm_, "[ CountOperation");
4134
4135  Property* prop = expr->expression()->AsProperty();
4136  LhsKind assign_type = Property::GetAssignType(prop);
4137
4138  // Evaluate expression and get value.
4139  if (assign_type == VARIABLE) {
4140    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4141    AccumulatorValueContext context(this);
4142    EmitVariableLoad(expr->expression()->AsVariableProxy());
4143  } else {
4144    // Reserve space for result of postfix operation.
4145    if (expr->is_postfix() && !context()->IsEffect()) {
4146      __ push(Immediate(Smi::FromInt(0)));
4147    }
4148    switch (assign_type) {
4149      case NAMED_PROPERTY: {
4150        // Put the object both on the stack and in the register.
4151        VisitForStackValue(prop->obj());
4152        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4153        EmitNamedPropertyLoad(prop);
4154        break;
4155      }
4156
4157      case NAMED_SUPER_PROPERTY: {
4158        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4159        VisitForAccumulatorValue(
4160            prop->obj()->AsSuperPropertyReference()->home_object());
4161        __ push(result_register());
4162        __ push(MemOperand(esp, kPointerSize));
4163        __ push(result_register());
4164        EmitNamedSuperPropertyLoad(prop);
4165        break;
4166      }
4167
4168      case KEYED_SUPER_PROPERTY: {
4169        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4170        VisitForStackValue(
4171            prop->obj()->AsSuperPropertyReference()->home_object());
4172        VisitForAccumulatorValue(prop->key());
4173        __ push(result_register());
4174        __ push(MemOperand(esp, 2 * kPointerSize));
4175        __ push(MemOperand(esp, 2 * kPointerSize));
4176        __ push(result_register());
4177        EmitKeyedSuperPropertyLoad(prop);
4178        break;
4179      }
4180
4181      case KEYED_PROPERTY: {
4182        VisitForStackValue(prop->obj());
4183        VisitForStackValue(prop->key());
4184        __ mov(LoadDescriptor::ReceiverRegister(),
4185               Operand(esp, kPointerSize));                       // Object.
4186        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
4187        EmitKeyedPropertyLoad(prop);
4188        break;
4189      }
4190
4191      case VARIABLE:
4192        UNREACHABLE();
4193    }
4194  }
4195
4196  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
4198  if (assign_type == VARIABLE) {
4199    PrepareForBailout(expr->expression(), TOS_REG);
4200  } else {
4201    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4202  }
4203
4204  // Inline smi case if we are in a loop.
4205  Label done, stub_call;
4206  JumpPatchSite patch_site(masm_);
4207  if (ShouldInlineSmiCase(expr->op())) {
4208    Label slow;
4209    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4210
4211    // Save result for postfix expressions.
4212    if (expr->is_postfix()) {
4213      if (!context()->IsEffect()) {
4214        // Save the result on the stack. If we have a named or keyed property
4215        // we store the result under the receiver that is currently on top
4216        // of the stack.
4217        switch (assign_type) {
4218          case VARIABLE:
4219            __ push(eax);
4220            break;
4221          case NAMED_PROPERTY:
4222            __ mov(Operand(esp, kPointerSize), eax);
4223            break;
4224          case NAMED_SUPER_PROPERTY:
4225            __ mov(Operand(esp, 2 * kPointerSize), eax);
4226            break;
4227          case KEYED_PROPERTY:
4228            __ mov(Operand(esp, 2 * kPointerSize), eax);
4229            break;
4230          case KEYED_SUPER_PROPERTY:
4231            __ mov(Operand(esp, 3 * kPointerSize), eax);
4232            break;
4233        }
4234      }
4235    }
4236
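    // Adding or subtracting the tagged constant Smi::FromInt(1) adjusts the
    // untagged value by one, since the smi tag bit is zero; a 32-bit overflow
    // means the result left the smi range.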
4237    if (expr->op() == Token::INC) {
4238      __ add(eax, Immediate(Smi::FromInt(1)));
4239    } else {
4240      __ sub(eax, Immediate(Smi::FromInt(1)));
4241    }
4242    __ j(no_overflow, &done, Label::kNear);
4243    // Call stub. Undo operation first.
4244    if (expr->op() == Token::INC) {
4245      __ sub(eax, Immediate(Smi::FromInt(1)));
4246    } else {
4247      __ add(eax, Immediate(Smi::FromInt(1)));
4248    }
4249    __ jmp(&stub_call, Label::kNear);
4250    __ bind(&slow);
4251  }
4252  if (!is_strong(language_mode())) {
4253    ToNumberStub convert_stub(isolate());
4254    __ CallStub(&convert_stub);
4255    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4256  }
4257
4258  // Save result for postfix expressions.
4259  if (expr->is_postfix()) {
4260    if (!context()->IsEffect()) {
4261      // Save the result on the stack. If we have a named or keyed property
4262      // we store the result under the receiver that is currently on top
4263      // of the stack.
4264      switch (assign_type) {
4265        case VARIABLE:
4266          __ push(eax);
4267          break;
4268        case NAMED_PROPERTY:
4269          __ mov(Operand(esp, kPointerSize), eax);
4270          break;
4271        case NAMED_SUPER_PROPERTY:
4272          __ mov(Operand(esp, 2 * kPointerSize), eax);
4273          break;
4274        case KEYED_PROPERTY:
4275          __ mov(Operand(esp, 2 * kPointerSize), eax);
4276          break;
4277        case KEYED_SUPER_PROPERTY:
4278          __ mov(Operand(esp, 3 * kPointerSize), eax);
4279          break;
4280      }
4281    }
4282  }
4283
4284  SetExpressionPosition(expr);
4285
4286  // Call stub for +1/-1.
4287  __ bind(&stub_call);
4288  __ mov(edx, eax);
4289  __ mov(eax, Immediate(Smi::FromInt(1)));
4290  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4291                                              strength(language_mode())).code();
4292  CallIC(code, expr->CountBinOpFeedbackId());
4293  patch_site.EmitPatchInfo();
4294  __ bind(&done);
4295
4296  if (is_strong(language_mode())) {
4297    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4298  }
4299  // Store the value returned in eax.
4300  switch (assign_type) {
4301    case VARIABLE:
4302      if (expr->is_postfix()) {
4303        // Perform the assignment as if via '='.
4304        { EffectContext context(this);
4305          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4306                                 Token::ASSIGN, expr->CountSlot());
4307          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4308          context.Plug(eax);
4309        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
4312        if (!context()->IsEffect()) {
4313          context()->PlugTOS();
4314        }
4315      } else {
4316        // Perform the assignment as if via '='.
4317        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4318                               Token::ASSIGN, expr->CountSlot());
4319        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4320        context()->Plug(eax);
4321      }
4322      break;
4323    case NAMED_PROPERTY: {
4324      __ mov(StoreDescriptor::NameRegister(),
4325             prop->key()->AsLiteral()->value());
4326      __ pop(StoreDescriptor::ReceiverRegister());
4327      EmitLoadStoreICSlot(expr->CountSlot());
4328      CallStoreIC();
4329      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4330      if (expr->is_postfix()) {
4331        if (!context()->IsEffect()) {
4332          context()->PlugTOS();
4333        }
4334      } else {
4335        context()->Plug(eax);
4336      }
4337      break;
4338    }
4339    case NAMED_SUPER_PROPERTY: {
4340      EmitNamedSuperPropertyStore(prop);
4341      if (expr->is_postfix()) {
4342        if (!context()->IsEffect()) {
4343          context()->PlugTOS();
4344        }
4345      } else {
4346        context()->Plug(eax);
4347      }
4348      break;
4349    }
4350    case KEYED_SUPER_PROPERTY: {
4351      EmitKeyedSuperPropertyStore(prop);
4352      if (expr->is_postfix()) {
4353        if (!context()->IsEffect()) {
4354          context()->PlugTOS();
4355        }
4356      } else {
4357        context()->Plug(eax);
4358      }
4359      break;
4360    }
4361    case KEYED_PROPERTY: {
4362      __ pop(StoreDescriptor::NameRegister());
4363      __ pop(StoreDescriptor::ReceiverRegister());
4364      Handle<Code> ic =
4365          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4366      EmitLoadStoreICSlot(expr->CountSlot());
4367      CallIC(ic);
4368      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4369      if (expr->is_postfix()) {
4370        // Result is on the stack
4371        if (!context()->IsEffect()) {
4372          context()->PlugTOS();
4373        }
4374      } else {
4375        context()->Plug(eax);
4376      }
4377      break;
4378    }
4379  }
4380}
4381
4382
4383void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4384                                                 Expression* sub_expr,
4385                                                 Handle<String> check) {
4386  Label materialize_true, materialize_false;
4387  Label* if_true = NULL;
4388  Label* if_false = NULL;
4389  Label* fall_through = NULL;
4390  context()->PrepareTest(&materialize_true, &materialize_false,
4391                         &if_true, &if_false, &fall_through);
4392
4393  { AccumulatorValueContext context(this);
4394    VisitForTypeofValue(sub_expr);
4395  }
4396  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4397
4398  Factory* factory = isolate()->factory();
4399  if (String::Equals(check, factory->number_string())) {
4400    __ JumpIfSmi(eax, if_true);
4401    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4402           isolate()->factory()->heap_number_map());
4403    Split(equal, if_true, if_false, fall_through);
4404  } else if (String::Equals(check, factory->string_string())) {
4405    __ JumpIfSmi(eax, if_false);
4406    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4407    Split(below, if_true, if_false, fall_through);
4408  } else if (String::Equals(check, factory->symbol_string())) {
4409    __ JumpIfSmi(eax, if_false);
4410    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4411    Split(equal, if_true, if_false, fall_through);
4412  } else if (String::Equals(check, factory->boolean_string())) {
4413    __ cmp(eax, isolate()->factory()->true_value());
4414    __ j(equal, if_true);
4415    __ cmp(eax, isolate()->factory()->false_value());
4416    Split(equal, if_true, if_false, fall_through);
4417  } else if (String::Equals(check, factory->undefined_string())) {
4418    __ cmp(eax, isolate()->factory()->undefined_value());
4419    __ j(equal, if_true);
4420    __ JumpIfSmi(eax, if_false);
4421    // Check for undetectable objects => true.
4422    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4423    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4424              1 << Map::kIsUndetectable);
4425    Split(not_zero, if_true, if_false, fall_through);
4426  } else if (String::Equals(check, factory->function_string())) {
4427    __ JumpIfSmi(eax, if_false);
4428    // Check for callable and not undetectable objects => true.
4429    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4430    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4431    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
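    // Equal below only if the callable bit is set and the undetectable bit
    // is clear.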
4432    __ cmp(ecx, 1 << Map::kIsCallable);
4433    Split(equal, if_true, if_false, fall_through);
4434  } else if (String::Equals(check, factory->object_string())) {
4435    __ JumpIfSmi(eax, if_false);
4436    __ cmp(eax, isolate()->factory()->null_value());
4437    __ j(equal, if_true);
4438    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4439    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
4440    __ j(below, if_false);
4441    // Check for callable or undetectable objects => false.
4442    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4443              (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
4444    Split(zero, if_true, if_false, fall_through);
4445// clang-format off
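// Each SIMD128_TYPES entry expands the macro below into one more "} else if"
// branch that compares the value's map against the corresponding SIMD map
// (e.g. the float32x4 map for "float32x4").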
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ CallRuntime(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      __ Pop(edx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

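      // Smi fast case: or-ing the operands leaves the smi tag bit clear
      // only if both are smis (kSmiTag is 0), in which case the tagged
      // values can be compared directly.  Non-smis take the slow path
      // through the CompareIC below.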
      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
                            isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
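      // The CompareIC returns its result as an integer in eax; testing eax
      // against itself sets the flags so that cc reflects the outcome of
      // the comparison.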
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


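// Emits code for comparing a subexpression against the null or undefined
// literal.  Strict equality is a direct pointer comparison against the
// canonical nil value; the non-strict form must also accept the other nil
// value and undetectable objects, so it is handled by the CompareNilIC.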
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    __ push(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

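// On entering a finally block the return address on top of the stack is
// "cooked": it is rewritten as a smi-tagged offset from the code object so
// that no raw code pointer is left on the stack while the finally block
// runs (the code object may move during a GC).  The result register and the
// pending message are saved alongside it and restored symmetrically in
// ExitFinallyBlock().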
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi-encoded Code* delta).
  DCHECK(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(slot)));
}


#undef __


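// A patchable back edge consists of a profiling counter decrement, a short
// jns over a call to the interrupt builtin, and the call itself.  0x79 is
// the opcode of the short jns and 0x11 its offset byte; 0x66 0x90 is the
// two-byte nop that replaces the jns once the call has been retargeted to
// an OSR builtin.  0xe8 (checked in debug code only) is the near call
// opcode.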
static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


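// Byte layout around a back edge, with pc pointing just past the call:
//
//   call_target_address - 3:  jns opcode (or first nop byte)
//   call_target_address - 2:  jns offset (or second nop byte)
//   call_target_address - 1:  call opcode (0xe8)
//   call_target_address:      32-bit call displacement (== pc - kIntSize)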
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32