// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

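// A patch site is a location in the code which it is possible to patch. This
// class implements the inlined smi-check protocol: the jumps below are first
// emitted on the carry flag and later patched by the IC system to test the
// zero flag instead (jc becomes jz, jnc becomes jnz).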
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

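  // Records where the inlined jump sits relative to the IC call that follows:
  // the 8-bit immediate of the test instruction encodes the distance back to
  // the patch site. A plain nop signals that no inlined smi code was emitted.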
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x87.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ mov(ecx, FieldOperand(edi, JSFunction::kFeedbackVectorOffset));
    __ mov(ecx, FieldOperand(ecx, Cell::kValueOffset));
    __ add(
        FieldOperand(ecx, FeedbackVector::kInvocationCountIndex * kPointerSize +
                              FeedbackVector::kHeaderSize),
        Immediate(Smi::FromInt(1)));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <=
          ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
        Callable callable = CodeFactory::FastNewFunctionContext(
            isolate(), info->scope()->scope_type());
        __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
               Immediate(slots));
        __ Call(callable.code(), RelocInfo::CODE_TARGET);
        // Result of the FastNewFunctionContext builtin is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ Push(Smi::FromInt(info->scope()->scope_type()));
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding the function and the new target are both trashed if
  // we bail out here. But since that can happen only when new target is not
  // used and we allocate a context, the value of |function_in_register| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // We don't support new.target and rest parameters here.
  DCHECK_NULL(info->scope()->new_target_var());
  DCHECK_NULL(info->scope()->rest_parameter());
  DCHECK_NULL(info->scope()->this_function_var());

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::kZero));
}

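// The profiling counter cell is decremented at back edges and at returns;
// when it goes negative, the generated code calls the InterruptCheck builtin
// and then resets the counter. This gives the runtime a chance to handle
// interrupts and to decide whether to optimize the function.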
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
  if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}

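// Emit a conditional branch on |cc|, choosing the cheapest code for whichever
// of the two targets, if either, is the fall-through position.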
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      // We need the slot where the literals array lives, too.
      slot = declaration->fun()->LiteralFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ EmitLoadFeedbackVector(eax);
  __ Push(eax);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::kZero));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::kZero));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::kZero));  // Initial index.

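  // At this point the stack holds the five values used by the loop below:
  //   esp[0]  : current index (smi)
  //   esp[4]  : array length (smi)
  //   esp[8]  : enumeration cache or fixed array of keys
  //   esp[12] : expected map, or Smi(1) for the slow path
  //   esp[16] : the enumerable object itself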
  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register eax.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(eax, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack, or a smi (in the permanent slow
  // case) into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ebx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ebx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(FeedbackVector::MegamorphicSentinel(isolate())));

  // eax contains the key.  The receiver in ebx is the second argument to the
  // ForInFilter.  ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register eax.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Two cases: global variables and all other types of variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<BoilerplateDescription> constant_properties =
      expr->GetOrBuildConstantProperties(isolate());
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    Callable callable = CodeFactory::FastCloneShallowObject(
        isolate(), expr->properties_count());
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    DCHECK(!property->is_computed_name());
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(property->GetSlot(0), key->value(), true);
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(i),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<ConstantElementsPair> constant_elements =
      expr->GetOrBuildConstantElements(isolate());

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    Callable callable =
        CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(eax);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  // Resumable functions are not supported.
  UNREACHABLE();
}

void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ mov(eax, ebp);
    __ sub(eax, esp);
    __ cmp(eax, Immediate(expected_diff));
    __ Assert(equal, kUnexpectedStackDepth);
  }
}

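// Builds a JSIteratorResult in eax, popping the result value from the operand
// stack into its value field and setting the done field from |done|. Falls
// back to the runtime if new-space allocation fails.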
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate, Label::kNear);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  OperandStackDepthDecrement(1);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  PopOperand(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

1502  switch (op) {
1503    case Token::SAR:
1504      __ SmiUntag(ecx);
1505      __ sar_cl(eax);  // No checks of result necessary
1506      __ and_(eax, Immediate(~kSmiTagMask));
1507      break;
1508    case Token::SHL: {
1509      Label result_ok;
1510      __ SmiUntag(eax);
1511      __ SmiUntag(ecx);
1512      __ shl_cl(eax);
1513      // Check that the *signed* result fits in a smi.
1514      __ cmp(eax, 0xc0000000);
1515      __ j(positive, &result_ok);
1516      __ SmiTag(ecx);
1517      __ jmp(&stub_call);
1518      __ bind(&result_ok);
1519      __ SmiTag(eax);
1520      break;
1521    }
1522    case Token::SHR: {
1523      Label result_ok;
1524      __ SmiUntag(eax);
1525      __ SmiUntag(ecx);
1526      __ shr_cl(eax);
1527      __ test(eax, Immediate(0xc0000000));
1528      __ j(zero, &result_ok);
1529      __ SmiTag(ecx);
1530      __ jmp(&stub_call);
1531      __ bind(&result_ok);
1532      __ SmiTag(eax);
1533      break;
1534    }
1535    case Token::ADD:
1536      __ add(eax, ecx);
1537      __ j(overflow, &stub_call);
1538      break;
1539    case Token::SUB:
1540      __ sub(eax, ecx);
1541      __ j(overflow, &stub_call);
1542      break;
1543    case Token::MUL: {
1544      __ SmiUntag(eax);
1545      __ imul(eax, ecx);
1546      __ j(overflow, &stub_call);
1547      __ test(eax, eax);
1548      __ j(not_zero, &done, Label::kNear);
1549      __ mov(ebx, edx);
1550      __ or_(ebx, ecx);
1551      __ j(negative, &stub_call);
1552      break;
1553    }
1554    case Token::BIT_OR:
1555      __ or_(eax, ecx);
1556      break;
1557    case Token::BIT_AND:
1558      __ and_(eax, ecx);
1559      break;
1560    case Token::BIT_XOR:
1561      __ xor_(eax, ecx);
1562      break;
1563    default:
1564      UNREACHABLE();
1565  }
1566
1567  __ bind(&done);
1568  context()->Plug(eax);
1569}
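
// A note on the smi arithmetic above: on this 32-bit port a smi stores the
// integer shifted left by one with tag bit 0 (e.g. the JS number 5 is the
// word 0b1010).  Two consequences are exploited here:
//  - or-ing both operands and testing the low bit checks them for smi-ness
//    in one go (the combined check at the patch site), and
//  - ADD/SUB work directly on tagged values, since
//    (a << 1) + (b << 1) == (a + b) << 1; only MUL and the shifts need the
//    explicit SmiUntag/SmiTag round trip seen in the switch.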

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}

void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      CallKeyedStoreIC(slot);
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
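
// The RecordWriteContextSlot call above is the store's write barrier: stack
// slots are scanned precisely by the GC, but a pointer store into a heap
// allocated context must be reported to incremental marking, so the barrier
// is only needed on the IsContextSlot() path.  The value is copied into edx
// first because the barrier may clobber its value and scratch registers,
// while eax must still hold the assigned value for the surrounding code.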

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, ecx);
    // Perform an initialization check for lexically declared variables.
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    DCHECK(!var->IsLookupSlot());
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    MemOperand location = VarOperand(var, ecx);
    if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
      // Check for an uninitialized let binding.
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ Check(equal, kLetBindingReInitialization);
    }
    EmitStoreToStackLocalOrContextSlot(var, location);
  }
}
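
// The hole checks above implement the temporal dead zone for lexical
// bindings.  Roughly, for
//
//   { x = 1; let x; }   // assignment before the binding is initialized
//
// the slot still contains the_hole, so the generated code calls
// Runtime::kThrowReferenceError instead of storing, whereas
//
//   { let x; x = 1; }   // fine: `let x;` initializes x to undefined
//
// passes the check and falls through to the store.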


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  CallKeyedStoreIC(expr->AssignmentSlot());
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
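
// A sketch of the stack shuffle on the property-call path above: after
// EmitNamedPropertyLoad the receiver is on top of the stack and the loaded
// function is in eax, so
//
//   push [esp]           ; duplicate the receiver on top
//   mov  [esp + 4], eax  ; overwrite the old slot with the function
//
// leaves the layout EmitCall expects: function below receiver, with the
// arguments pushed on top afterwards.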


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
          .code();
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Move(eax, Immediate(arg_count));
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, eax);
}
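
// For reference, the register protocol of the call IC used above: eax holds
// the argument count, edi the callee, and edx the feedback slot (as a smi)
// so call targets can be recorded for later optimization.  The callee itself
// sits arg_count + 1 words down the stack, below the arguments and the
// receiver.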

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
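
// The %_IsSmi test above is a single bit test: kSmiTagMask is 1 and smis
// carry tag 0, so `test eax, 1` sets the zero flag exactly for smis.  For
// example the smi 5 is the word 0b1010 (low bit clear), while heap object
// pointers are tagged with a set low bit.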


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
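
// For illustration, the %_ClassOf intrinsic above yields e.g. "Array" for
// [], "Function" for both ordinary and bound functions (the
// FIRST_FUNCTION_TYPE check), "Object" for a receiver whose map's
// constructor is not a JS function, and null for smis and other
// non-receivers.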


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else {
          DCHECK(!var->IsLookupSlot());
          DCHECK(var->IsStackAllocated() || var->IsContextSlot());
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(ebx, eax);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
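
// The label swap in the Token::NOT test case above deserves an example: for
// `if (!x) A else B` the subexpression x is visited with the true and false
// labels exchanged, so control reaches B's code when x is truthy and A's
// when it is falsy; no boolean result is ever materialized.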


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::kZero);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));                       // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY:
      case KEYED_SUPER_PROPERTY:
      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_SUPER_PROPERTY:
            UNREACHABLE();
            break;
        }
      }
    }

    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}
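
// A sketch of why a stack slot is reserved for postfix operations above: in
// `y = obj.p++` the expression's value is the *old* value of obj.p, but eax
// is consumed by the increment and the store IC.  The old value is therefore
// parked on the operand stack, under the receiver (hence the
// Operand(esp, kPointerSize) stores), and PlugTOS() hands it to the
// surrounding context once the new value has been written back.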


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
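
// EmitLiteralCompareTypeof handles comparisons such as
// `typeof x === "number"` without materializing the typeof string.  Two of
// the subtler cases above: undetectable objects (document.all-style) answer
// "undefined", and "function" requires callable *and* not undetectable,
// which is why the bit field is masked with both flags but compared against
// the callable bit alone.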


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      RestoreContext();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
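
// A byte-level view of the patch above, assuming the back-edge sequence laid
// out by the code generator:
//
//   INTERRUPT:              ON_STACK_REPLACEMENT:
//     79 11  jns ok           66 90  (two-byte nop)
//     e8 ..  call <stub>      e8 ..  call <osr builtin>
//   ok:                     ok:
//
// Toggling between the states rewrites only the two bytes ahead of the call
// and the call's 32-bit target, which keeps the patch safe to apply while
// the unoptimized code is live on the stack.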


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87