full-codegen-mips.cc revision f3b273f5e6ffd2f6ba1c18a27a17db41dfb113c3
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_MIPS
6
7// Note on Mips implementation:
8//
9// The result_register() for mips is the 'v0' register, which is defined
10// by the ABI to contain function return values. However, the first
11// parameter to a function is defined to be 'a0'. So there are many
12// places where we have to move a previous result in v0 to a0 for the
13// next call: mov(a0, v0). This is not needed on the other architectures.
14
15#include "src/full-codegen/full-codegen.h"
16#include "src/ast/compile-time-value.h"
17#include "src/ast/scopes.h"
18#include "src/code-factory.h"
19#include "src/code-stubs.h"
20#include "src/codegen.h"
21#include "src/compilation-info.h"
22#include "src/compiler.h"
23#include "src/debug/debug.h"
24#include "src/ic/ic.h"
25
26#include "src/mips/code-stubs-mips.h"
27#include "src/mips/macro-assembler-mips.h"
28
29namespace v8 {
30namespace internal {
31
32#define __ ACCESS_MASM(masm())
33
34// A patch site is a location in the code that can be patched. This class has
35// a number of methods to emit the patchable code and the method EmitPatchInfo
36// to record a marker back to the patchable code. The marker is an
37// andi zero_reg, rx, #yyyy instruction, where rx * 0x0000ffff + yyyy (using
38// the raw 16-bit immediate value) is the delta from the pc to the first
39// instruction of the patchable code.
40// The marker instruction is effectively a NOP (its destination is zero_reg)
41// and will never be emitted by normal code.
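// For example, a delta of 0x1234 instructions is recorded as
// andi(zero_reg, zero_reg, 0x1234): 0x1234 / kImm16Mask selects register code 0
// (zero_reg) and the remainder 0x1234 becomes the immediate.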
42class JumpPatchSite BASE_EMBEDDED {
43 public:
44  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
45#ifdef DEBUG
46    info_emitted_ = false;
47#endif
48  }
49
50  ~JumpPatchSite() {
51    DCHECK(patch_site_.is_bound() == info_emitted_);
52  }
53
54  // When initially emitting this code, ensure that a jump is always generated
55  // to skip the inlined smi code.
56  void EmitJumpIfNotSmi(Register reg, Label* target) {
57    DCHECK(!patch_site_.is_bound() && !info_emitted_);
58    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
59    __ bind(&patch_site_);
60    __ andi(at, reg, 0);
61    // Always taken before patched.
62    __ BranchShort(target, eq, at, Operand(zero_reg));
63  }
64
65  // When initially emitting this code, ensure that a jump is never generated
66  // to skip the inlined smi code.
67  void EmitJumpIfSmi(Register reg, Label* target) {
68    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
69    DCHECK(!patch_site_.is_bound() && !info_emitted_);
70    __ bind(&patch_site_);
71    __ andi(at, reg, 0);
72    // Never taken before patched.
73    __ BranchShort(target, ne, at, Operand(zero_reg));
74  }
75
76  void EmitPatchInfo() {
77    if (patch_site_.is_bound()) {
78      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
79      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
80      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
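      // The delta is therefore encoded as reg_code * kImm16Mask + immediate,
      // so the IC patching code can recover it from this single marker
      // instruction.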
81#ifdef DEBUG
82      info_emitted_ = true;
83#endif
84    } else {
85      __ nop();  // Signals no inlined code.
86    }
87  }
88
89 private:
90  MacroAssembler* masm() { return masm_; }
91  MacroAssembler* masm_;
92  Label patch_site_;
93#ifdef DEBUG
94  bool info_emitted_;
95#endif
96};
97
98
99// Generate code for a JS function.  On entry to the function the receiver
100// and arguments have been pushed on the stack left to right.  The actual
101// argument count matches the formal parameter count expected by the
102// function.
103//
104// The live registers are:
105//   o a1: the JS function object being called (i.e. ourselves)
106//   o a3: the new target value
107//   o cp: our context
108//   o fp: our caller's frame pointer
109//   o sp: stack pointer
110//   o ra: return address
111//
112// The function builds a JS frame.  Please see JavaScriptFrameConstants in
113// frames-mips.h for its layout.
114void FullCodeGenerator::Generate() {
115  CompilationInfo* info = info_;
116  profiling_counter_ = isolate()->factory()->NewCell(
117      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
118  SetFunctionPosition(literal());
119  Comment cmnt(masm_, "[ function compiled by full code generator");
120
121  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
122
123  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
124    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
125    __ lw(a2, MemOperand(sp, receiver_offset));
126    __ AssertNotSmi(a2);
127    __ GetObjectType(a2, a2, a2);
128    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
129             Operand(FIRST_JS_RECEIVER_TYPE));
130  }
131
132  // Open a frame scope to indicate that there is a frame on the stack.  The
133  // MANUAL indicates that the scope shouldn't actually generate code to set up
134  // the frame (that is done below).
135  FrameScope frame_scope(masm_, StackFrame::MANUAL);
136
137  info->set_prologue_offset(masm_->pc_offset());
138  __ Prologue(info->GeneratePreagedPrologue());
139
140  // Increment invocation count for the function.
141  {
142    Comment cmnt(masm_, "[ Increment invocation count");
143    __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
144    __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
145    __ lw(t0, FieldMemOperand(
146                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
147                          TypeFeedbackVector::kHeaderSize));
148    __ Addu(t0, t0, Operand(Smi::FromInt(1)));
149    __ sw(t0, FieldMemOperand(
150                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
151                          TypeFeedbackVector::kHeaderSize));
152  }
153
154  { Comment cmnt(masm_, "[ Allocate locals");
155    int locals_count = info->scope()->num_stack_slots();
156    // Generators allocate locals, if any, in context slots.
157    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
158    OperandStackDepthIncrement(locals_count);
159    if (locals_count > 0) {
160      if (locals_count >= 128) {
161        Label ok;
162        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
163        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
164        __ Branch(&ok, hs, t5, Operand(a2));
165        __ CallRuntime(Runtime::kThrowStackOverflow);
166        __ bind(&ok);
167      }
168      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
169      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
170      if (locals_count >= kMaxPushes) {
171        int loop_iterations = locals_count / kMaxPushes;
172        __ li(a2, Operand(loop_iterations));
173        Label loop_header;
174        __ bind(&loop_header);
175        // Do pushes.
176        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
177        for (int i = 0; i < kMaxPushes; i++) {
178          __ sw(t5, MemOperand(sp, i * kPointerSize));
179        }
180        // Continue loop if not done.
181        __ Subu(a2, a2, Operand(1));
182        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
183      }
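      // For example, with 70 locals and kMaxPushes == 32, the loop above runs
      // twice (pushing 64 slots), leaving six slots for the pushes below.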
184      int remaining = locals_count % kMaxPushes;
185      // Emit the remaining pushes.
186      __ Subu(sp, sp, Operand(remaining * kPointerSize));
187      for (int i = 0; i < remaining; i++) {
188        __ sw(t5, MemOperand(sp, i * kPointerSize));
189      }
190    }
191  }
192
193  bool function_in_register_a1 = true;
194
195  // Possibly allocate a local context.
196  if (info->scope()->NeedsContext()) {
197    Comment cmnt(masm_, "[ Allocate context");
198    // Argument to NewContext is the function, which is still in a1.
199    bool need_write_barrier = true;
200    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
201    if (info->scope()->is_script_scope()) {
202      __ push(a1);
203      __ Push(info->scope()->scope_info());
204      __ CallRuntime(Runtime::kNewScriptContext);
205      PrepareForBailoutForId(BailoutId::ScriptContext(),
206                             BailoutState::TOS_REGISTER);
207      // The new target value is not used, so clobbering it is safe.
208      DCHECK_NULL(info->scope()->new_target_var());
209    } else {
210      if (info->scope()->new_target_var() != nullptr) {
211        __ push(a3);  // Preserve new target.
212      }
213      if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
214        FastNewFunctionContextStub stub(isolate());
215        __ li(FastNewFunctionContextDescriptor::SlotsRegister(),
216              Operand(slots));
217        __ CallStub(&stub);
218        // Result of FastNewFunctionContextStub is always in new space.
219        need_write_barrier = false;
220      } else {
221        __ push(a1);
222        __ CallRuntime(Runtime::kNewFunctionContext);
223      }
224      if (info->scope()->new_target_var() != nullptr) {
225        __ pop(a3);  // Restore new target.
226      }
227    }
228    function_in_register_a1 = false;
229    // Context is returned in v0. It replaces the context passed to us.
230    // It's saved on the stack and kept live in cp.
231    __ mov(cp, v0);
232    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
233    // Copy any necessary parameters into the context.
234    int num_parameters = info->scope()->num_parameters();
235    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
236    for (int i = first_parameter; i < num_parameters; i++) {
237      Variable* var =
238          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
239      if (var->IsContextSlot()) {
240        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
241                                 (num_parameters - 1 - i) * kPointerSize;
242        // Load parameter from stack.
243        __ lw(a0, MemOperand(fp, parameter_offset));
244        // Store it in the context.
245        MemOperand target = ContextMemOperand(cp, var->index());
246        __ sw(a0, target);
247
248        // Update the write barrier.
249        if (need_write_barrier) {
250          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
251                                    kRAHasBeenSaved, kDontSaveFPRegs);
252        } else if (FLAG_debug_code) {
253          Label done;
254          __ JumpIfInNewSpace(cp, a0, &done);
255          __ Abort(kExpectedNewSpaceObject);
256          __ bind(&done);
257        }
258      }
259    }
260  }
261
262  // The registers holding this function and the new target are both trashed
263  // if we bail out here. But since that can happen only when the new target is
264  // not used and we allocate a context, |function_in_register_a1| is correct.
265  PrepareForBailoutForId(BailoutId::FunctionContext(),
266                         BailoutState::NO_REGISTERS);
267
268  // Possibly set up a local binding to the 'this function' variable, which is
269  // used in derived constructors with super calls.
270  Variable* this_function_var = info->scope()->this_function_var();
271  if (this_function_var != nullptr) {
272    Comment cmnt(masm_, "[ This function");
273    if (!function_in_register_a1) {
274      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
275      // The write barrier clobbers the register again; keep it marked as such.
276    }
277    SetVar(this_function_var, a1, a0, a2);
278  }
279
280  // Possibly set up a local binding to the new target value.
281  Variable* new_target_var = info->scope()->new_target_var();
282  if (new_target_var != nullptr) {
283    Comment cmnt(masm_, "[ new.target");
284    SetVar(new_target_var, a3, a0, a2);
285  }
286
287  // Possibly allocate a rest parameter array.
288  Variable* rest_param = info->scope()->rest_parameter();
289  if (rest_param != nullptr) {
290    Comment cmnt(masm_, "[ Allocate rest parameter array");
291    if (!function_in_register_a1) {
292      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
293    }
294    FastNewRestParameterStub stub(isolate());
295    __ CallStub(&stub);
296    function_in_register_a1 = false;
297    SetVar(rest_param, v0, a1, a2);
298  }
299
300  Variable* arguments = info->scope()->arguments();
301  if (arguments != NULL) {
302    // Function uses arguments object.
303    Comment cmnt(masm_, "[ Allocate arguments object");
304    if (!function_in_register_a1) {
305      // Load this again, if it's used by the local context below.
306      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
307    }
308    if (is_strict(language_mode()) || !has_simple_parameters()) {
309      FastNewStrictArgumentsStub stub(isolate());
310      __ CallStub(&stub);
311    } else if (literal()->has_duplicate_parameters()) {
312      __ Push(a1);
313      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
314    } else {
315      FastNewSloppyArgumentsStub stub(isolate());
316      __ CallStub(&stub);
317    }
318
319    SetVar(arguments, v0, a1, a2);
320  }
321
322  if (FLAG_trace) {
323    __ CallRuntime(Runtime::kTraceEnter);
324  }
325
326  // Visit the declarations and body unless there is an illegal
327  // redeclaration.
328  PrepareForBailoutForId(BailoutId::FunctionEntry(),
329                         BailoutState::NO_REGISTERS);
330  {
331    Comment cmnt(masm_, "[ Declarations");
332    VisitDeclarations(scope()->declarations());
333  }
334
335  // Assert that the declarations do not use ICs. Otherwise the debugger
336  // won't be able to redirect a PC at an IC to the correct IC in newly
337  // recompiled code.
338  DCHECK_EQ(0, ic_total_count_);
339
340  {
341    Comment cmnt(masm_, "[ Stack check");
342    PrepareForBailoutForId(BailoutId::Declarations(),
343                           BailoutState::NO_REGISTERS);
344    Label ok;
345    __ LoadRoot(at, Heap::kStackLimitRootIndex);
346    __ Branch(&ok, hs, sp, Operand(at));
347    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
348    PredictableCodeSizeScope predictable(
349        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
350    __ Call(stack_check, RelocInfo::CODE_TARGET);
351    __ bind(&ok);
352  }
353
354  {
355    Comment cmnt(masm_, "[ Body");
356    DCHECK(loop_depth() == 0);
357    VisitStatements(literal()->body());
358    DCHECK(loop_depth() == 0);
359  }
360
361  // Always emit a 'return undefined' in case control fell off the end of
362  // the body.
363  { Comment cmnt(masm_, "[ return <undefined>;");
364    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
365  }
366  EmitReturnSequence();
367}
368
369
370void FullCodeGenerator::ClearAccumulator() {
371  DCHECK(Smi::FromInt(0) == 0);
372  __ mov(v0, zero_reg);
373}
374
375
376void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
377  __ li(a2, Operand(profiling_counter_));
378  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
379  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
380  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
381}
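// The profiling counter is a heap Cell holding a smi budget. When repeated
// decrements drive it negative, the callers below invoke the InterruptCheck
// builtin and then reset the budget via EmitProfilingCounterReset().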
382
383
384void FullCodeGenerator::EmitProfilingCounterReset() {
385  int reset_value = FLAG_interrupt_budget;
386  if (info_->is_debug()) {
387    // Detect debug break requests as soon as possible.
388    reset_value = FLAG_interrupt_budget >> 4;
389  }
390  __ li(a2, Operand(profiling_counter_));
391  __ li(a3, Operand(Smi::FromInt(reset_value)));
392  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
393}
394
395
396void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
397                                                Label* back_edge_target) {
398  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
399  // to make sure it is constant. Branch may emit a skip-or-jump sequence
400  // instead of the normal Branch. It seems that the "skip" part of that
401  // sequence is about as long as this Branch would be so it is safe to ignore
402  // that.
403  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
404  Comment cmnt(masm_, "[ Back edge bookkeeping");
405  Label ok;
406  DCHECK(back_edge_target->is_bound());
407  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
408  int weight = Min(kMaxBackEdgeWeight,
409                   Max(1, distance / kCodeSizeMultiplier));
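  // The weight is proportional to the amount of code in the loop body (capped
  // at kMaxBackEdgeWeight), so larger loops consume the interrupt budget
  // faster on each back edge.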
410  EmitProfilingCounterDecrement(weight);
411  __ slt(at, a3, zero_reg);
412  __ beq(at, zero_reg, &ok);
413  // Call will emit a li t9 first, so it is safe to use the delay slot.
414  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
415  // Record a mapping of this PC offset to the OSR id.  This is used to find
416  // the AST id from the unoptimized code in order to use it as a key into
417  // the deoptimization input data found in the optimized code.
418  RecordBackEdge(stmt->OsrEntryId());
419  EmitProfilingCounterReset();
420
421  __ bind(&ok);
422  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
423  // Record a mapping of the OSR id to this PC.  This is used if the OSR
424  // entry becomes the target of a bailout.  We don't expect it to be, but
425  // we want it to work if it is.
426  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
427}
428
429void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
430    bool is_tail_call) {
431  // Pretend that the exit is a backwards jump to the entry.
432  int weight = 1;
433  if (info_->ShouldSelfOptimize()) {
434    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
435  } else {
436    int distance = masm_->pc_offset();
437    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
438  }
439  EmitProfilingCounterDecrement(weight);
440  Label ok;
441  __ Branch(&ok, ge, a3, Operand(zero_reg));
442  // Don't need to save result register if we are going to do a tail call.
443  if (!is_tail_call) {
444    __ push(v0);
445  }
446  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
447  if (!is_tail_call) {
448    __ pop(v0);
449  }
450  EmitProfilingCounterReset();
451  __ bind(&ok);
452}
453
454void FullCodeGenerator::EmitReturnSequence() {
455  Comment cmnt(masm_, "[ Return sequence");
456  if (return_label_.is_bound()) {
457    __ Branch(&return_label_);
458  } else {
459    __ bind(&return_label_);
460    if (FLAG_trace) {
461      // Push the return value on the stack as the parameter.
462      // Runtime::TraceExit returns its parameter in v0.
463      __ push(v0);
464      __ CallRuntime(Runtime::kTraceExit);
465    }
466    EmitProfilingCounterHandlingForReturnSequence(false);
467
468    // Make sure that the trampoline pool is not emitted inside of the return
469    // sequence.
470    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
471      int32_t arg_count = info_->scope()->num_parameters() + 1;
472      int32_t sp_delta = arg_count * kPointerSize;
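      // arg_count includes the receiver, so the sp adjustment below removes
      // the receiver and all arguments pushed by the caller.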
473      SetReturnPosition(literal());
474      __ mov(sp, fp);
475      __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
476      __ Addu(sp, sp, Operand(sp_delta));
477      __ Jump(ra);
478    }
479  }
480}
481
482void FullCodeGenerator::RestoreContext() {
483  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
484}
485
486void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
487  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
488  codegen()->GetVar(result_register(), var);
489  codegen()->PushOperand(result_register());
490}
491
492
493void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
494}
495
496
497void FullCodeGenerator::AccumulatorValueContext::Plug(
498    Heap::RootListIndex index) const {
499  __ LoadRoot(result_register(), index);
500}
501
502
503void FullCodeGenerator::StackValueContext::Plug(
504    Heap::RootListIndex index) const {
505  __ LoadRoot(result_register(), index);
506  codegen()->PushOperand(result_register());
507}
508
509
510void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
511  codegen()->PrepareForBailoutBeforeSplit(condition(),
512                                          true,
513                                          true_label_,
514                                          false_label_);
515  if (index == Heap::kUndefinedValueRootIndex ||
516      index == Heap::kNullValueRootIndex ||
517      index == Heap::kFalseValueRootIndex) {
518    if (false_label_ != fall_through_) __ Branch(false_label_);
519  } else if (index == Heap::kTrueValueRootIndex) {
520    if (true_label_ != fall_through_) __ Branch(true_label_);
521  } else {
522    __ LoadRoot(result_register(), index);
523    codegen()->DoTest(this);
524  }
525}
526
527
528void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
529}
530
531
532void FullCodeGenerator::AccumulatorValueContext::Plug(
533    Handle<Object> lit) const {
534  __ li(result_register(), Operand(lit));
535}
536
537
538void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
539  // Immediates cannot be pushed directly.
540  __ li(result_register(), Operand(lit));
541  codegen()->PushOperand(result_register());
542}
543
544
545void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
546  codegen()->PrepareForBailoutBeforeSplit(condition(),
547                                          true,
548                                          true_label_,
549                                          false_label_);
550  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
551         !lit->IsUndetectable());
552  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
553      lit->IsFalse(isolate())) {
554    if (false_label_ != fall_through_) __ Branch(false_label_);
555  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
556    if (true_label_ != fall_through_) __ Branch(true_label_);
557  } else if (lit->IsString()) {
558    if (String::cast(*lit)->length() == 0) {
559      if (false_label_ != fall_through_) __ Branch(false_label_);
560    } else {
561      if (true_label_ != fall_through_) __ Branch(true_label_);
562    }
563  } else if (lit->IsSmi()) {
564    if (Smi::cast(*lit)->value() == 0) {
565      if (false_label_ != fall_through_) __ Branch(false_label_);
566    } else {
567      if (true_label_ != fall_through_) __ Branch(true_label_);
568    }
569  } else {
570    // For simplicity we always test the accumulator register.
571    __ li(result_register(), Operand(lit));
572    codegen()->DoTest(this);
573  }
574}
575
576
577void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
578                                                       Register reg) const {
579  DCHECK(count > 0);
580  if (count > 1) codegen()->DropOperands(count - 1);
581  __ sw(reg, MemOperand(sp, 0));
582}
583
584
585void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586                                            Label* materialize_false) const {
587  DCHECK(materialize_true == materialize_false);
588  __ bind(materialize_true);
589}
590
591
592void FullCodeGenerator::AccumulatorValueContext::Plug(
593    Label* materialize_true,
594    Label* materialize_false) const {
595  Label done;
596  __ bind(materialize_true);
597  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
598  __ Branch(&done);
599  __ bind(materialize_false);
600  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
601  __ bind(&done);
602}
603
604
605void FullCodeGenerator::StackValueContext::Plug(
606    Label* materialize_true,
607    Label* materialize_false) const {
608  codegen()->OperandStackDepthIncrement(1);
609  Label done;
610  __ bind(materialize_true);
611  __ LoadRoot(at, Heap::kTrueValueRootIndex);
612  // Push the value; the following branch can clobber 'at' in long branch mode.
613  __ push(at);
614  __ Branch(&done);
615  __ bind(materialize_false);
616  __ LoadRoot(at, Heap::kFalseValueRootIndex);
617  __ push(at);
618  __ bind(&done);
619}
620
621
622void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
623                                          Label* materialize_false) const {
624  DCHECK(materialize_true == true_label_);
625  DCHECK(materialize_false == false_label_);
626}
627
628
629void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630  Heap::RootListIndex value_root_index =
631      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
632  __ LoadRoot(result_register(), value_root_index);
633}
634
635
636void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
637  Heap::RootListIndex value_root_index =
638      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
639  __ LoadRoot(at, value_root_index);
640  codegen()->PushOperand(at);
641}
642
643
644void FullCodeGenerator::TestContext::Plug(bool flag) const {
645  codegen()->PrepareForBailoutBeforeSplit(condition(),
646                                          true,
647                                          true_label_,
648                                          false_label_);
649  if (flag) {
650    if (true_label_ != fall_through_) __ Branch(true_label_);
651  } else {
652    if (false_label_ != fall_through_) __ Branch(false_label_);
653  }
654}
655
656
657void FullCodeGenerator::DoTest(Expression* condition,
658                               Label* if_true,
659                               Label* if_false,
660                               Label* fall_through) {
661  __ mov(a0, result_register());
662  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
663  CallIC(ic, condition->test_id());
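  // The ToBoolean IC yields the true root value exactly when the condition is
  // truthy, so a single comparison against it decides which way to branch.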
664  __ LoadRoot(at, Heap::kTrueValueRootIndex);
665  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
666}
667
668
669void FullCodeGenerator::Split(Condition cc,
670                              Register lhs,
671                              const Operand&  rhs,
672                              Label* if_true,
673                              Label* if_false,
674                              Label* fall_through) {
675  if (if_false == fall_through) {
676    __ Branch(if_true, cc, lhs, rhs);
677  } else if (if_true == fall_through) {
678    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
679  } else {
680    __ Branch(if_true, cc, lhs, rhs);
681    __ Branch(if_false);
682  }
683}
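// For example, Split(eq, v0, Operand(at), if_true, if_false, if_false) emits a
// single conditional branch to if_true and simply falls through into if_false.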
684
685
686MemOperand FullCodeGenerator::StackOperand(Variable* var) {
687  DCHECK(var->IsStackAllocated());
688  // Offset is negative because higher indexes are at lower addresses.
689  int offset = -var->index() * kPointerSize;
690  // Adjust by a (parameter or local) base offset.
691  if (var->IsParameter()) {
692    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
693  } else {
694    offset += JavaScriptFrameConstants::kLocal0Offset;
695  }
696  return MemOperand(fp, offset);
697}
698
699
700MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
701  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
702  if (var->IsContextSlot()) {
703    int context_chain_length = scope()->ContextChainLength(var->scope());
704    __ LoadContext(scratch, context_chain_length);
705    return ContextMemOperand(scratch, var->index());
706  } else {
707    return StackOperand(var);
708  }
709}
710
711
712void FullCodeGenerator::GetVar(Register dest, Variable* var) {
713  // Use destination as scratch.
714  MemOperand location = VarOperand(var, dest);
715  __ lw(dest, location);
716}
717
718
719void FullCodeGenerator::SetVar(Variable* var,
720                               Register src,
721                               Register scratch0,
722                               Register scratch1) {
723  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
724  DCHECK(!scratch0.is(src));
725  DCHECK(!scratch0.is(scratch1));
726  DCHECK(!scratch1.is(src));
727  MemOperand location = VarOperand(var, scratch0);
728  __ sw(src, location);
729  // Emit the write barrier code if the location is in the heap.
730  if (var->IsContextSlot()) {
731    __ RecordWriteContextSlot(scratch0,
732                              location.offset(),
733                              src,
734                              scratch1,
735                              kRAHasBeenSaved,
736                              kDontSaveFPRegs);
737  }
738}
739
740
741void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
742                                                     bool should_normalize,
743                                                     Label* if_true,
744                                                     Label* if_false) {
745  // Only prepare for bailouts before splits if we're in a test
746  // context. Otherwise, we let the Visit function deal with the
747  // preparation to avoid preparing with the same AST id twice.
748  if (!context()->IsTest()) return;
749
750  Label skip;
751  if (should_normalize) __ Branch(&skip);
752  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
753  if (should_normalize) {
754    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
755    Split(eq, v0, Operand(t0), if_true, if_false, NULL);
756    __ bind(&skip);
757  }
758}
759
760
761void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
762  // The variable in the declaration always resides in the current function
763  // context.
764  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
765  if (FLAG_debug_code) {
766    // Check that we're not inside a with or catch context.
767    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
768    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
769    __ Check(ne, kDeclarationInWithContext,
770        a1, Operand(t0));
771    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
772    __ Check(ne, kDeclarationInCatchContext,
773        a1, Operand(t0));
774  }
775}
776
777
778void FullCodeGenerator::VisitVariableDeclaration(
779    VariableDeclaration* declaration) {
780  VariableProxy* proxy = declaration->proxy();
781  Variable* variable = proxy->var();
782  switch (variable->location()) {
783    case VariableLocation::UNALLOCATED: {
784      DCHECK(!variable->binding_needs_init());
785      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
786      DCHECK(!slot.IsInvalid());
787      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
788      globals_->Add(isolate()->factory()->undefined_value(), zone());
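      // Global declarations accumulate in globals_ as (feedback slot, initial
      // value) pairs and are flushed in one batch through DeclareGlobals().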
789      break;
790    }
791    case VariableLocation::PARAMETER:
792    case VariableLocation::LOCAL:
793      if (variable->binding_needs_init()) {
794        Comment cmnt(masm_, "[ VariableDeclaration");
795        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
796        __ sw(t0, StackOperand(variable));
797      }
798      break;
799
800    case VariableLocation::CONTEXT:
801      if (variable->binding_needs_init()) {
802        Comment cmnt(masm_, "[ VariableDeclaration");
803        EmitDebugCheckDeclarationContext(variable);
804          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
805          __ sw(at, ContextMemOperand(cp, variable->index()));
806          // No write barrier since the_hole_value is in old space.
807          PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
808      }
809      break;
810
811    case VariableLocation::LOOKUP: {
812      Comment cmnt(masm_, "[ VariableDeclaration");
813      DCHECK_EQ(VAR, variable->mode());
814      DCHECK(!variable->binding_needs_init());
815      __ li(a2, Operand(variable->name()));
816      __ Push(a2);
817      __ CallRuntime(Runtime::kDeclareEvalVar);
818      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
819      break;
820    }
821
822    case VariableLocation::MODULE:
823      UNREACHABLE();
824  }
825}
826
827
828void FullCodeGenerator::VisitFunctionDeclaration(
829    FunctionDeclaration* declaration) {
830  VariableProxy* proxy = declaration->proxy();
831  Variable* variable = proxy->var();
832  switch (variable->location()) {
833    case VariableLocation::UNALLOCATED: {
834      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
835      DCHECK(!slot.IsInvalid());
836      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
837      Handle<SharedFunctionInfo> function =
838          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
839      // Check for stack-overflow exception.
840      if (function.is_null()) return SetStackOverflow();
841      globals_->Add(function, zone());
842      break;
843    }
844
845    case VariableLocation::PARAMETER:
846    case VariableLocation::LOCAL: {
847      Comment cmnt(masm_, "[ FunctionDeclaration");
848      VisitForAccumulatorValue(declaration->fun());
849      __ sw(result_register(), StackOperand(variable));
850      break;
851    }
852
853    case VariableLocation::CONTEXT: {
854      Comment cmnt(masm_, "[ FunctionDeclaration");
855      EmitDebugCheckDeclarationContext(variable);
856      VisitForAccumulatorValue(declaration->fun());
857      __ sw(result_register(), ContextMemOperand(cp, variable->index()));
858      int offset = Context::SlotOffset(variable->index());
859      // We know that we have written a function, which is not a smi.
860      __ RecordWriteContextSlot(cp,
861                                offset,
862                                result_register(),
863                                a2,
864                                kRAHasBeenSaved,
865                                kDontSaveFPRegs,
866                                EMIT_REMEMBERED_SET,
867                                OMIT_SMI_CHECK);
868      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
869      break;
870    }
871
872    case VariableLocation::LOOKUP: {
873      Comment cmnt(masm_, "[ FunctionDeclaration");
874      __ li(a2, Operand(variable->name()));
875      PushOperand(a2);
876      // Push initial value for function declaration.
877      VisitForStackValue(declaration->fun());
878      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
879      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
880      break;
881    }
882
883    case VariableLocation::MODULE:
884      UNREACHABLE();
885  }
886}
887
888
889void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
890  // Call the runtime to declare the globals.
891  __ li(a1, Operand(pairs));
892  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
893  __ EmitLoadTypeFeedbackVector(a2);
894  __ Push(a1, a0, a2);
895  __ CallRuntime(Runtime::kDeclareGlobals);
896  // Return value is ignored.
897}
898
899
900void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
901  Comment cmnt(masm_, "[ SwitchStatement");
902  Breakable nested_statement(this, stmt);
903  SetStatementPosition(stmt);
904
905  // Keep the switch value on the stack until a case matches.
906  VisitForStackValue(stmt->tag());
907  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
908
909  ZoneList<CaseClause*>* clauses = stmt->cases();
910  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
911
912  Label next_test;  // Recycled for each test.
913  // Compile all the tests with branches to their bodies.
914  for (int i = 0; i < clauses->length(); i++) {
915    CaseClause* clause = clauses->at(i);
916    clause->body_target()->Unuse();
917
918    // The default is not a test, but remember it as the final fall-through.
919    if (clause->is_default()) {
920      default_clause = clause;
921      continue;
922    }
923
924    Comment cmnt(masm_, "[ Case comparison");
925    __ bind(&next_test);
926    next_test.Unuse();
927
928    // Compile the label expression.
929    VisitForAccumulatorValue(clause->label());
930    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.
931
932    // Perform the comparison as if via '==='.
933    __ lw(a1, MemOperand(sp, 0));  // Switch value.
934    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
935    JumpPatchSite patch_site(masm_);
936    if (inline_smi_code) {
937      Label slow_case;
938      __ or_(a2, a1, a0);
939      patch_site.EmitJumpIfNotSmi(a2, &slow_case);
940
941      __ Branch(&next_test, ne, a1, Operand(a0));
942      __ Drop(1);  // Switch value is no longer needed.
943      __ Branch(clause->body_target());
944
945      __ bind(&slow_case);
946    }
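    // When both operands are smis, strict equality reduces to a word
    // comparison, so the patchable smi check above lets the CompareIC call be
    // skipped entirely on the fast path.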
947
948    // Record position before stub call for type feedback.
949    SetExpressionPosition(clause);
950    Handle<Code> ic =
951        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
952    CallIC(ic, clause->CompareId());
953    patch_site.EmitPatchInfo();
954
955    Label skip;
956    __ Branch(&skip);
957    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
958    __ LoadRoot(at, Heap::kTrueValueRootIndex);
959    __ Branch(&next_test, ne, v0, Operand(at));
960    __ Drop(1);
961    __ Branch(clause->body_target());
962    __ bind(&skip);
963
964    __ Branch(&next_test, ne, v0, Operand(zero_reg));
965    __ Drop(1);  // Switch value is no longer needed.
966    __ Branch(clause->body_target());
967  }
968
969  // Discard the test value and jump to the default if present, otherwise to
970  // the end of the statement.
971  __ bind(&next_test);
972  DropOperands(1);  // Switch value is no longer needed.
973  if (default_clause == NULL) {
974    __ Branch(nested_statement.break_label());
975  } else {
976    __ Branch(default_clause->body_target());
977  }
978
979  // Compile all the case bodies.
980  for (int i = 0; i < clauses->length(); i++) {
981    Comment cmnt(masm_, "[ Case body");
982    CaseClause* clause = clauses->at(i);
983    __ bind(clause->body_target());
984    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
985    VisitStatements(clause->statements());
986  }
987
988  __ bind(nested_statement.break_label());
989  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
990}
991
992
993void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
994  Comment cmnt(masm_, "[ ForInStatement");
995  SetStatementPosition(stmt, SKIP_BREAK);
996
997  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
998
999  // Get the object to enumerate over.
1000  SetExpressionAsStatementPosition(stmt->enumerable());
1001  VisitForAccumulatorValue(stmt->enumerable());
1002  __ mov(a0, result_register());
1003  OperandStackDepthIncrement(5);
1004
1005  Label loop, exit;
1006  Iteration loop_statement(this, stmt);
1007  increment_loop_depth();
1008
1009  // If the object is null or undefined, skip over the loop; otherwise convert
1010  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
1011  Label convert, done_convert;
1012  __ JumpIfSmi(a0, &convert);
1013  __ GetObjectType(a0, a1, a1);
1014  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
1015            Operand(FIRST_JS_RECEIVER_TYPE));
1016  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
1017  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
1018  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
1019  __ Branch(&exit, eq, a0, Operand(at));
1020  __ bind(&convert);
1021  ToObjectStub stub(isolate());
1022  __ CallStub(&stub);
1023  RestoreContext();
1024  __ mov(a0, v0);
1025  __ bind(&done_convert);
1026  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
1027  __ push(a0);
1028
1029  // Check cache validity in generated code. If we cannot guarantee cache
1030  // validity, call the runtime system to check cache validity or get the
1031  // property names in a fixed array. Note: Proxies never have an enum cache,
1032  // so they will always take the slow path.
1033  Label call_runtime;
1034  __ CheckEnumCache(&call_runtime);
1035
1036  // The enum cache is valid.  Load the map of the object being
1037  // iterated over and use the cache for the iteration.
1038  Label use_cache;
1039  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1040  __ Branch(&use_cache);
1041
1042  // Get the set of properties to enumerate.
1043  __ bind(&call_runtime);
1044  __ push(a0);  // Duplicate the enumerable object on the stack.
1045  __ CallRuntime(Runtime::kForInEnumerate);
1046  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1047
1048  // If we got a map from the runtime call, we can do a fast
1049  // modification check. Otherwise, we got a fixed array, and we have
1050  // to do a slow check.
1051  Label fixed_array;
1052  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1053  __ LoadRoot(at, Heap::kMetaMapRootIndex);
1054  __ Branch(&fixed_array, ne, a2, Operand(at));
1055
1056  // We got a map in register v0. Get the enumeration cache from it.
1057  Label no_descriptors;
1058  __ bind(&use_cache);
1059
1060  __ EnumLength(a1, v0);
1061  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1062
1063  __ LoadInstanceDescriptors(v0, a2);
1064  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1065  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
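  // a2 now holds the FixedArray of enumerable keys from the map's enum cache.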
1066
1067  // Set up the four remaining stack slots.
1068  __ li(a0, Operand(Smi::FromInt(0)));
1069  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1070  __ Push(v0, a2, a1, a0);
1071  __ jmp(&loop);
1072
1073  __ bind(&no_descriptors);
1074  __ Drop(1);
1075  __ jmp(&exit);
1076
1077  // We got a fixed array in register v0. Iterate through that.
1078  __ bind(&fixed_array);
1079
1080  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
1081  __ Push(a1, v0);  // Smi and array
1082  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1083  __ Push(a1);  // Fixed array length (as smi).
1084  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1085  __ li(a0, Operand(Smi::FromInt(0)));
1086  __ Push(a0);  // Initial index.
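  // The loop below expects five slots on the stack, from the top: the current
  // index (smi), the length (smi), the fixed array of keys (or enum cache),
  // the expected map (or Smi(1) in this slow case), and the enumerable object.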
1087
1088  // Generate code for doing the condition check.
1089  __ bind(&loop);
1090  SetExpressionAsStatementPosition(stmt->each());
1091
1092  // Load the current count to a0, load the length to a1.
1093  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1094  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1095  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1096
1097  // Get the current entry of the array into result_register.
1098  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1099  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1100  __ Lsa(t0, a2, a0, kPointerSizeLog2 - kSmiTagSize);
1101  __ lw(result_register(), MemOperand(t0));  // Current entry.
1102
1103  // Load the expected map from the stack into register a2 (it is a smi in
1104  // the permanent slow case).
1105  __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1106
1107  // Check if the expected map still matches that of the enumerable.
1108  // If not, we may have to filter the key.
1109  Label update_each;
1110  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1111  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1112  __ Branch(&update_each, eq, t0, Operand(a2));
1113
1114  // We need to filter the key; record the slow path here.
1115  int const vector_index = SmiFromSlot(slot)->value();
1116  __ EmitLoadTypeFeedbackVector(a3);
1117  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1118  __ sw(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));
1119
1120  __ mov(a0, result_register());
1121  // a0 contains the key. The receiver in a1 is the second argument to the
1122  // ForInFilterStub. ForInFilter returns undefined if the receiver doesn't
1123  // have the key, and the name-converted key otherwise.
1124  ForInFilterStub filter_stub(isolate());
1125  __ CallStub(&filter_stub);
1126  RestoreContext();
1127  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1128  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1129  __ Branch(loop_statement.continue_label(), eq, result_register(),
1130            Operand(at));
1131
1132  // Update the 'each' property or variable from the possibly filtered
1133  // entry in the result_register.
1134  __ bind(&update_each);
1135  // Perform the assignment as if via '='.
1136  { EffectContext context(this);
1137    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1138    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1139  }
1140
1141  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1142  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1143  // Generate code for the body of the loop.
1144  Visit(stmt->body());
1145
1146  // Generate code for going to the next element by incrementing
1147  // the index (smi) stored on top of the stack.
1148  __ bind(loop_statement.continue_label());
1149  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1150  __ pop(a0);
1151  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1152  __ push(a0);
1153
1154  EmitBackEdgeBookkeeping(stmt, &loop);
1155  __ Branch(&loop);
1156
1157  // Remove the pointers stored on the stack.
1158  __ bind(loop_statement.break_label());
1159  DropOperands(5);
1160
1161  // Exit and decrement the loop depth.
1162  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1163  __ bind(&exit);
1164  decrement_loop_depth();
1165}
1166
1167
1168void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1169                                          FeedbackVectorSlot slot) {
1170  DCHECK(NeedsHomeObject(initializer));
1171  __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1172  __ lw(StoreDescriptor::ValueRegister(),
1173        MemOperand(sp, offset * kPointerSize));
1174  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1175}
1176
1177
1178void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1179                                                     int offset,
1180                                                     FeedbackVectorSlot slot) {
1181  DCHECK(NeedsHomeObject(initializer));
1182  __ Move(StoreDescriptor::ReceiverRegister(), v0);
1183  __ lw(StoreDescriptor::ValueRegister(),
1184        MemOperand(sp, offset * kPointerSize));
1185  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1186}
1187
1188
1189void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1190                                                      TypeofMode typeof_mode,
1191                                                      Label* slow) {
1192  Register current = cp;
1193  Register next = a1;
1194  Register temp = a2;
1195
1196  int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1197  for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1198    if (!s->NeedsContext()) continue;
1199    if (s->calls_sloppy_eval()) {
1200      // Check that extension is "the hole".
1201      __ lw(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1202      __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1203    }
1204    // Load next context in chain.
1205    __ lw(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1206    // Walk the rest of the chain without clobbering cp.
1207    current = next;
1208    to_check--;
1209  }
1210
1211  // All extension objects were empty and it is safe to use the normal global
1212  // load machinery.
1213  EmitGlobalVariableLoad(proxy, typeof_mode);
1214}
1215
1216
1217MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1218                                                                Label* slow) {
1219  DCHECK(var->IsContextSlot());
1220  Register context = cp;
1221  Register next = a3;
1222  Register temp = t0;
1223
1224  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1225    if (s->NeedsContext()) {
1226      if (s->calls_sloppy_eval()) {
1227        // Check that extension is "the hole".
1228        __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1229        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1230      }
1231      __ lw(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1232      // Walk the rest of the chain without clobbering cp.
1233      context = next;
1234    }
1235  }
1236  // Check that last extension is "the hole".
1237  __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1238  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1239
1240  // This function is used only for loads, not stores, so it's safe to
1241  // return a cp-based operand (the write barrier cannot be allowed to
1242  // destroy the cp register).
1243  return ContextMemOperand(context, var->index());
1244}
1245
1246
1247void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1248                                                  TypeofMode typeof_mode,
1249                                                  Label* slow, Label* done) {
1250  // Generate fast-case code for variables that might be shadowed by
1251  // eval-introduced variables.  Eval is used a lot without
1252  // introducing variables.  In those cases, we do not want to
1253  // perform a runtime call for all variables in the scope
1254  // containing the eval.
1255  Variable* var = proxy->var();
1256  if (var->mode() == DYNAMIC_GLOBAL) {
1257    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1258    __ Branch(done);
1259  } else if (var->mode() == DYNAMIC_LOCAL) {
1260    Variable* local = var->local_if_not_shadowed();
1261    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1262    if (local->binding_needs_init()) {
1263      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1264      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
1265      __ Branch(done, ne, at, Operand(zero_reg));
1266      __ li(a0, Operand(var->name()));
1267      __ push(a0);
1268      __ CallRuntime(Runtime::kThrowReferenceError);
1269    } else {
1270      __ Branch(done);
1271    }
1272  }
1273}
1274
1275void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1276                                         TypeofMode typeof_mode) {
1277  // Record position before possible IC call.
1278  SetExpressionPosition(proxy);
1279  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1280  Variable* var = proxy->var();
1281
1282  // Three cases: global variables, lookup variables, and all other types of
1283  // variables.
1284  switch (var->location()) {
1285    case VariableLocation::UNALLOCATED: {
1286      Comment cmnt(masm_, "[ Global variable");
1287      EmitGlobalVariableLoad(proxy, typeof_mode);
1288      context()->Plug(v0);
1289      break;
1290    }
1291
1292    case VariableLocation::PARAMETER:
1293    case VariableLocation::LOCAL:
1294    case VariableLocation::CONTEXT: {
1295      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1296      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1297                                               : "[ Stack variable");
1298      if (NeedsHoleCheckForLoad(proxy)) {
1299        // Throw a reference error when using an uninitialized let/const
1300        // binding in harmony mode.
1301        Label done;
1302        GetVar(v0, var);
1303        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1304        __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
1305        __ Branch(&done, ne, at, Operand(zero_reg));
1306        __ li(a0, Operand(var->name()));
1307        __ push(a0);
1308        __ CallRuntime(Runtime::kThrowReferenceError);
1309        __ bind(&done);
1310        context()->Plug(v0);
1311        break;
1312      }
1313      context()->Plug(var);
1314      break;
1315    }
1316
1317    case VariableLocation::LOOKUP: {
1318      Comment cmnt(masm_, "[ Lookup variable");
1319      Label done, slow;
1320      // Generate code for loading from variables potentially shadowed
1321      // by eval-introduced variables.
1322      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1323      __ bind(&slow);
1324      __ Push(var->name());
1325      Runtime::FunctionId function_id =
1326          typeof_mode == NOT_INSIDE_TYPEOF
1327              ? Runtime::kLoadLookupSlot
1328              : Runtime::kLoadLookupSlotInsideTypeof;
1329      __ CallRuntime(function_id);
1330      __ bind(&done);
1331      context()->Plug(v0);
1332      break;
1333    }
1334
1335    case VariableLocation::MODULE:
1336      UNREACHABLE();
1337  }
1338}
1339
1340
1341void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1342  Expression* expression = (property == NULL) ? NULL : property->value();
1343  if (expression == NULL) {
1344    __ LoadRoot(a1, Heap::kNullValueRootIndex);
1345    PushOperand(a1);
1346  } else {
1347    VisitForStackValue(expression);
1348    if (NeedsHomeObject(expression)) {
1349      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1350             property->kind() == ObjectLiteral::Property::SETTER);
1351      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1352      EmitSetHomeObject(expression, offset, property->GetSlot());
1353    }
1354  }
1355}
1356
1357
1358void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1359  Comment cmnt(masm_, "[ ObjectLiteral");
1360
1361  Handle<FixedArray> constant_properties = expr->constant_properties();
1362  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1363  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1364  __ li(a1, Operand(constant_properties));
1365  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1366  if (MustCreateObjectLiteralWithRuntime(expr)) {
1367    __ Push(a3, a2, a1, a0);
1368    __ CallRuntime(Runtime::kCreateObjectLiteral);
1369  } else {
1370    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1371    __ CallStub(&stub);
1372    RestoreContext();
1373  }
1374  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1375
1376  // If result_saved is true the result is on top of the stack.  If
1377  // result_saved is false the result is in v0.
1378  bool result_saved = false;
1379
1380  AccessorTable accessor_table(zone());
1381  int property_index = 0;
1382  for (; property_index < expr->properties()->length(); property_index++) {
1383    ObjectLiteral::Property* property = expr->properties()->at(property_index);
1384    if (property->is_computed_name()) break;
1385    if (property->IsCompileTimeValue()) continue;
1386
1387    Literal* key = property->key()->AsLiteral();
1388    Expression* value = property->value();
1389    if (!result_saved) {
1390      PushOperand(v0);  // Save result on stack.
1391      result_saved = true;
1392    }
1393    switch (property->kind()) {
1394      case ObjectLiteral::Property::CONSTANT:
1395        UNREACHABLE();
1396      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1397        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1398        // Fall through.
1399      case ObjectLiteral::Property::COMPUTED:
1400        // It is safe to use [[Put]] here because the boilerplate already
1401        // contains computed properties with an uninitialized value.
1402        if (key->IsStringLiteral()) {
1403          DCHECK(key->IsPropertyName());
1404          if (property->emit_store()) {
1405            VisitForAccumulatorValue(value);
1406            __ mov(StoreDescriptor::ValueRegister(), result_register());
1407            DCHECK(StoreDescriptor::ValueRegister().is(a0));
1408            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1409            CallStoreIC(property->GetSlot(0), key->value());
1410            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1411
1412            if (NeedsHomeObject(value)) {
1413              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1414            }
1415          } else {
1416            VisitForEffect(value);
1417          }
1418          break;
1419        }
1420        // Duplicate receiver on stack.
1421        __ lw(a0, MemOperand(sp));
1422        PushOperand(a0);
1423        VisitForStackValue(key);
1424        VisitForStackValue(value);
1425        if (property->emit_store()) {
1426          if (NeedsHomeObject(value)) {
1427            EmitSetHomeObject(value, 2, property->GetSlot());
1428          }
1429          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
1430          PushOperand(a0);
1431          CallRuntimeWithOperands(Runtime::kSetProperty);
1432        } else {
1433          DropOperands(3);
1434        }
1435        break;
1436      case ObjectLiteral::Property::PROTOTYPE:
1437        // Duplicate receiver on stack.
1438        __ lw(a0, MemOperand(sp));
1439        PushOperand(a0);
1440        VisitForStackValue(value);
1441        DCHECK(property->emit_store());
1442        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1443        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1444                               BailoutState::NO_REGISTERS);
1445        break;
1446      case ObjectLiteral::Property::GETTER:
1447        if (property->emit_store()) {
1448          AccessorTable::Iterator it = accessor_table.lookup(key);
1449          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1450          it->second->getter = property;
1451        }
1452        break;
1453      case ObjectLiteral::Property::SETTER:
1454        if (property->emit_store()) {
1455          AccessorTable::Iterator it = accessor_table.lookup(key);
1456          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1457          it->second->setter = property;
1458        }
1459        break;
1460    }
1461  }
1462
1463  // Emit code to define accessors, using only a single call to the runtime for
1464  // each pair of corresponding getters and setters.
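  // (Illustrative example: the literal { get x() {}, set x(v) {} } produces a
  // single accessor_table entry for "x", so both accessors are installed with
  // one kDefineAccessorPropertyUnchecked call below.)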
1465  for (AccessorTable::Iterator it = accessor_table.begin();
1466       it != accessor_table.end();
1467       ++it) {
1468    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
1469    PushOperand(a0);
1470    VisitForStackValue(it->first);
1471    EmitAccessor(it->second->getter);
1472    EmitAccessor(it->second->setter);
1473    __ li(a0, Operand(Smi::FromInt(NONE)));
1474    PushOperand(a0);
1475    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1476    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1477  }
1478
1479  // Object literals have two parts. The "static" part on the left contains no
1480  // computed property names, and so we can compute its map ahead of time; see
1481  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1482  // starts with the first computed property name, and continues with all
1483  // properties to its right.  All the code from above initializes the static
1484  // component of the object literal, and arranges for the map of the result to
1485  // reflect the static order in which the keys appear. For the dynamic
1486  // properties, we compile them into a series of "SetOwnProperty" runtime
1487  // calls. This will preserve insertion order.
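  // (Illustrative example: in { a: 1, [key]: 2, b: 3 } the property 'a' is part
  // of the static component handled above, while [key] and everything after it,
  // including 'b', is defined by the per-property runtime calls emitted below.)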
1488  for (; property_index < expr->properties()->length(); property_index++) {
1489    ObjectLiteral::Property* property = expr->properties()->at(property_index);
1490
1491    Expression* value = property->value();
1492    if (!result_saved) {
1493      PushOperand(v0);  // Save result on the stack
1494      result_saved = true;
1495    }
1496
1497    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
1498    PushOperand(a0);
1499
1500    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1501      DCHECK(!property->is_computed_name());
1502      VisitForStackValue(value);
1503      DCHECK(property->emit_store());
1504      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1505      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1506                             BailoutState::NO_REGISTERS);
1507    } else {
1508      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1509      VisitForStackValue(value);
1510      if (NeedsHomeObject(value)) {
1511        EmitSetHomeObject(value, 2, property->GetSlot());
1512      }
1513
1514      switch (property->kind()) {
1515        case ObjectLiteral::Property::CONSTANT:
1516        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1517        case ObjectLiteral::Property::COMPUTED:
1518          if (property->emit_store()) {
1519            PushOperand(Smi::FromInt(NONE));
1520            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1521            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1522            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1523                                   BailoutState::NO_REGISTERS);
1524          } else {
1525            DropOperands(3);
1526          }
1527          break;
1528
1529        case ObjectLiteral::Property::PROTOTYPE:
1530          UNREACHABLE();
1531          break;
1532
1533        case ObjectLiteral::Property::GETTER:
1534          PushOperand(Smi::FromInt(NONE));
1535          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1536          break;
1537
1538        case ObjectLiteral::Property::SETTER:
1539          PushOperand(Smi::FromInt(NONE));
1540          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1541          break;
1542      }
1543    }
1544  }
1545
1546  if (result_saved) {
1547    context()->PlugTOS();
1548  } else {
1549    context()->Plug(v0);
1550  }
1551}
1552
1553
1554void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1555  Comment cmnt(masm_, "[ ArrayLiteral");
1556
1557  Handle<FixedArray> constant_elements = expr->constant_elements();
1558  bool has_fast_elements =
1559      IsFastObjectElementsKind(expr->constant_elements_kind());
1560
1561  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1562  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1563    // If the only customer of allocation sites is transitioning, tracking can
1564    // be turned off: with already-fast elements there is nothing to transition to.
1565    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1566  }
1567
1568  __ mov(a0, result_register());
1569  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1570  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1571  __ li(a1, Operand(constant_elements));
1572  if (MustCreateArrayLiteralWithRuntime(expr)) {
1573    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1574    __ Push(a3, a2, a1, a0);
1575    __ CallRuntime(Runtime::kCreateArrayLiteral);
1576  } else {
1577    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1578    __ CallStub(&stub);
1579    RestoreContext();
1580  }
1581  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1582
1583  bool result_saved = false;  // Is the result saved to the stack?
1584  ZoneList<Expression*>* subexprs = expr->values();
1585  int length = subexprs->length();
1586
1587  // Emit code to evaluate all the non-constant subexpressions and to store
1588  // them into the newly cloned array.
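  // (Illustrative example: for [1, x, 3] the constants 1 and 3 already sit in
  // the cloned boilerplate; only the non-constant element x is evaluated here
  // and written back through the keyed store IC.)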
1589  for (int array_index = 0; array_index < length; array_index++) {
1590    Expression* subexpr = subexprs->at(array_index);
1591    DCHECK(!subexpr->IsSpread());
1592
1593    // If the subexpression is a literal or a simple materialized literal it
1594    // is already set in the cloned array.
1595    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1596
1597    if (!result_saved) {
1598      PushOperand(v0);  // array literal
1599      result_saved = true;
1600    }
1601
1602    VisitForAccumulatorValue(subexpr);
1603
1604    __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1605    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1606    __ mov(StoreDescriptor::ValueRegister(), result_register());
1607    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1608
1609    PrepareForBailoutForId(expr->GetIdForElement(array_index),
1610                           BailoutState::NO_REGISTERS);
1611  }
1612
1613  if (result_saved) {
1614    context()->PlugTOS();
1615  } else {
1616    context()->Plug(v0);
1617  }
1618}
1619
1620
1621void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1622  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1623
1624  Comment cmnt(masm_, "[ Assignment");
1625
1626  Property* property = expr->target()->AsProperty();
1627  LhsKind assign_type = Property::GetAssignType(property);
1628
1629  // Evaluate LHS expression.
1630  switch (assign_type) {
1631    case VARIABLE:
1632      // Nothing to do here.
1633      break;
1634    case NAMED_PROPERTY:
1635      if (expr->is_compound()) {
1636        // We need the receiver both on the stack and in the register.
1637        VisitForStackValue(property->obj());
1638        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1639      } else {
1640        VisitForStackValue(property->obj());
1641      }
1642      break;
1643    case NAMED_SUPER_PROPERTY:
1644      VisitForStackValue(
1645          property->obj()->AsSuperPropertyReference()->this_var());
1646      VisitForAccumulatorValue(
1647          property->obj()->AsSuperPropertyReference()->home_object());
1648      PushOperand(result_register());
1649      if (expr->is_compound()) {
1650        const Register scratch = a1;
1651        __ lw(scratch, MemOperand(sp, kPointerSize));
1652        PushOperands(scratch, result_register());
1653      }
1654      break;
1655    case KEYED_SUPER_PROPERTY: {
1656      VisitForStackValue(
1657          property->obj()->AsSuperPropertyReference()->this_var());
1658      VisitForStackValue(
1659          property->obj()->AsSuperPropertyReference()->home_object());
1660      VisitForAccumulatorValue(property->key());
1661      PushOperand(result_register());
1662      if (expr->is_compound()) {
1663        const Register scratch1 = t0;
1664        const Register scratch2 = a1;
1665        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
1666        __ lw(scratch2, MemOperand(sp, 1 * kPointerSize));
1667        PushOperands(scratch1, scratch2, result_register());
1668      }
1669      break;
1670    }
1671    case KEYED_PROPERTY:
1672      // We need the key and receiver both on the stack and in the IC registers.
1673      if (expr->is_compound()) {
1674        VisitForStackValue(property->obj());
1675        VisitForStackValue(property->key());
1676        __ lw(LoadDescriptor::ReceiverRegister(),
1677              MemOperand(sp, 1 * kPointerSize));
1678        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1679      } else {
1680        VisitForStackValue(property->obj());
1681        VisitForStackValue(property->key());
1682      }
1683      break;
1684  }
1685
1686  // For compound assignments we need another deoptimization point after the
1687  // variable/property load.
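  // (Illustrative example: 'o.x += 1' is compound, so the load of o.x emitted
  // below gets its own bailout point, separate from the store further down.)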
1688  if (expr->is_compound()) {
1689    { AccumulatorValueContext context(this);
1690      switch (assign_type) {
1691        case VARIABLE:
1692          EmitVariableLoad(expr->target()->AsVariableProxy());
1693          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1694          break;
1695        case NAMED_PROPERTY:
1696          EmitNamedPropertyLoad(property);
1697          PrepareForBailoutForId(property->LoadId(),
1698                                 BailoutState::TOS_REGISTER);
1699          break;
1700        case NAMED_SUPER_PROPERTY:
1701          EmitNamedSuperPropertyLoad(property);
1702          PrepareForBailoutForId(property->LoadId(),
1703                                 BailoutState::TOS_REGISTER);
1704          break;
1705        case KEYED_SUPER_PROPERTY:
1706          EmitKeyedSuperPropertyLoad(property);
1707          PrepareForBailoutForId(property->LoadId(),
1708                                 BailoutState::TOS_REGISTER);
1709          break;
1710        case KEYED_PROPERTY:
1711          EmitKeyedPropertyLoad(property);
1712          PrepareForBailoutForId(property->LoadId(),
1713                                 BailoutState::TOS_REGISTER);
1714          break;
1715      }
1716    }
1717
1718    Token::Value op = expr->binary_op();
1719    PushOperand(v0);  // Left operand goes on the stack.
1720    VisitForAccumulatorValue(expr->value());
1721
1722    AccumulatorValueContext context(this);
1723    if (ShouldInlineSmiCase(op)) {
1724      EmitInlineSmiBinaryOp(expr->binary_operation(),
1725                            op,
1726                            expr->target(),
1727                            expr->value());
1728    } else {
1729      EmitBinaryOp(expr->binary_operation(), op);
1730    }
1731
1732    // Deoptimization point in case the binary operation may have side effects.
1733    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1734  } else {
1735    VisitForAccumulatorValue(expr->value());
1736  }
1737
1738  SetExpressionPosition(expr);
1739
1740  // Store the value.
1741  switch (assign_type) {
1742    case VARIABLE:
1743      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1744                             expr->op(), expr->AssignmentSlot());
1745      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1746      context()->Plug(v0);
1747      break;
1748    case NAMED_PROPERTY:
1749      EmitNamedPropertyAssignment(expr);
1750      break;
1751    case NAMED_SUPER_PROPERTY:
1752      EmitNamedSuperPropertyStore(property);
1753      context()->Plug(v0);
1754      break;
1755    case KEYED_SUPER_PROPERTY:
1756      EmitKeyedSuperPropertyStore(property);
1757      context()->Plug(v0);
1758      break;
1759    case KEYED_PROPERTY:
1760      EmitKeyedPropertyAssignment(expr);
1761      break;
1762  }
1763}
1764
1765
1766void FullCodeGenerator::VisitYield(Yield* expr) {
1767  Comment cmnt(masm_, "[ Yield");
1768  SetExpressionPosition(expr);
1769
1770  // Evaluate yielded value first; the initial iterator definition depends on
1771  // this.  It stays on the stack while we update the iterator.
1772  VisitForStackValue(expr->expression());
1773
1774  Label suspend, continuation, post_runtime, resume, exception;
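  // Rough control flow: we jump straight to &suspend, store the resume point
  // (&continuation) and the current context in the generator object, and return
  // to the caller. A later resume re-enters at &continuation with the generator
  // object in v0; the resume mode then selects between resuming normally,
  // (re)throwing, or returning the value wrapped in an iterator result.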
1775
1776  __ jmp(&suspend);
1777  __ bind(&continuation);
1778  // When we arrive here, v0 holds the generator object.
1779  __ RecordGeneratorContinuation();
1780  __ lw(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
1781  __ lw(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOrDebugPosOffset));
1782  __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
1783  __ Push(result_register());
1784  __ Branch(&exception, eq, a1,
1785            Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
1786  EmitCreateIteratorResult(true);
1787  EmitUnwindAndReturn();
1788
1789  __ bind(&exception);
1790  __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
1791                                              : Runtime::kThrow);
1792
1793  __ bind(&suspend);
1794  OperandStackDepthIncrement(1);  // Not popped on this path.
1795  VisitForAccumulatorValue(expr->generator_object());
1796  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1797  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1798  __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1799  __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1800  __ mov(a1, cp);
1801  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1802                      kRAHasBeenSaved, kDontSaveFPRegs);
1803  __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1804  __ Branch(&post_runtime, eq, sp, Operand(a1));
1805  __ push(v0);  // generator object
1806  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1807  RestoreContext();
1808  __ bind(&post_runtime);
1809  PopOperand(result_register());
1810  EmitReturnSequence();
1811
1812  __ bind(&resume);
1813  context()->Plug(result_register());
1814}
1815
1816void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1817  OperandStackDepthIncrement(2);
1818  __ Push(reg1, reg2);
1819}
1820
1821void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1822                                     Register reg3) {
1823  OperandStackDepthIncrement(3);
1824  __ Push(reg1, reg2, reg3);
1825}
1826
1827void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1828                                     Register reg3, Register reg4) {
1829  OperandStackDepthIncrement(4);
1830  __ Push(reg1, reg2, reg3, reg4);
1831}
1832
1833void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1834  OperandStackDepthDecrement(2);
1835  __ Pop(reg1, reg2);
1836}
1837
1838void FullCodeGenerator::EmitOperandStackDepthCheck() {
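  // Debug-only sanity check: fp - sp must equal the fixed frame size plus the
  // operand stack depth tracked at compile time, i.e. the virtual and actual
  // operand stacks are in sync.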
1839  if (FLAG_debug_code) {
1840    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1841                        operand_stack_depth_ * kPointerSize;
1842    __ Subu(v0, fp, sp);
1843    __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
1844  }
1845}
1846
1847void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1848  Label allocate, done_allocate;
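  // Allocate a JSIteratorResult in new space (falling back to the runtime on
  // allocation failure) and initialize it as { value: <popped operand>,
  // done: <compile-time constant> }.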
1849
1850  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
1851              NO_ALLOCATION_FLAGS);
1852  __ jmp(&done_allocate);
1853
1854  __ bind(&allocate);
1855  __ Push(Smi::FromInt(JSIteratorResult::kSize));
1856  __ CallRuntime(Runtime::kAllocateInNewSpace);
1857
1858  __ bind(&done_allocate);
1859  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
1860  PopOperand(a2);
1861  __ LoadRoot(a3,
1862              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1863  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
1864  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
1865  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
1866  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
1867  __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
1868  __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
1869  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1870}
1871
1872
1873void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1874                                              Token::Value op,
1875                                              Expression* left_expr,
1876                                              Expression* right_expr) {
1877  Label done, smi_case, stub_call;
1878
1879  Register scratch1 = a2;
1880  Register scratch2 = a3;
1881
1882  // Get the arguments.
1883  Register left = a1;
1884  Register right = a0;
1885  PopOperand(left);
1886  __ mov(a0, result_register());
1887
1888  // Perform combined smi check on both operands.
1889  __ Or(scratch1, left, Operand(right));
1890  STATIC_ASSERT(kSmiTag == 0);
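  // With a zero smi tag, OR-ing the operands leaves the tag bit clear only if
  // both are smis, so a single test covers both operands.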
1891  JumpPatchSite patch_site(masm_);
1892  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1893
1894  __ bind(&stub_call);
1895  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1896  CallIC(code, expr->BinaryOperationFeedbackId());
1897  patch_site.EmitPatchInfo();
1898  __ jmp(&done);
1899
1900  __ bind(&smi_case);
1901  // Smi case. This code works the same way as the smi-smi case in the type
1902  // recording binary operation stub (see the BinaryOpIC call above).
1903  switch (op) {
1904    case Token::SAR:
1905      __ GetLeastBitsFromSmi(scratch1, right, 5);
1906      __ srav(right, left, scratch1);
1907      __ And(v0, right, Operand(~kSmiTagMask));
1908      break;
1909    case Token::SHL: {
1910      __ SmiUntag(scratch1, left);
1911      __ GetLeastBitsFromSmi(scratch2, right, 5);
1912      __ sllv(scratch1, scratch1, scratch2);
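      // The shifted value fits in a smi only if it is within the signed 31-bit
      // range; adding 0x40000000 makes the sum negative exactly when it is not,
      // in which case we fall back to the stub.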
1913      __ Addu(scratch2, scratch1, Operand(0x40000000));
1914      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1915      __ SmiTag(v0, scratch1);
1916      break;
1917    }
1918    case Token::SHR: {
1919      __ SmiUntag(scratch1, left);
1920      __ GetLeastBitsFromSmi(scratch2, right, 5);
1921      __ srlv(scratch1, scratch1, scratch2);
1922      __ And(scratch2, scratch1, 0xc0000000);
1923      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1924      __ SmiTag(v0, scratch1);
1925      break;
1926    }
1927    case Token::ADD:
1928      __ AddBranchOvf(v0, left, Operand(right), &stub_call);
1929      break;
1930    case Token::SUB:
1931      __ SubBranchOvf(v0, left, Operand(right), &stub_call);
1932      break;
1933    case Token::MUL: {
1934      __ SmiUntag(scratch1, right);
1935      __ Mul(scratch2, v0, left, scratch1);
1936      __ sra(scratch1, v0, 31);
1937      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
1938      __ Branch(&done, ne, v0, Operand(zero_reg));
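      // The product is zero: if either operand was negative the correct result
      // is -0, which a smi cannot represent, so defer to the stub; otherwise
      // the result is +0.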
1939      __ Addu(scratch2, right, left);
1940      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1941      DCHECK(Smi::FromInt(0) == 0);
1942      __ mov(v0, zero_reg);
1943      break;
1944    }
1945    case Token::BIT_OR:
1946      __ Or(v0, left, Operand(right));
1947      break;
1948    case Token::BIT_AND:
1949      __ And(v0, left, Operand(right));
1950      break;
1951    case Token::BIT_XOR:
1952      __ Xor(v0, left, Operand(right));
1953      break;
1954    default:
1955      UNREACHABLE();
1956  }
1957
1958  __ bind(&done);
1959  context()->Plug(v0);
1960}
1961
1962
1963void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1964  for (int i = 0; i < lit->properties()->length(); i++) {
1965    ClassLiteral::Property* property = lit->properties()->at(i);
1966    Expression* value = property->value();
1967
1968    Register scratch = a1;
1969    if (property->is_static()) {
1970      __ lw(scratch, MemOperand(sp, kPointerSize));  // constructor
1971    } else {
1972      __ lw(scratch, MemOperand(sp, 0));  // prototype
1973    }
1974    PushOperand(scratch);
1975    EmitPropertyKey(property, lit->GetIdForProperty(i));
1976
1977    // The static prototype property is read-only. We handle the non-computed
1978    // property name case in the parser. Since this is the only case where we
1979    // need to check for an own read-only property, we special-case it here so
1980    // that the check is not repeated for every property.
1981    if (property->is_static() && property->is_computed_name()) {
1982      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1983      __ push(v0);
1984    }
1985
1986    VisitForStackValue(value);
1987    if (NeedsHomeObject(value)) {
1988      EmitSetHomeObject(value, 2, property->GetSlot());
1989    }
1990
1991    switch (property->kind()) {
1992      case ClassLiteral::Property::METHOD:
1993        PushOperand(Smi::FromInt(DONT_ENUM));
1994        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1995        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1996        break;
1997
1998      case ClassLiteral::Property::GETTER:
1999        PushOperand(Smi::FromInt(DONT_ENUM));
2000        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2001        break;
2002
2003      case ClassLiteral::Property::SETTER:
2004        PushOperand(Smi::FromInt(DONT_ENUM));
2005        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2006        break;
2007
2008      case ClassLiteral::Property::FIELD:
2009      default:
2010        UNREACHABLE();
2011    }
2012  }
2013}
2014
2015
2016void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2017  __ mov(a0, result_register());
2018  PopOperand(a1);
2019  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2020  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2021  CallIC(code, expr->BinaryOperationFeedbackId());
2022  patch_site.EmitPatchInfo();
2023  context()->Plug(v0);
2024}
2025
2026
2027void FullCodeGenerator::EmitAssignment(Expression* expr,
2028                                       FeedbackVectorSlot slot) {
2029  DCHECK(expr->IsValidReferenceExpressionOrThis());
2030
2031  Property* prop = expr->AsProperty();
2032  LhsKind assign_type = Property::GetAssignType(prop);
2033
2034  switch (assign_type) {
2035    case VARIABLE: {
2036      Variable* var = expr->AsVariableProxy()->var();
2037      EffectContext context(this);
2038      EmitVariableAssignment(var, Token::ASSIGN, slot);
2039      break;
2040    }
2041    case NAMED_PROPERTY: {
2042      PushOperand(result_register());  // Preserve value.
2043      VisitForAccumulatorValue(prop->obj());
2044      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2045      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
2046      CallStoreIC(slot, prop->key()->AsLiteral()->value());
2047      break;
2048    }
2049    case NAMED_SUPER_PROPERTY: {
2050      PushOperand(v0);
2051      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2052      VisitForAccumulatorValue(
2053          prop->obj()->AsSuperPropertyReference()->home_object());
2054      // stack: value, this; v0: home_object
2055      Register scratch = a2;
2056      Register scratch2 = a3;
2057      __ mov(scratch, result_register());             // home_object
2058      __ lw(v0, MemOperand(sp, kPointerSize));        // value
2059      __ lw(scratch2, MemOperand(sp, 0));             // this
2060      __ sw(scratch2, MemOperand(sp, kPointerSize));  // this
2061      __ sw(scratch, MemOperand(sp, 0));              // home_object
2062      // stack: this, home_object; v0: value
2063      EmitNamedSuperPropertyStore(prop);
2064      break;
2065    }
2066    case KEYED_SUPER_PROPERTY: {
2067      PushOperand(v0);
2068      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2069      VisitForStackValue(
2070          prop->obj()->AsSuperPropertyReference()->home_object());
2071      VisitForAccumulatorValue(prop->key());
2072      Register scratch = a2;
2073      Register scratch2 = a3;
2074      __ lw(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
2075      // stack: value, this, home_object; v0: key, a3: value
2076      __ lw(scratch, MemOperand(sp, kPointerSize));  // this
2077      __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
2078      __ lw(scratch, MemOperand(sp, 0));  // home_object
2079      __ sw(scratch, MemOperand(sp, kPointerSize));
2080      __ sw(v0, MemOperand(sp, 0));
2081      __ Move(v0, scratch2);
2082      // stack: this, home_object, key; v0: value.
2083      EmitKeyedSuperPropertyStore(prop);
2084      break;
2085    }
2086    case KEYED_PROPERTY: {
2087      PushOperand(result_register());  // Preserve value.
2088      VisitForStackValue(prop->obj());
2089      VisitForAccumulatorValue(prop->key());
2090      __ mov(StoreDescriptor::NameRegister(), result_register());
2091      PopOperands(StoreDescriptor::ValueRegister(),
2092                  StoreDescriptor::ReceiverRegister());
2093      CallKeyedStoreIC(slot);
2094      break;
2095    }
2096  }
2097  context()->Plug(v0);
2098}
2099
2100
2101void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2102    Variable* var, MemOperand location) {
2103  __ sw(result_register(), location);
2104  if (var->IsContextSlot()) {
2105    // RecordWrite may destroy all its register arguments.
2106    __ Move(a3, result_register());
2107    int offset = Context::SlotOffset(var->index());
2108    __ RecordWriteContextSlot(
2109        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2110  }
2111}
2112
2113
2114void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2115                                               FeedbackVectorSlot slot) {
2116  if (var->IsUnallocated()) {
2117    // Global var, const, or let.
2118    __ mov(StoreDescriptor::ValueRegister(), result_register());
2119    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2120    CallStoreIC(slot, var->name());
2121
2122  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2123    DCHECK(!var->IsLookupSlot());
2124    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2125    MemOperand location = VarOperand(var, a1);
2126    // Perform an initialization check for lexically declared variables.
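    // A lexically declared variable still holds the hole while it is in its
    // temporal dead zone; assigning to it there (e.g. 'x = 1; let x;') must
    // throw a ReferenceError, which the check below implements.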
2127    if (var->binding_needs_init()) {
2128      Label assign;
2129      __ lw(a3, location);
2130      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2131      __ Branch(&assign, ne, a3, Operand(t0));
2132      __ li(a3, Operand(var->name()));
2133      __ push(a3);
2134      __ CallRuntime(Runtime::kThrowReferenceError);
2135      __ bind(&assign);
2136    }
2137    if (var->mode() != CONST) {
2138      EmitStoreToStackLocalOrContextSlot(var, location);
2139    } else if (var->throw_on_const_assignment(language_mode())) {
2140      __ CallRuntime(Runtime::kThrowConstAssignError);
2141    }
2142  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2143    // Initializing assignment to const {this} needs a write barrier.
2144    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2145    Label uninitialized_this;
2146    MemOperand location = VarOperand(var, a1);
2147    __ lw(a3, location);
2148    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2149    __ Branch(&uninitialized_this, eq, a3, Operand(at));
2150    __ li(a0, Operand(var->name()));
2151    __ Push(a0);
2152    __ CallRuntime(Runtime::kThrowReferenceError);
2153    __ bind(&uninitialized_this);
2154    EmitStoreToStackLocalOrContextSlot(var, location);
2155
2156  } else {
2157    DCHECK(var->mode() != CONST || op == Token::INIT);
2158    if (var->IsLookupSlot()) {
2159      // Assignment to var.
2160      __ Push(var->name());
2161      __ Push(v0);
2162      __ CallRuntime(is_strict(language_mode())
2163                         ? Runtime::kStoreLookupSlot_Strict
2164                         : Runtime::kStoreLookupSlot_Sloppy);
2165    } else {
2166      // Assignment to var or initializing assignment to let/const in harmony
2167      // mode.
2168      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2169      MemOperand location = VarOperand(var, a1);
2170      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2171        // Check for an uninitialized let binding.
2172        __ lw(a2, location);
2173        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2174        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2175      }
2176      EmitStoreToStackLocalOrContextSlot(var, location);
2177    }
2178  }
2179}
2180
2181
2182void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2183  // Assignment to a property, using a named store IC.
2184  Property* prop = expr->target()->AsProperty();
2185  DCHECK(prop != NULL);
2186  DCHECK(prop->key()->IsLiteral());
2187
2188  __ mov(StoreDescriptor::ValueRegister(), result_register());
2189  PopOperand(StoreDescriptor::ReceiverRegister());
2190  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2191
2192  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2193  context()->Plug(v0);
2194}
2195
2196
2197void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2198  // Assignment to named property of super.
2199  // v0 : value
2200  // stack : receiver ('this'), home_object
2201  DCHECK(prop != NULL);
2202  Literal* key = prop->key()->AsLiteral();
2203  DCHECK(key != NULL);
2204
2205  PushOperand(key->value());
2206  PushOperand(v0);
2207  CallRuntimeWithOperands(is_strict(language_mode())
2208                              ? Runtime::kStoreToSuper_Strict
2209                              : Runtime::kStoreToSuper_Sloppy);
2210}
2211
2212
2213void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2214  // Assignment to a keyed property of super.
2215  // v0 : value
2216  // stack : receiver ('this'), home_object, key
2217  DCHECK(prop != NULL);
2218
2219  PushOperand(v0);
2220  CallRuntimeWithOperands(is_strict(language_mode())
2221                              ? Runtime::kStoreKeyedToSuper_Strict
2222                              : Runtime::kStoreKeyedToSuper_Sloppy);
2223}
2224
2225
2226void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2227  // Assignment to a property, using a keyed store IC.
2228  // Call keyed store IC.
2229  // The arguments are:
2230  // - a0 is the value,
2231  // - a1 is the key,
2232  // - a2 is the receiver.
2233  __ mov(StoreDescriptor::ValueRegister(), result_register());
2234  PopOperands(StoreDescriptor::ReceiverRegister(),
2235              StoreDescriptor::NameRegister());
2236  DCHECK(StoreDescriptor::ValueRegister().is(a0));
2237
2238  CallKeyedStoreIC(expr->AssignmentSlot());
2239
2240  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2241  context()->Plug(v0);
2242}
2243
2244
2245void FullCodeGenerator::CallIC(Handle<Code> code,
2246                               TypeFeedbackId id) {
2247  ic_total_count_++;
2248  __ Call(code, RelocInfo::CODE_TARGET, id);
2249}
2250
2251
2252// Code common for calls using the IC.
2253void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2254  Expression* callee = expr->expression();
2255
2256  // Get the target function.
2257  ConvertReceiverMode convert_mode;
2258  if (callee->IsVariableProxy()) {
2259    { StackValueContext context(this);
2260      EmitVariableLoad(callee->AsVariableProxy());
2261      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2262    }
2263    // Push undefined as receiver. This is patched in the method prologue if it
2264    // is a sloppy mode method.
2265    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2266    PushOperand(at);
2267    convert_mode = ConvertReceiverMode::kNullOrUndefined;
2268  } else {
2269    // Load the function from the receiver.
2270    DCHECK(callee->IsProperty());
2271    DCHECK(!callee->AsProperty()->IsSuperAccess());
2272    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2273    EmitNamedPropertyLoad(callee->AsProperty());
2274    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2275                           BailoutState::TOS_REGISTER);
2276    // Push the target function under the receiver.
2277    __ lw(at, MemOperand(sp, 0));
2278    PushOperand(at);
2279    __ sw(v0, MemOperand(sp, kPointerSize));
2280    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2281  }
2282
2283  EmitCall(expr, convert_mode);
2284}
2285
2286
2287void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2288  SetExpressionPosition(expr);
2289  Expression* callee = expr->expression();
2290  DCHECK(callee->IsProperty());
2291  Property* prop = callee->AsProperty();
2292  DCHECK(prop->IsSuperAccess());
2293
2294  Literal* key = prop->key()->AsLiteral();
2295  DCHECK(!key->value()->IsSmi());
2296  // Load the function from the receiver.
2297  const Register scratch = a1;
2298  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2299  VisitForAccumulatorValue(super_ref->home_object());
2300  __ mov(scratch, v0);
2301  VisitForAccumulatorValue(super_ref->this_var());
2302  PushOperands(scratch, v0, v0, scratch);
2303  PushOperand(key->value());
2304
2305  // Stack here:
2306  //  - home_object
2307  //  - this (receiver)
2308  //  - this (receiver) <-- LoadFromSuper will pop here and below.
2309  //  - home_object
2310  //  - key
2311  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2312  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2313
2314  // Replace home_object with target function.
2315  __ sw(v0, MemOperand(sp, kPointerSize));
2316
2317  // Stack here:
2318  // - target function
2319  // - this (receiver)
2320  EmitCall(expr);
2321}
2322
2323
2324// Code common for calls using the IC.
2325void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2326                                                Expression* key) {
2327  // Load the key.
2328  VisitForAccumulatorValue(key);
2329
2330  Expression* callee = expr->expression();
2331
2332  // Load the function from the receiver.
2333  DCHECK(callee->IsProperty());
2334  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2335  __ Move(LoadDescriptor::NameRegister(), v0);
2336  EmitKeyedPropertyLoad(callee->AsProperty());
2337  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2338                         BailoutState::TOS_REGISTER);
2339
2340  // Push the target function under the receiver.
2341  __ lw(at, MemOperand(sp, 0));
2342  PushOperand(at);
2343  __ sw(v0, MemOperand(sp, kPointerSize));
2344
2345  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2346}
2347
2348
2349void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2350  Expression* callee = expr->expression();
2351  DCHECK(callee->IsProperty());
2352  Property* prop = callee->AsProperty();
2353  DCHECK(prop->IsSuperAccess());
2354
2355  SetExpressionPosition(prop);
2356  // Load the function from the receiver.
2357  const Register scratch = a1;
2358  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2359  VisitForAccumulatorValue(super_ref->home_object());
2360  __ Move(scratch, v0);
2361  VisitForAccumulatorValue(super_ref->this_var());
2362  PushOperands(scratch, v0, v0, scratch);
2363  VisitForStackValue(prop->key());
2364
2365  // Stack here:
2366  //  - home_object
2367  //  - this (receiver)
2368  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2369  //  - home_object
2370  //  - key
2371  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2372  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2373
2374  // Replace home_object with target function.
2375  __ sw(v0, MemOperand(sp, kPointerSize));
2376
2377  // Stack here:
2378  // - target function
2379  // - this (receiver)
2380  EmitCall(expr);
2381}
2382
2383
2384void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2385  // Load the arguments.
2386  ZoneList<Expression*>* args = expr->arguments();
2387  int arg_count = args->length();
2388  for (int i = 0; i < arg_count; i++) {
2389    VisitForStackValue(args->at(i));
2390  }
2391
2392  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2393  // Record source position of the IC call.
2394  SetCallPosition(expr, expr->tail_call_mode());
2395  if (expr->tail_call_mode() == TailCallMode::kAllow) {
2396    if (FLAG_trace) {
2397      __ CallRuntime(Runtime::kTraceTailCall);
2398    }
2399    // Update profiling counters before the tail call since we will
2400    // not return to this function.
2401    EmitProfilingCounterHandlingForReturnSequence(true);
2402  }
2403  Handle<Code> ic =
2404      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2405          .code();
2406  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2407  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2408  // Don't assign a type feedback id to the IC, since type feedback is provided
2409  // by the vector above.
2410  CallIC(ic);
2411  OperandStackDepthDecrement(arg_count + 1);
2412
2413  RecordJSReturnSite(expr);
2414  RestoreContext();
2415  context()->DropAndPlug(1, v0);
2416}
2417
2418void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2419  int arg_count = expr->arguments()->length();
2420  // t4: copy of the first argument or undefined if it doesn't exist.
2421  if (arg_count > 0) {
2422    __ lw(t4, MemOperand(sp, arg_count * kPointerSize));
2423  } else {
2424    __ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
2425  }
2426
2427  // t3: the receiver of the enclosing function.
2428  __ lw(t3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2429
2430  // t2: the language mode.
2431  __ li(t2, Operand(Smi::FromInt(language_mode())));
2432
2433  // t1: the start position of the scope the call resides in.
2434  __ li(t1, Operand(Smi::FromInt(scope()->start_position())));
2435
2436  // t0: the source position of the eval call.
2437  __ li(t0, Operand(Smi::FromInt(expr->position())));
2438
2439  // Do the runtime call.
2440  __ Push(t4, t3, t2, t1, t0);
2441  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2442}
2443
2444
2445// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2446void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2447  VariableProxy* callee = expr->expression()->AsVariableProxy();
2448  if (callee->var()->IsLookupSlot()) {
2449    Label slow, done;
2450
2451    SetExpressionPosition(callee);
2452    // Generate code for loading from variables potentially shadowed by
2453    // eval-introduced variables.
2454    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2455
2456    __ bind(&slow);
2457    // Call the runtime to find the function to call (returned in v0)
2458    // and the object holding it (returned in v1).
2459    __ Push(callee->name());
2460    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2461    PushOperands(v0, v1);  // Function, receiver.
2462    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2463
2464    // If fast case code has been generated, emit code to push the
2465    // function and receiver and have the slow path jump around this
2466    // code.
2467    if (done.is_linked()) {
2468      Label call;
2469      __ Branch(&call);
2470      __ bind(&done);
2471      // Push function.
2472      __ push(v0);
2473      // The receiver is implicitly the global receiver. Indicate this
2474      // by passing undefined as the receiver to the call.
2475      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2476      __ push(a1);
2477      __ bind(&call);
2478    }
2479  } else {
2480    VisitForStackValue(callee);
2481    // refEnv.WithBaseObject()
2482    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2483    PushOperand(a2);  // Reserved receiver slot.
2484  }
2485}
2486
2487
2488void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2489  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2490  // to resolve the function we need to call.  Then we call the resolved
2491  // function using the given arguments.
2492  ZoneList<Expression*>* args = expr->arguments();
2493  int arg_count = args->length();
2494  PushCalleeAndWithBaseObject(expr);
2495
2496  // Push the arguments.
2497  for (int i = 0; i < arg_count; i++) {
2498    VisitForStackValue(args->at(i));
2499  }
2500
2501  // Push a copy of the function (found below the arguments) and
2502  // resolve eval.
2503  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2504  __ push(a1);
2505  EmitResolvePossiblyDirectEval(expr);
2506
2507  // Touch up the stack with the resolved function.
2508  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2509
2510  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2511  // Record source position for debugger.
2512  SetCallPosition(expr);
2513  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2514  __ li(a0, Operand(arg_count));
2515  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2516                                      expr->tail_call_mode()),
2517          RelocInfo::CODE_TARGET);
2518  OperandStackDepthDecrement(arg_count + 1);
2519  RecordJSReturnSite(expr);
2520  RestoreContext();
2521  context()->DropAndPlug(1, v0);
2522}
2523
2524
2525void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2526  Comment cmnt(masm_, "[ CallNew");
2527  // According to ECMA-262, section 11.2.2, page 44, the function
2528  // expression in new calls must be evaluated before the
2529  // arguments.
2530
2531  // Push constructor on the stack.  If it's not a function it's used as
2532  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2533  // ignored.
2534  DCHECK(!expr->expression()->IsSuperPropertyReference());
2535  VisitForStackValue(expr->expression());
2536
2537  // Push the arguments ("left-to-right") on the stack.
2538  ZoneList<Expression*>* args = expr->arguments();
2539  int arg_count = args->length();
2540  for (int i = 0; i < arg_count; i++) {
2541    VisitForStackValue(args->at(i));
2542  }
2543
2544  // Call the construct call builtin that handles allocation and
2545  // constructor invocation.
2546  SetConstructCallPosition(expr);
2547
2548  // Load function and argument count into a1 and a0.
2549  __ li(a0, Operand(arg_count));
2550  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2551
2552  // Record call targets in unoptimized code.
2553  __ EmitLoadTypeFeedbackVector(a2);
2554  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2555
2556  CallConstructStub stub(isolate());
2557  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2558  OperandStackDepthDecrement(arg_count + 1);
2559  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2560  RestoreContext();
2561  context()->Plug(v0);
2562}
2563
2564
2565void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2566  SuperCallReference* super_call_ref =
2567      expr->expression()->AsSuperCallReference();
2568  DCHECK_NOT_NULL(super_call_ref);
2569
2570  // Push the super constructor target on the stack (may be null,
2571  // but the Construct builtin can deal with that properly).
2572  VisitForAccumulatorValue(super_call_ref->this_function_var());
2573  __ AssertFunction(result_register());
2574  __ lw(result_register(),
2575        FieldMemOperand(result_register(), HeapObject::kMapOffset));
2576  __ lw(result_register(),
2577        FieldMemOperand(result_register(), Map::kPrototypeOffset));
2578  PushOperand(result_register());
2579
2580  // Push the arguments ("left-to-right") on the stack.
2581  ZoneList<Expression*>* args = expr->arguments();
2582  int arg_count = args->length();
2583  for (int i = 0; i < arg_count; i++) {
2584    VisitForStackValue(args->at(i));
2585  }
2586
2587  // Call the construct call builtin that handles allocation and
2588  // constructor invocation.
2589  SetConstructCallPosition(expr);
2590
2591  // Load new target into a3.
2592  VisitForAccumulatorValue(super_call_ref->new_target_var());
2593  __ mov(a3, result_register());
2594
2595  // Load function and argument count into a1 and a0.
2596  __ li(a0, Operand(arg_count));
2597  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2598
2599  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2600  OperandStackDepthDecrement(arg_count + 1);
2601
2602  RecordJSReturnSite(expr);
2603  RestoreContext();
2604  context()->Plug(v0);
2605}
2606
2607
2608void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2609  ZoneList<Expression*>* args = expr->arguments();
2610  DCHECK(args->length() == 1);
2611
2612  VisitForAccumulatorValue(args->at(0));
2613
2614  Label materialize_true, materialize_false;
2615  Label* if_true = NULL;
2616  Label* if_false = NULL;
2617  Label* fall_through = NULL;
2618  context()->PrepareTest(&materialize_true, &materialize_false,
2619                         &if_true, &if_false, &fall_through);
2620
2621  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2622  __ SmiTst(v0, t0);
2623  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2624
2625  context()->Plug(if_true, if_false);
2626}
2627
2628
2629void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2630  ZoneList<Expression*>* args = expr->arguments();
2631  DCHECK(args->length() == 1);
2632
2633  VisitForAccumulatorValue(args->at(0));
2634
2635  Label materialize_true, materialize_false;
2636  Label* if_true = NULL;
2637  Label* if_false = NULL;
2638  Label* fall_through = NULL;
2639  context()->PrepareTest(&materialize_true, &materialize_false,
2640                         &if_true, &if_false, &fall_through);
2641
2642  __ JumpIfSmi(v0, if_false);
2643  __ GetObjectType(v0, a1, a1);
2644  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2645  Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
2646        if_true, if_false, fall_through);
2647
2648  context()->Plug(if_true, if_false);
2649}
2650
2651
2652void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2653  ZoneList<Expression*>* args = expr->arguments();
2654  DCHECK(args->length() == 1);
2655
2656  VisitForAccumulatorValue(args->at(0));
2657
2658  Label materialize_true, materialize_false;
2659  Label* if_true = NULL;
2660  Label* if_false = NULL;
2661  Label* fall_through = NULL;
2662  context()->PrepareTest(&materialize_true, &materialize_false,
2663                         &if_true, &if_false, &fall_through);
2664
2665  __ JumpIfSmi(v0, if_false);
2666  __ GetObjectType(v0, a1, a1);
2667  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2668  Split(eq, a1, Operand(JS_ARRAY_TYPE),
2669        if_true, if_false, fall_through);
2670
2671  context()->Plug(if_true, if_false);
2672}
2673
2674
2675void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2676  ZoneList<Expression*>* args = expr->arguments();
2677  DCHECK(args->length() == 1);
2678
2679  VisitForAccumulatorValue(args->at(0));
2680
2681  Label materialize_true, materialize_false;
2682  Label* if_true = NULL;
2683  Label* if_false = NULL;
2684  Label* fall_through = NULL;
2685  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2686                         &if_false, &fall_through);
2687
2688  __ JumpIfSmi(v0, if_false);
2689  __ GetObjectType(v0, a1, a1);
2690  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2691  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
2692
2693  context()->Plug(if_true, if_false);
2694}
2695
2696
2697void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2698  ZoneList<Expression*>* args = expr->arguments();
2699  DCHECK(args->length() == 1);
2700
2701  VisitForAccumulatorValue(args->at(0));
2702
2703  Label materialize_true, materialize_false;
2704  Label* if_true = NULL;
2705  Label* if_false = NULL;
2706  Label* fall_through = NULL;
2707  context()->PrepareTest(&materialize_true, &materialize_false,
2708                         &if_true, &if_false, &fall_through);
2709
2710  __ JumpIfSmi(v0, if_false);
2711  __ GetObjectType(v0, a1, a1);
2712  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2713  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2714
2715  context()->Plug(if_true, if_false);
2716}
2717
2718
2719void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2720  ZoneList<Expression*>* args = expr->arguments();
2721  DCHECK(args->length() == 1);
2722
2723  VisitForAccumulatorValue(args->at(0));
2724
2725  Label materialize_true, materialize_false;
2726  Label* if_true = NULL;
2727  Label* if_false = NULL;
2728  Label* fall_through = NULL;
2729  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2730                         &if_false, &fall_through);
2731
2732  __ JumpIfSmi(v0, if_false);
2733  __ GetObjectType(v0, a1, a1);
2734  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2735  Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
2736
2737  context()->Plug(if_true, if_false);
2738}
2739
2740
2741void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2742  ZoneList<Expression*>* args = expr->arguments();
2743  DCHECK(args->length() == 1);
2744  Label done, null, function, non_function_constructor;
2745
2746  VisitForAccumulatorValue(args->at(0));
2747
2748  // If the object is not a JSReceiver, we return null.
2749  __ JumpIfSmi(v0, &null);
2750  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2751  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
2752  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
2753
2754  // Return 'Function' for JSFunction and JSBoundFunction objects.
2755  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2756  __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
2757
2758  // Check if the constructor in the map is a JS function.
2759  Register instance_type = a2;
2760  __ GetMapConstructor(v0, v0, a1, instance_type);
2761  __ Branch(&non_function_constructor, ne, instance_type,
2762            Operand(JS_FUNCTION_TYPE));
2763
2764  // v0 now contains the constructor function. Grab the
2765  // instance class name from there.
2766  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
2767  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
2768  __ Branch(&done);
2769
2770  // Functions have class 'Function'.
2771  __ bind(&function);
2772  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
2773  __ jmp(&done);
2774
2775  // Objects with a non-function constructor have class 'Object'.
2776  __ bind(&non_function_constructor);
2777  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
2778  __ jmp(&done);
2779
2780  // Non-JS objects have class null.
2781  __ bind(&null);
2782  __ LoadRoot(v0, Heap::kNullValueRootIndex);
2783
2784  // All done.
2785  __ bind(&done);
2786
2787  context()->Plug(v0);
2788}
2789
2790
2791void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2792  ZoneList<Expression*>* args = expr->arguments();
2793  DCHECK(args->length() == 2);
2794
2795  VisitForStackValue(args->at(0));
2796  VisitForAccumulatorValue(args->at(1));
2797  __ mov(a0, result_register());
2798
2799  Register object = a1;
2800  Register index = a0;
2801  Register result = v0;
2802
2803  PopOperand(object);
2804
2805  Label need_conversion;
2806  Label index_out_of_range;
2807  Label done;
2808  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2809                                      &need_conversion, &index_out_of_range);
2810  generator.GenerateFast(masm_);
2811  __ jmp(&done);
2812
2813  __ bind(&index_out_of_range);
2814  // When the index is out of range, the spec requires us to return
2815  // NaN.
2816  __ LoadRoot(result, Heap::kNanValueRootIndex);
2817  __ jmp(&done);
2818
2819  __ bind(&need_conversion);
2820  // Load the undefined value into the result register, which will
2821  // trigger conversion.
2822  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2823  __ jmp(&done);
2824
2825  NopRuntimeCallHelper call_helper;
2826  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2827
2828  __ bind(&done);
2829  context()->Plug(result);
2830}
2831
2832
2833void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2834  ZoneList<Expression*>* args = expr->arguments();
2835  DCHECK_LE(2, args->length());
2836  // Push target, receiver and arguments onto the stack.
2837  for (Expression* const arg : *args) {
2838    VisitForStackValue(arg);
2839  }
2840  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2841  // Move target to a1.
2842  int const argc = args->length() - 2;
2843  __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
2844  // Call the target.
2845  __ li(a0, Operand(argc));
2846  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2847  OperandStackDepthDecrement(argc + 1);
2848  RestoreContext();
2849  // Discard the function left on TOS.
2850  context()->DropAndPlug(1, v0);
2851}
2852
2853
2854void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
2855  ZoneList<Expression*>* args = expr->arguments();
2856  VisitForAccumulatorValue(args->at(0));
2857
2858  Label materialize_true, materialize_false;
2859  Label* if_true = NULL;
2860  Label* if_false = NULL;
2861  Label* fall_through = NULL;
2862  context()->PrepareTest(&materialize_true, &materialize_false,
2863                         &if_true, &if_false, &fall_through);
2864
2865  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
2866  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
2867
2868  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2869  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
2870
2871  context()->Plug(if_true, if_false);
2872}
2873
2874
2875void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
2876  ZoneList<Expression*>* args = expr->arguments();
2877  DCHECK(args->length() == 1);
2878  VisitForAccumulatorValue(args->at(0));
2879
2880  __ AssertString(v0);
2881
2882  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
2883  __ IndexFromHash(v0, v0);
2884
2885  context()->Plug(v0);
2886}
2887
2888
2889void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2890  ZoneList<Expression*>* args = expr->arguments();
2891  DCHECK_EQ(1, args->length());
2892  VisitForAccumulatorValue(args->at(0));
2893  __ AssertFunction(v0);
2894  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2895  __ lw(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
2896  context()->Plug(v0);
2897}
2898
2899void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2900  DCHECK(expr->arguments()->length() == 0);
2901  ExternalReference debug_is_active =
2902      ExternalReference::debug_is_active_address(isolate());
2903  __ li(at, Operand(debug_is_active));
2904  __ lb(v0, MemOperand(at));
2905  __ SmiTag(v0);
2906  context()->Plug(v0);
2907}
2908
2909
2910void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2911  ZoneList<Expression*>* args = expr->arguments();
2912  DCHECK_EQ(2, args->length());
2913  VisitForStackValue(args->at(0));
2914  VisitForStackValue(args->at(1));
2915
2916  Label runtime, done;
2917
2918  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
2919              NO_ALLOCATION_FLAGS);
2920  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2921  __ Pop(a2, a3);
2922  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
2923  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2924  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2925  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2926  __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2927  __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2928  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2929  __ jmp(&done);
2930
2931  __ bind(&runtime);
2932  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2933
2934  __ bind(&done);
2935  context()->Plug(v0);
2936}
2937
2938
2939void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2940  // Push function.
2941  __ LoadNativeContextSlot(expr->context_index(), v0);
2942  PushOperand(v0);
2943
2944  // Push undefined as the receiver.
2945  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2946  PushOperand(v0);
2947}
2948
2949
2950void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2951  ZoneList<Expression*>* args = expr->arguments();
2952  int arg_count = args->length();
2953
2954  SetCallPosition(expr);
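  // EmitLoadJSRuntimeFunction pushed the callee and the undefined receiver
  // below the arguments, so the callee lives at
  // sp + (arg_count + 1) * kPointerSize.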
2955  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2956  __ li(a0, Operand(arg_count));
2957  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2958          RelocInfo::CODE_TARGET);
2959  OperandStackDepthDecrement(arg_count + 1);
2960  RestoreContext();
2961}
2962
2963
2964void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2965  switch (expr->op()) {
2966    case Token::DELETE: {
2967      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2968      Property* property = expr->expression()->AsProperty();
2969      VariableProxy* proxy = expr->expression()->AsVariableProxy();
2970
2971      if (property != NULL) {
2972        VisitForStackValue(property->obj());
2973        VisitForStackValue(property->key());
2974        CallRuntimeWithOperands(is_strict(language_mode())
2975                                    ? Runtime::kDeleteProperty_Strict
2976                                    : Runtime::kDeleteProperty_Sloppy);
2977        context()->Plug(v0);
2978      } else if (proxy != NULL) {
2979        Variable* var = proxy->var();
2980        // Delete of an unqualified identifier is disallowed in strict mode but
2981        // "delete this" is allowed.
2982        bool is_this = var->is_this();
2983        DCHECK(is_sloppy(language_mode()) || is_this);
2984        if (var->IsUnallocated()) {
2985          __ LoadGlobalObject(a2);
2986          __ li(a1, Operand(var->name()));
2987          __ Push(a2, a1);
2988          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2989          context()->Plug(v0);
2990        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2991          // Result of deleting non-global, non-dynamic variables is false.
2992          // The subexpression does not have side effects.
2993          context()->Plug(is_this);
2994        } else {
2995          // Non-global variable.  Call the runtime to try to delete from the
2996          // context where the variable was introduced.
2997          __ Push(var->name());
2998          __ CallRuntime(Runtime::kDeleteLookupSlot);
2999          context()->Plug(v0);
3000        }
3001      } else {
3002        // Result of deleting non-property, non-variable reference is true.
3003        // The subexpression may have side effects.
3004        VisitForEffect(expr->expression());
3005        context()->Plug(true);
3006      }
3007      break;
3008    }
3009
3010    case Token::VOID: {
3011      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3012      VisitForEffect(expr->expression());
3013      context()->Plug(Heap::kUndefinedValueRootIndex);
3014      break;
3015    }
3016
3017    case Token::NOT: {
3018      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3019      if (context()->IsEffect()) {
3020        // Unary NOT has no side effects so it's only necessary to visit the
3021        // subexpression.  Match the optimizing compiler by not branching.
3022        VisitForEffect(expr->expression());
3023      } else if (context()->IsTest()) {
3024        const TestContext* test = TestContext::cast(context());
3025        // The labels are swapped for the recursive call.
3026        VisitForControl(expr->expression(),
3027                        test->false_label(),
3028                        test->true_label(),
3029                        test->fall_through());
3030        context()->Plug(test->true_label(), test->false_label());
3031      } else {
3032        // We handle value contexts explicitly rather than simply visiting
3033        // for control and plugging the control flow into the context,
3034        // because we need to prepare a pair of extra administrative AST ids
3035        // for the optimizing compiler.
3036        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3037        Label materialize_true, materialize_false, done;
3038        VisitForControl(expr->expression(),
3039                        &materialize_false,
3040                        &materialize_true,
3041                        &materialize_true);
3042        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3043        __ bind(&materialize_true);
3044        PrepareForBailoutForId(expr->MaterializeTrueId(),
3045                               BailoutState::NO_REGISTERS);
3046        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3047        if (context()->IsStackValue()) __ push(v0);
3048        __ jmp(&done);
3049        __ bind(&materialize_false);
3050        PrepareForBailoutForId(expr->MaterializeFalseId(),
3051                               BailoutState::NO_REGISTERS);
3052        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3053        if (context()->IsStackValue()) __ push(v0);
3054        __ bind(&done);
3055      }
3056      break;
3057    }
3058
3059    case Token::TYPEOF: {
3060      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3061      {
3062        AccumulatorValueContext context(this);
3063        VisitForTypeofValue(expr->expression());
3064      }
3065      __ mov(a3, v0);
3066      TypeofStub typeof_stub(isolate());
3067      __ CallStub(&typeof_stub);
3068      context()->Plug(v0);
3069      break;
3070    }
3071
3072    default:
3073      UNREACHABLE();
3074  }
3075}
3076
3077
3078void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3079  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3080
3081  Comment cmnt(masm_, "[ CountOperation");
3082
3083  Property* prop = expr->expression()->AsProperty();
3084  LhsKind assign_type = Property::GetAssignType(prop);
3085
3086  // Evaluate expression and get value.
3087  if (assign_type == VARIABLE) {
3088    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3089    AccumulatorValueContext context(this);
3090    EmitVariableLoad(expr->expression()->AsVariableProxy());
3091  } else {
3092    // Reserve space for result of postfix operation.
3093    if (expr->is_postfix() && !context()->IsEffect()) {
3094      __ li(at, Operand(Smi::FromInt(0)));
3095      PushOperand(at);
3096    }
3097    switch (assign_type) {
3098      case NAMED_PROPERTY: {
3099        // Put the object both on the stack and in the receiver register.
3100        VisitForStackValue(prop->obj());
3101        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3102        EmitNamedPropertyLoad(prop);
3103        break;
3104      }
3105
3106      case NAMED_SUPER_PROPERTY: {
3107        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3108        VisitForAccumulatorValue(
3109            prop->obj()->AsSuperPropertyReference()->home_object());
3110        const Register scratch = a1;
3111        __ lw(scratch, MemOperand(sp, 0));  // this
3112        PushOperands(result_register(), scratch, result_register());
3113        EmitNamedSuperPropertyLoad(prop);
3114        break;
3115      }
3116
3117      case KEYED_SUPER_PROPERTY: {
3118        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3119        VisitForStackValue(
3120            prop->obj()->AsSuperPropertyReference()->home_object());
3121        VisitForAccumulatorValue(prop->key());
3122        const Register scratch1 = a1;
3123        const Register scratch2 = t0;
3124        __ lw(scratch1, MemOperand(sp, 1 * kPointerSize));  // this
3125        __ lw(scratch2, MemOperand(sp, 0 * kPointerSize));  // home object
3126        PushOperands(result_register(), scratch1, scratch2, result_register());
3127        EmitKeyedSuperPropertyLoad(prop);
3128        break;
3129      }
3130
3131      case KEYED_PROPERTY: {
3132        VisitForStackValue(prop->obj());
3133        VisitForStackValue(prop->key());
3134        __ lw(LoadDescriptor::ReceiverRegister(),
3135              MemOperand(sp, 1 * kPointerSize));
3136        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3137        EmitKeyedPropertyLoad(prop);
3138        break;
3139      }
3140
3141      case VARIABLE:
3142        UNREACHABLE();
3143    }
3144  }
3145
3146  // We need a second deoptimization point after loading the value
3147  // in case evaluating the property load may have a side effect.
3148  if (assign_type == VARIABLE) {
3149    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3150  } else {
3151    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3152  }
3153
3154  // Inline smi case if we are in a loop.
3155  Label stub_call, done;
3156  JumpPatchSite patch_site(masm_);
3157
3158  int count_value = expr->op() == Token::INC ? 1 : -1;
3159  __ mov(a0, v0);
3160  if (ShouldInlineSmiCase(expr->op())) {
3161    Label slow;
3162    patch_site.EmitJumpIfNotSmi(v0, &slow);
3163
3164    // Save result for postfix expressions.
3165    if (expr->is_postfix()) {
3166      if (!context()->IsEffect()) {
3167        // Save the result on the stack. If we have a named or keyed property
3168        // we store the result under the receiver that is currently on top
3169        // of the stack.
3170        switch (assign_type) {
3171          case VARIABLE:
3172            __ push(v0);
3173            break;
3174          case NAMED_PROPERTY:
3175            __ sw(v0, MemOperand(sp, kPointerSize));
3176            break;
3177          case NAMED_SUPER_PROPERTY:
3178            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3179            break;
3180          case KEYED_PROPERTY:
3181            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3182            break;
3183          case KEYED_SUPER_PROPERTY:
3184            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
3185            break;
3186        }
3187      }
3188    }
3189
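    // Inline fast path: AddBranchNoOvf adds the smi count value and branches
    // straight to 'done' unless the addition overflows; on overflow we fall
    // through, restore the original value and take the stub path.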
3190    Register scratch1 = a1;
3191    __ li(scratch1, Operand(Smi::FromInt(count_value)));
3192    __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
3193    // Call stub. Undo operation first.
3194    __ Move(v0, a0);
3195    __ jmp(&stub_call);
3196    __ bind(&slow);
3197  }
3198
3199  // Convert old value into a number.
3200  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3201  RestoreContext();
3202  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3203
3204  // Save result for postfix expressions.
3205  if (expr->is_postfix()) {
3206    if (!context()->IsEffect()) {
3207      // Save the result on the stack. If we have a named or keyed property
3208      // we store the result under the receiver that is currently on top
3209      // of the stack.
3210      switch (assign_type) {
3211        case VARIABLE:
3212          PushOperand(v0);
3213          break;
3214        case NAMED_PROPERTY:
3215          __ sw(v0, MemOperand(sp, kPointerSize));
3216          break;
3217        case NAMED_SUPER_PROPERTY:
3218          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3219          break;
3220        case KEYED_PROPERTY:
3221          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3222          break;
3223        case KEYED_SUPER_PROPERTY:
3224          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
3225          break;
3226      }
3227    }
3228  }
3229
3230  __ bind(&stub_call);
3231  __ mov(a1, v0);
3232  __ li(a0, Operand(Smi::FromInt(count_value)));
3233
3234  SetExpressionPosition(expr);
3235
3236  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3237  CallIC(code, expr->CountBinOpFeedbackId());
3238  patch_site.EmitPatchInfo();
3239  __ bind(&done);
3240
3241  // Store the value returned in v0.
3242  switch (assign_type) {
3243    case VARIABLE:
3244      if (expr->is_postfix()) {
3245        { EffectContext context(this);
3246          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3247                                 Token::ASSIGN, expr->CountSlot());
3248          PrepareForBailoutForId(expr->AssignmentId(),
3249                                 BailoutState::TOS_REGISTER);
3250          context.Plug(v0);
3251        }
3252        // For all contexts except EffectContext we have the result on
3253        // top of the stack.
3254        if (!context()->IsEffect()) {
3255          context()->PlugTOS();
3256        }
3257      } else {
3258        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3259                               Token::ASSIGN, expr->CountSlot());
3260        PrepareForBailoutForId(expr->AssignmentId(),
3261                               BailoutState::TOS_REGISTER);
3262        context()->Plug(v0);
3263      }
3264      break;
3265    case NAMED_PROPERTY: {
3266      __ mov(StoreDescriptor::ValueRegister(), result_register());
3267      PopOperand(StoreDescriptor::ReceiverRegister());
3268      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
3269      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3270      if (expr->is_postfix()) {
3271        if (!context()->IsEffect()) {
3272          context()->PlugTOS();
3273        }
3274      } else {
3275        context()->Plug(v0);
3276      }
3277      break;
3278    }
3279    case NAMED_SUPER_PROPERTY: {
3280      EmitNamedSuperPropertyStore(prop);
3281      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3282      if (expr->is_postfix()) {
3283        if (!context()->IsEffect()) {
3284          context()->PlugTOS();
3285        }
3286      } else {
3287        context()->Plug(v0);
3288      }
3289      break;
3290    }
3291    case KEYED_SUPER_PROPERTY: {
3292      EmitKeyedSuperPropertyStore(prop);
3293      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3294      if (expr->is_postfix()) {
3295        if (!context()->IsEffect()) {
3296          context()->PlugTOS();
3297        }
3298      } else {
3299        context()->Plug(v0);
3300      }
3301      break;
3302    }
3303    case KEYED_PROPERTY: {
3304      __ mov(StoreDescriptor::ValueRegister(), result_register());
3305      PopOperands(StoreDescriptor::ReceiverRegister(),
3306                  StoreDescriptor::NameRegister());
3307      CallKeyedStoreIC(expr->CountSlot());
3308      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3309      if (expr->is_postfix()) {
3310        if (!context()->IsEffect()) {
3311          context()->PlugTOS();
3312        }
3313      } else {
3314        context()->Plug(v0);
3315      }
3316      break;
3317    }
3318  }
3319}
3320
3321
3322void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3323                                                 Expression* sub_expr,
3324                                                 Handle<String> check) {
3325  Label materialize_true, materialize_false;
3326  Label* if_true = NULL;
3327  Label* if_false = NULL;
3328  Label* fall_through = NULL;
3329  context()->PrepareTest(&materialize_true, &materialize_false,
3330                         &if_true, &if_false, &fall_through);
3331
3332  { AccumulatorValueContext context(this);
3333    VisitForTypeofValue(sub_expr);
3334  }
3335  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3336
3337  Factory* factory = isolate()->factory();
3338  if (String::Equals(check, factory->number_string())) {
3339    __ JumpIfSmi(v0, if_true);
3340    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3341    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3342    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3343  } else if (String::Equals(check, factory->string_string())) {
3344    __ JumpIfSmi(v0, if_false);
3345    __ GetObjectType(v0, v0, a1);
3346    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
3347          fall_through);
3348  } else if (String::Equals(check, factory->symbol_string())) {
3349    __ JumpIfSmi(v0, if_false);
3350    __ GetObjectType(v0, v0, a1);
3351    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
3352  } else if (String::Equals(check, factory->boolean_string())) {
3353    __ LoadRoot(at, Heap::kTrueValueRootIndex);
3354    __ Branch(if_true, eq, v0, Operand(at));
3355    __ LoadRoot(at, Heap::kFalseValueRootIndex);
3356    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3357  } else if (String::Equals(check, factory->undefined_string())) {
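    // null must be excluded explicitly: typeof null is "object", not
    // "undefined".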
3358    __ LoadRoot(at, Heap::kNullValueRootIndex);
3359    __ Branch(if_false, eq, v0, Operand(at));
3360    __ JumpIfSmi(v0, if_false);
3361    // Check for undetectable objects => true.
3362    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3363    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3364    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3365    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3366  } else if (String::Equals(check, factory->function_string())) {
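    // Require the callable bit set and the undetectable bit clear: an
    // undetectable callable (e.g. document.all) reports "undefined" instead.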
3367    __ JumpIfSmi(v0, if_false);
3368    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3369    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3370    __ And(a1, a1,
3371           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3372    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
3373          fall_through);
3374  } else if (String::Equals(check, factory->object_string())) {
3375    __ JumpIfSmi(v0, if_false);
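    // typeof null is "object", so null goes straight to the true branch.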
3376    __ LoadRoot(at, Heap::kNullValueRootIndex);
3377    __ Branch(if_true, eq, v0, Operand(at));
3378    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3379    __ GetObjectType(v0, v0, a1);
3380    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3381    // Check for callable or undetectable objects => false.
3382    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3383    __ And(a1, a1,
3384           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3385    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
3386// clang-format off
3387#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
3388  } else if (String::Equals(check, factory->type##_string())) {  \
3389    __ JumpIfSmi(v0, if_false);                                  \
3390    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
3391    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
3392    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3393  SIMD128_TYPES(SIMD128_TYPE)
3394#undef SIMD128_TYPE
3395    // clang-format on
3396  } else {
3397    if (if_false != fall_through) __ jmp(if_false);
3398  }
3399  context()->Plug(if_true, if_false);
3400}
3401
3402
3403void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3404  Comment cmnt(masm_, "[ CompareOperation");
3405
3406  // First we try a fast inlined version of the compare when one of
3407  // the operands is a literal.
3408  if (TryLiteralCompare(expr)) return;
3409
3410  // Always perform the comparison for its control flow.  Pack the result
3411  // into the expression's context after the comparison is performed.
3412  Label materialize_true, materialize_false;
3413  Label* if_true = NULL;
3414  Label* if_false = NULL;
3415  Label* fall_through = NULL;
3416  context()->PrepareTest(&materialize_true, &materialize_false,
3417                         &if_true, &if_false, &fall_through);
3418
3419  Token::Value op = expr->op();
3420  VisitForStackValue(expr->left());
3421  switch (op) {
3422    case Token::IN:
3423      VisitForStackValue(expr->right());
3424      SetExpressionPosition(expr);
3425      EmitHasProperty();
3426      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3427      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
3428      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
3429      break;
3430
3431    case Token::INSTANCEOF: {
3432      VisitForAccumulatorValue(expr->right());
3433      SetExpressionPosition(expr);
3434      __ mov(a0, result_register());
3435      PopOperand(a1);
3436      InstanceOfStub stub(isolate());
3437      __ CallStub(&stub);
3438      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3439      __ LoadRoot(at, Heap::kTrueValueRootIndex);
3440      Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3441      break;
3442    }
3443
3444    default: {
3445      VisitForAccumulatorValue(expr->right());
3446      SetExpressionPosition(expr);
3447      Condition cc = CompareIC::ComputeCondition(op);
3448      __ mov(a0, result_register());
3449      PopOperand(a1);
3450
3451      bool inline_smi_code = ShouldInlineSmiCase(op);
3452      JumpPatchSite patch_site(masm_);
3453      if (inline_smi_code) {
3454        Label slow_case;
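        // Both operands are smis iff the OR of the two values still has a
        // clear smi tag bit.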
3455        __ Or(a2, a0, Operand(a1));
3456        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
3457        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
3458        __ bind(&slow_case);
3459      }
3460
3461      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3462      CallIC(ic, expr->CompareOperationFeedbackId());
3463      patch_site.EmitPatchInfo();
3464      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3465      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
3466    }
3467  }
3468
3469  // Convert the result of the comparison into one expected for this
3470  // expression's context.
3471  context()->Plug(if_true, if_false);
3472}
3473
3474
3475void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3476                                              Expression* sub_expr,
3477                                              NilValue nil) {
3478  Label materialize_true, materialize_false;
3479  Label* if_true = NULL;
3480  Label* if_false = NULL;
3481  Label* fall_through = NULL;
3482  context()->PrepareTest(&materialize_true, &materialize_false,
3483                         &if_true, &if_false, &fall_through);
3484
3485  VisitForAccumulatorValue(sub_expr);
3486  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3487  if (expr->op() == Token::EQ_STRICT) {
3488    Heap::RootListIndex nil_value = nil == kNullValue ?
3489        Heap::kNullValueRootIndex :
3490        Heap::kUndefinedValueRootIndex;
3491    __ LoadRoot(a1, nil_value);
3492    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3493  } else {
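    // Non-strict comparison against null/undefined: the result is true
    // exactly when the map's undetectable bit is set, which covers the null
    // and undefined oddballs as well as undetectable host objects.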
3494    __ JumpIfSmi(v0, if_false);
3495    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3496    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3497    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3498    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3499  }
3500  context()->Plug(if_true, if_false);
3501}
3502
3503
3504Register FullCodeGenerator::result_register() {
3505  return v0;
3506}
3507
3508
3509Register FullCodeGenerator::context_register() {
3510  return cp;
3511}
3512
3513void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3514  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3515  __ lw(value, MemOperand(fp, frame_offset));
3516}
3517
3518void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3519  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3520  __ sw(value, MemOperand(fp, frame_offset));
3521}
3522
3523
3524void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3525  __ lw(dst, ContextMemOperand(cp, context_index));
3526}
3527
3528
3529void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3530  DeclarationScope* closure_scope = scope()->GetClosureScope();
3531  if (closure_scope->is_script_scope() ||
3532      closure_scope->is_module_scope()) {
3533    // Contexts nested in the native context have a canonical empty function
3534    // as their closure, not the anonymous closure containing the global
3535    // code.
3536    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
3537  } else if (closure_scope->is_eval_scope()) {
3538    // Contexts created by a call to eval have the same closure as the
3539    // context calling eval, not the anonymous closure containing the eval
3540    // code.  Fetch it from the context.
3541    __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3542  } else {
3543    DCHECK(closure_scope->is_function_scope());
3544    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3545  }
3546  PushOperand(at);
3547}
3548
3549
3550// ----------------------------------------------------------------------------
3551// Non-local control flow support.
3552
3553void FullCodeGenerator::EnterFinallyBlock() {
3554  DCHECK(!result_register().is(a1));
3555  // Store the pending message while executing the finally block.
3556  ExternalReference pending_message_obj =
3557      ExternalReference::address_of_pending_message_obj(isolate());
3558  __ li(at, Operand(pending_message_obj));
3559  __ lw(a1, MemOperand(at));
3560  PushOperand(a1);
3561
3562  ClearPendingMessage();
3563}
3564
3565
3566void FullCodeGenerator::ExitFinallyBlock() {
3567  DCHECK(!result_register().is(a1));
3568  // Restore the pending message from the stack.
3569  PopOperand(a1);
3570  ExternalReference pending_message_obj =
3571      ExternalReference::address_of_pending_message_obj(isolate());
3572  __ li(at, Operand(pending_message_obj));
3573  __ sw(a1, MemOperand(at));
3574}
3575
3576
3577void FullCodeGenerator::ClearPendingMessage() {
3578  DCHECK(!result_register().is(a1));
3579  ExternalReference pending_message_obj =
3580      ExternalReference::address_of_pending_message_obj(isolate());
3581  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
3582  __ li(at, Operand(pending_message_obj));
3583  __ sw(a1, MemOperand(at));
3584}
3585
3586
3587void FullCodeGenerator::DeferredCommands::EmitCommands() {
3588  DCHECK(!result_register().is(a1));
3589  __ Pop(result_register());  // Restore the accumulator.
3590  __ Pop(a1);                 // Get the token.
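  // Each deferred command is guarded by its token; commands whose token does
  // not match the one just popped into a1 are skipped.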
3591  for (DeferredCommand cmd : commands_) {
3592    Label skip;
3593    __ li(at, Operand(Smi::FromInt(cmd.token)));
3594    __ Branch(&skip, ne, a1, Operand(at));
3595    switch (cmd.command) {
3596      case kReturn:
3597        codegen_->EmitUnwindAndReturn();
3598        break;
3599      case kThrow:
3600        __ Push(result_register());
3601        __ CallRuntime(Runtime::kReThrow);
3602        break;
3603      case kContinue:
3604        codegen_->EmitContinue(cmd.target);
3605        break;
3606      case kBreak:
3607        codegen_->EmitBreak(cmd.target);
3608        break;
3609    }
3610    __ bind(&skip);
3611  }
3612}
3613
3614#undef __
3615
3616
3617void BackEdgeTable::PatchAt(Code* unoptimized_code,
3618                            Address pc,
3619                            BackEdgeState target_state,
3620                            Code* replacement_code) {
3621  static const int kInstrSize = Assembler::kInstrSize;
3622  Address pc_immediate_load_address =
3623      Assembler::target_address_from_return_address(pc);
3624  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
3625  Isolate* isolate = unoptimized_code->GetIsolate();
3626  CodePatcher patcher(isolate, branch_address, 1);
3627
3628  switch (target_state) {
3629    case INTERRUPT:
3630      // slt at, a3, zero_reg (in case of count-based interrupts)
3631      // beq at, zero_reg, ok
3632      // lui t9, <interrupt stub address> upper
3633      // ori t9, <interrupt stub address> lower
3634      // jalr t9
3635      // nop
3636      // ok-label ----- pc_after points here
3637      patcher.masm()->slt(at, a3, zero_reg);
3638      break;
3639    case ON_STACK_REPLACEMENT:
3640      // addiu at, zero_reg, 1
3641      // beq at, zero_reg, ok  ;; Not changed
3642      // lui t9, <on-stack replacement address> upper
3643      // ori t9, <on-stack replacement address> lower
3644      // jalr t9  ;; Not changed
3645      // nop  ;; Not changed
3646      // ok-label ----- pc_after points here
3647      patcher.masm()->addiu(at, zero_reg, 1);
3648      break;
3649  }
3650  // Replace the stack check address in the load-immediate (lui/ori pair)
3651  // with the entry address of the replacement code.
3652  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
3653                                   replacement_code->entry());
3654
3655  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3656      unoptimized_code, pc_immediate_load_address, replacement_code);
3657}
3658
3659
3660BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3661    Isolate* isolate,
3662    Code* unoptimized_code,
3663    Address pc) {
3664  static const int kInstrSize = Assembler::kInstrSize;
3665  Address pc_immediate_load_address =
3666      Assembler::target_address_from_return_address(pc);
3667  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
3668
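  // PatchAt (above) leaves 'slt at, a3, zero_reg' in front of the branch for
  // the interrupt check and 'addiu at, zero_reg, 1' once on-stack replacement
  // has been requested, so the kind of that instruction encodes the state.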
3669  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
3670  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
3671    DCHECK(reinterpret_cast<uint32_t>(
3672        Assembler::target_address_at(pc_immediate_load_address)) ==
3673           reinterpret_cast<uint32_t>(
3674               isolate->builtins()->InterruptCheck()->entry()));
3675    return INTERRUPT;
3676  }
3677
3678  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
3679
3680  DCHECK(reinterpret_cast<uint32_t>(
3681             Assembler::target_address_at(pc_immediate_load_address)) ==
3682         reinterpret_cast<uint32_t>(
3683             isolate->builtins()->OnStackReplacement()->entry()));
3684  return ON_STACK_REPLACEMENT;
3685}
3686
3687
3688}  // namespace internal
3689}  // namespace v8
3690
3691#endif  // V8_TARGET_ARCH_MIPS
3692