// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/full-codegen/full-codegen.h"
#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/ic/ic.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and
// rx * 0x0000ffff + yyyy (the raw 16-bit immediate value is used) is the
// delta from the pc to the first instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
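// For example (illustrative, not from the original comment): a delta of 5
// instructions is encoded as andi(zero_reg, zero_reg, 5), since 5 / 0xffff
// yields register code 0 (zero_reg) and 5 % 0xffff yields 5; the patcher
// recovers the delta as rx_code * 0xffff + yyyy.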
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }
  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
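      // Encode the delta in the marker instruction: the register code
      // carries delta / kImm16Mask and the 16-bit immediate carries the
      // remainder.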
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ ld(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
    __ ld(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
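    // a0 now holds the function's TypeFeedbackVector; the invocation count
    // is a Smi kept in a fixed slot of that array.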
    __ ld(a4, FieldMemOperand(
                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                          TypeFeedbackVector::kHeaderSize));
    __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
    __ sd(a4, FieldMemOperand(
                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                          TypeFeedbackVector::kHeaderSize));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
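      // Emit the undefined stores in batches of kMaxPushes inside a loop;
      // this bounds the generated code size while keeping loop overhead low.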
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
        FastNewFunctionContextStub stub(isolate());
        __ li(FastNewFunctionContextDescriptor::SlotsRegister(),
              Operand(slots));
        __ CallStub(&stub);
        // Result of FastNewFunctionContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                 (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding the function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = info->scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as
      // such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  Variable* new_target_var = info->scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate RestParameters
  Variable* rest_param = info->scope()->rest_parameter();
  if (rest_param != nullptr) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);

    VisitStatements(literal()->body());

    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::kZero == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we
  // need to make sure its size stays constant. Branch may emit a skip-or-jump
  // sequence instead of the normal Branch; it seems that the "skip" part of
  // that sequence is about as long as this Branch would be, so it is safe to
  // ignore that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
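  // Skip the interrupt check while the counter (left in a3 by the decrement)
  // is still non-negative.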
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
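      // Restore sp from fp, pop the saved fp/ra pair, then drop the receiver
      // and arguments before jumping to the return address.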
      __ mov(sp, fp);
      __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      __ Daddu(sp, sp, Operand(sp_delta));
      __ Jump(ra);
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value, as the following branch can clobber 'at' in long branch
  // mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
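  // The ToBoolean IC leaves its result in v0; compare it against the true
  // value to split control flow.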
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, v0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext, a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext, a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, variable->mode());
      DCHECK(!variable->binding_needs_init());
      __ li(a2, Operand(variable->name()));
      __ Push(a2);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ EmitLoadTypeFeedbackVector(a2);
  __ Push(a1, a0, a2);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default clause is not a test; remember it as the final
    // fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
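      // The or'ed value has its low (smi tag) bit set unless both operands
      // are smis, so a single check covers both.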
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(a0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::kZero));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ Push(a1, v0);  // Smi and array.
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ li(a0, Operand(Smi::kZero));
  __ Push(a0);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into the result register.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(result_register(), MemOperand(a4));  // Current entry.

  // Get the expected map (or, in the permanent slow case, a smi) from the
  // stack into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(a3);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));
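  // Writing the megamorphic sentinel into the feedback slot records that
  // this for-in reached the slow, filtered path.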

  __ mov(a0, result_register());
  // a0 contains the key. The receiver in a1 is the second argument to the
  // ForInFilter. ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, result_register(),
            Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in the result_register.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
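  // Walk up the context chain to the outermost sloppy-eval scope, checking
  // that no context of a sloppy-eval-calling scope has been given an
  // extension object.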
  for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
    if (!s->NeedsContext()) continue;
    if (s->calls_sloppy_eval()) {
      // Check that extension is "the hole".
      __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
      __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    }
    // Load next context in chain.
    __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
    // Walk the rest of the chain without clobbering cp.
    current = next;
    to_check--;
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = a4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->NeedsContext()) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->binding_needs_init()) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
      __ Branch(done, ne, at, Operand(zero_reg));
      __ li(a0, Operand(var->name()));
      __ push(a0);
      __ CallRuntime(Runtime::kThrowReferenceError);
    } else {
      __ Branch(done);
    }
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
        __ Branch(&done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    PushOperand(a1);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
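      // With [literal, key, getter(, setter)] on the stack, the home object
      // (the literal) sits 2 slots below the top for a getter and 3 for a
      // setter.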
1351      EmitSetHomeObject(expression, offset, property->GetSlot());
1352    }
1353  }
1354}
1355
1356
1357void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1358  Comment cmnt(masm_, "[ ObjectLiteral");
1359
1360  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(property->GetSlot(0), key->value());
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ld(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          PushOperand(a0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ld(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ld(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    PushOperand(a0);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
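  // For example, in { a: 0, [b]: 1, c: 2 } only "a" belongs to the static
  // part; [b] starts the dynamic part, and "c" is also defined dynamically
  // so that insertion order is preserved.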
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on the stack
      result_saved = true;
    }

    __ ld(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // With allocation-site pretenuring disabled, transitioning is the only
    // customer of allocation sites, and tracking can be turned off because
    // fast object elements leave nowhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
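    // (e.g. in [1, 2, foo()] only foo() needs the keyed store below).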
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(v0);  // array literal
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
    __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = a1;
        __ ld(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch1 = a4;
        const Register scratch2 = a1;
        __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ ld(scratch2, MemOperand(sp, 1 * kPointerSize));
        PushOperands(scratch1, scratch2, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      // We need the key and receiver both on the stack and in the load IC's
      // receiver and name registers.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ ld(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(v0);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

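  // Suspending stores the continuation offset and the current context in the
  // generator object and returns to the caller; resuming re-enters the code
  // at &continuation with the generator object in v0.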
  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, v0 holds the generator object.
  __ RecordGeneratorContinuation();
  __ ld(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
  __ ld(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOrDebugPosOffset));
  __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
  __ Push(result_register());
  __ Branch(&exception, eq, a1,
            Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
                                              : Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
  __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
  __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
  __ mov(a1, cp);
  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                      kRAHasBeenSaved, kDontSaveFPRegs);
  __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
  __ Branch(&post_runtime, eq, sp, Operand(a1));
  __ push(v0);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3, Register reg4) {
  OperandStackDepthIncrement(4);
  __ Push(reg1, reg2, reg3, reg4);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
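    // fp - sp must equal the fixed frame size plus the simulated operand
    // stack depth, or the operand stack bookkeeping is broken.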
    __ Dsubu(v0, fp, sp);
    __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

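  // Try an inline new-space allocation first and fall back to the runtime;
  // both paths continue at done_allocate with the result object in v0.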
  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  PopOperand(a2);
  __ LoadRoot(a3,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
  __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  PopOperand(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ dsrav(right, left, scratch1);
      __ And(v0, right, Operand(0xffffffff00000000L));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ dsllv(scratch1, scratch1, scratch2);
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ dsrlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0x80000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ DaddBranchOvf(v0, left, Operand(right), &stub_call);
      break;
    case Token::SUB:
      __ DsubBranchOvf(v0, left, Operand(right), &stub_call);
      break;
    case Token::MUL: {
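      // Both operands are smis tagged in the upper 32 bits, so Dmulh leaves
      // the untagged 64-bit product in v0. The product fits a smi only if
      // its upper bits are a sign-extension of bit 31; a zero product still
      // needs the sign check below so that -0 is produced by the stub.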
      __ Dmulh(v0, left, right);
      __ dsra32(scratch2, v0, 0);
      __ sra(scratch1, v0, 31);
      __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
      __ SmiTag(v0);
      __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
      __ Daddu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::kZero == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ClassLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    Register scratch = a1;
    if (property->is_static()) {
      __ ld(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ ld(scratch, MemOperand(sp, 0));  // prototype
    }
    PushOperand(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here to
    // avoid doing the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(v0);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ClassLiteral::Property::METHOD:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ClassLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ClassLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      case ClassLiteral::Property::FIELD:
      default:
        UNREACHABLE();
    }
  }
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ mov(a0, result_register());
  PopOperand(a1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; v0: home_object
      Register scratch = a2;
      Register scratch2 = a3;
      __ mov(scratch, result_register());             // home_object
      __ ld(v0, MemOperand(sp, kPointerSize));        // value
      __ ld(scratch2, MemOperand(sp, 0));             // this
      __ sd(scratch2, MemOperand(sp, kPointerSize));  // this
      __ sd(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = a2;
      Register scratch2 = a3;
      __ ld(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; v0: key, a3: value
      __ ld(scratch, MemOperand(sp, kPointerSize));  // this
      __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
      __ ld(scratch, MemOperand(sp, 0));  // home_object
      __ sd(scratch, MemOperand(sp, kPointerSize));
      __ sd(v0, MemOperand(sp, 0));
      __ Move(v0, scratch2);
      // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), result_register());
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(slot);
      break;
    }
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sd(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, a1);
    // Perform an initialization check for lexically declared variables.
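    // (e.g. `{ x = 1; let x; }` writes to x inside its temporal dead zone
    // and must throw a ReferenceError).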
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ ld(a3, location);
      __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(a4));
      __ li(a3, Operand(var->name()));
      __ push(a3);
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, a1);
    __ ld(a3, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&uninitialized_this, eq, a3, Operand(at));
    __ li(a0, Operand(var->name()));
    __ Push(a0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    if (var->IsLookupSlot()) {
      __ Push(var->name());
      __ Push(v0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ ld(a2, location);
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::ValueRegister(), result_register());
  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(v0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(v0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the receiver,
  // - a2 is the key.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  CallKeyedStoreIC(expr->AssignmentSlot());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(v0);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    PushOperand(at);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    __ ld(at, MemOperand(sp, 0));
    PushOperand(at);
    __ sd(v0, MemOperand(sp, kPointerSize));
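    // Stack here:
    // - target function
    // - this (receiver)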
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, v0, v0, scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ sd(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ ld(at, MemOperand(sp, 0));
  PushOperand(at);
  __ sd(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ sd(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Record source position of the IC call.
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
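  // Gather the arguments for Runtime::kResolvePossiblyDirectEval: the first
  // argument to eval, the enclosing function, the language mode, the start
  // position of the enclosing scope and the source position of the eval call.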
  // a6: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
  }

  // a5: the enclosing function.
  __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // a4: the language mode.
  __ li(a4, Operand(Smi::FromInt(language_mode())));

  // a1: the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // a0: the source position of the eval call.
  __ li(a0, Operand(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ Push(a6, a5, a4, a1, a0);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;

    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperands(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    PushOperand(a2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(a1);
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
  // Record source position for debugger.
  SetCallPosition(expr);
  Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
                                          expr->tail_call_mode())
                          .code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(code, RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ ld(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(a2);
  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ ld(result_register(),
        FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ ld(result_register(),
        FieldMemOperand(result_register(), Map::kPrototypeOffset));
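  // result_register() now holds the map's prototype, i.e. the super
  // constructor of the current function.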
  PushOperand(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into a3.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(a3, result_register());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ ld(a1, MemOperand(sp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, a4);
  Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

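  // %_ClassOf returns the instance class name for JS receivers, e.g.
  // "Array" for arrays and "Function" for functions, and null otherwise.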
  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(v0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));

  // Check if the constructor in the map is a JS function.
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to a1.
  int const argc = args->length() - 2;
  __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(v0);
  __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lbu(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

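  // Fast path: allocate the JSIteratorResult in new space and initialize
  // its fields inline; fall back to the runtime if allocation fails.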
2874  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
2875              NO_ALLOCATION_FLAGS);
2876  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2877  __ Pop(a2, a3);
2878  __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
2879  __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2880  __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2881  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2882  __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2883  __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2884  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2885  __ jmp(&done);
2886
2887  __ bind(&runtime);
2888  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2889
2890  __ bind(&done);
2891  context()->Plug(v0);
2892}
2893
2894
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), v0);
  PushOperand(v0);

  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  PushOperand(v0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


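// Unary operations. Illustrative inputs for the cases below:
//   delete o.p;   // DELETE: pushes o and "p", calls the delete runtime
//   void f();     // VOID: evaluates f() for effect, plugs undefined
//   !cond         // NOT: compiled as control flow, no dedicated stub
//   typeof x      // TYPEOF: Typeof builtin on the accumulator value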
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ LoadGlobalObject(a2);
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


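// Count operations, e.g. x++, --o.p, ++o[k]. For a postfix expression whose
// value is used, a slot for the old value is reserved on the stack (under
// any receiver/key/home-object words for the property cases) before the
// store of the incremented value is emitted.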
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::kZero));
      PushOperand(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        __ ld(scratch, MemOperand(sp, 0));  // this
        PushOperands(result_register(), scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        const Register scratch1 = a1;
        const Register scratch2 = a4;
        __ ld(scratch1, MemOperand(sp, 1 * kPointerSize));  // this
        __ ld(scratch2, MemOperand(sp, 0 * kPointerSize));  // home object
        PushOperands(result_register(), scratch1, scratch2, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ld(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // because evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

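  // Note (informal): the smi fast path below is emitted behind a
  // JumpPatchSite so the check can later be patched based on type feedback;
  // patch_site.EmitPatchInfo() after the IC call records the patch marker.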
  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sd(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(v0);
          break;
        case NAMED_PROPERTY:
          __ sd(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(v0);
        }
        // For all contexts except the effect context we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


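// Inline code for literal typeof comparisons such as
//   typeof x === "number"
// Each branch below keys off the literal string and tests the value's map,
// instance type or bit field directly, avoiding a call to the Typeof
// builtin followed by a string compare.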
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_false, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
    // Check for callable or undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
  } else if (String::Equals(check, factory->type##_string())) {  \
    __ JumpIfSmi(v0, if_false);                                  \
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


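// Compare operations, e.g. a < b, key in obj, v instanceof C. Literal
// special cases (typeof comparisons and null/undefined checks) are peeled
// off first by TryLiteralCompare and handled by the Emit* helpers nearby.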
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      __ mov(a0, result_register());
      PopOperand(a1);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      PopOperand(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


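// Inline code for comparisons against null/undefined literals, e.g.
//   x === null, x == undefined
// The non-strict form also treats undetectable objects as nullish.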
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ ld(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ sd(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ld(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ ld(a1, MemOperand(at));
  PushOperand(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  PopOperand(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


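// Replays deferred completions recorded while unwinding through finally
// blocks, e.g. a 'return' inside a try whose finally clause had to run
// first: the token popped into a1 selects which pending command applies.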
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(a1);                 // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ li(at, Operand(Smi::FromInt(cmd.token)));
    __ Branch(&skip, ne, a1, Operand(at));
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


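// Back-edge patching toggles between the sequences sketched in the comments
// below: in the INTERRUPT state the branch is live (the slt result decides
// whether to call the interrupt stub), while ON_STACK_REPLACEMENT forces it
// dead (at = 1, so the beq is never taken and the OSR builtin always runs).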
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> middle
      // dsll t9, t9, 16
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
      // daddiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> middle
      // dsll t9, t9, 16
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->daddiu(at, zero_reg, 1);
      break;
  }
  // Replace the stack check address in the load-immediate instruction
  // sequence above with the entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint64_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint64_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  DCHECK(reinterpret_cast<uint64_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint64_t>(
             isolate->builtins()->OnStackReplacement()->entry()));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64