// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
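
// Note (sketch): ACCESS_MASM makes '__' expand to the masm_-> prefix in the
// default build configuration, which keeps the emitted assembler sequences
// below readable.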

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
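
// How the patch site works (sketch): both emitters above test bit 0 of xzr,
// which is always clear, so the unpatched tbz is always taken and the
// unpatched tbnz never is. PatchInlinedSmiCode (ic-arm64.cc) later rewrites
// the instruction to test bit 0 (the smi tag bit) of reg_ with the opposite
// polarity, turning it into a real inlined smi check.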


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);

    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
    __ Poke(x10, receiver_offset);

    __ Bind(&ok);
  }


  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //  Push(lr, fp, cp, x1);
  //  Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
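        // Illustrative arithmetic: with locals_count == 100 the loop below
        // runs 100 / 32 == 3 times (96 pushes) and the tail push after the
        // loop emits the remaining 100 % 32 == 4 slots.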
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register_x1 = false;
    // Context is returned in x0.  It replaces the context passed to us.
    // It's saved on the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ Mov(x3, x1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(x1, Smi::FromInt(num_parameters));
    __ Push(x3, x2, x1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }


  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }
  }

  { Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    __ B(hs, &ok);
    PredictableCodeSizeScope predictable(masm_,
                                         Assembler::kCallSizeWithRelocation);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ Bind(&ok);
  }

  { Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(function()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
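  // The Subs below also sets the condition flags; callers rely on this and
  // branch on 'pl' immediately after this helper to test whether the counter
  // is still non-negative.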
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
    masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
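  // Illustrative: if kCodeSizeMultiplier were 100, a raw distance of 260
  // would give (260 + 50) / 100 == 3, i.e. round(2.6) rather than floor(2.6).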
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit, 1);
      DCHECK(x0.Is(result_register()));
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence. This sequence can get patched when the debugger is used. See
    // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
    {
      InstructionAccurateScope scope(masm_,
                                     Assembler::kJSRetSequenceInstructions);
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // This code is generated using Assembler methods rather than Macro
      // Assembler methods because it will be patched later on, and so the size
      // of the generated code must be consistent.
      const Register& current_sp = __ StackPointer();
      // Nothing ensures 16-byte alignment here.
      DCHECK(!current_sp.Is(csp));
      __ mov(current_sp, fp);
      int no_frame_start = masm_->pc_offset();
      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
      // Drop the arguments and receiver and return.
      // TODO(all): This implementation is overkill as it supports 2**31+1
      // arguments, consider how to improve it without creating a security
      // hole.
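      // Sequence layout (for reference): the pc-relative load below reads the
      // 64-bit literal emitted by dc64 after the ret, 3 instructions
      // (3 * kInstructionSize bytes) ahead of the ldr; the literal holds the
      // byte size of the arguments plus receiver.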
      __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
      __ add(current_sp, current_sp, ip0);
      __ ret();
      __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
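  // Hole-initialization note (sketch): let/const bindings start out holding
  // the hole value so that a read before initialization can be detected;
  // EmitVariableLoad emits the corresponding hole checks.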

  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                              : NONE;
      __ Mov(x1, Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, x2, x1, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(cp, x2, x1, xzr);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Mov(x1, Smi::FromInt(NONE));
      __ Push(cp, x2, x1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));

  // Assign it.
  __ Str(x1, ContextMemOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            x1,
                            x3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse info body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
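  // Because xzr reads as zero, pushing it passes a 0 flags value without a
  // materialising Mov; the Mov below is only emitted when the flags are
  // nonzero.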
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(cp, x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();
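    // The next_test label is recycled: binding it here resolves jumps from
    // the previous clause, and Unuse() resets it so the comparison below can
    // target it again for the following clause.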

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  // TODO(all): This visitor probably needs better comments and a revisit.
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
  __ Bind(&convert);
  __ Push(x0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ Bind(&done_convert);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ LoadObject(x1, FeedbackVector());
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));

  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
  __ Peek(x10, 0);  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  // TODO(all): similar check was done already. Can we avoid it here?
  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
  DCHECK(Smi::FromInt(0) == 0);
  __ CzeroX(x1, le);  // Zero indicates proxy.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ Bind(&loop);
  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
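  // The loop index in x0 is a smi; UntagSmiAndScale strips the tag and
  // rescales the value by kPointerSize to form a byte offset into the
  // fixed array's elements.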
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  STATIC_ASSERT(kSmiTag == 0);
  __ Cbz(x2, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Mov(x3, x0);
  __ Cbz(x0, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ Bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ Bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ B(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ Push(cp, x11, x10);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(x0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cnmt(masm_, "[ SuperReference ");

  __ ldr(LoadDescriptor::ReceiverRegister(),
         MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Mov(LoadDescriptor::NameRegister(), Operand(home_object_symbol));

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
  __ cmp(x0, x10);
  Label done;
  __ b(&done, ne);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is NULL.
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ Cbnz(temp, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
  __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ Mov(VectorLoadICDescriptor::SlotRegister(),
           Smi::FromInt(proxy->VariableFeedbackSlot()));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
                                                        : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ Cbnz(temp, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ Cbnz(temp, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ B(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
      __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ Mov(VectorLoadICDescriptor::SlotRegister(),
               Smi::FromInt(proxy->VariableFeedbackSlot()));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(x0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(x0, var);
          Label done;
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Mov(x0, Operand(var->name()));
            __ Push(x0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ Bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
            __ Bind(&done);
          }
          context()->Plug(x0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // x5 = materialized value (RegExp literal)
  // x4 = JS function, literals array
  // x3 = literal index
  // x2 = RegExp pattern
  // x1 = RegExp flags
  // x0 = RegExp literal clone
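  // For example, for the literal /ab+c/gi the runtime receives the pattern
  // string "ab+c" in x2 and the flags string "gi" in x1 (illustrative).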
  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in x0.
  __ Mov(x3, Smi::FromInt(expr->literal_index()));
  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ Push(x4, x3, x2, x1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Mov(x5, x0);

  __ Bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
  __ B(&allocated);

  __ Bind(&runtime_allocate);
  __ Mov(x10, Smi::FromInt(size));
  __ Push(x5, x10);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(x5);

  __ Bind(&allocated);
  // After this, registers are used as follows:
  // x0: Newly allocated regexp.
  // x5: Materialized regexp.
  // x10, x11, x12: temps.
  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ Mov(x0, Smi::FromInt(flags));
  int properties_count = constant_properties->length() / 2;
  const int max_cloned_properties =
      FastCloneShallowObjectStub::kMaximumClonedProperties;
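  // For example, a shallow two-property literal like {a: 1, b: 2} is cloned
  // by the stub below, while literals with double fields, nesting deeper
  // than one level, or more than kMaximumClonedProperties properties go
  // through the runtime (illustrative).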
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > max_cloned_properties) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
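  // For example, in {x: 1, x: 2} only the store for the second 'x' is
  // emitted (illustrative).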
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(key);
          VisitForStackValue(value);
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode.
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          VisitForEffect(key);
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          // Duplicate receiver on stack.
          __ Peek(x0, 0);
          __ Push(x0);
          VisitForStackValue(value);
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          VisitForEffect(value);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
                                   : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ Mov(x0, Smi::FromInt(flags));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
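    // For example, in [1, 2, foo()] the constants 1 and 2 are part of the
    // boilerplate and only foo() is evaluated and stored below (illustrative).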
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Mov(x1, Smi::FromInt(expr->literal_index()));
      __ Push(x0, x1);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ Peek(x6, kPointerSize);  // Copy of array literal.
      __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
      __ Str(result_register(), FieldMemOperand(x1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(x1, offset, result_register(), x10,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ Mov(x3, Smi::FromInt(i));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ Drop(1);   // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ Push(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
  if (FLAG_vector_ics) {
    __ Mov(VectorLoadICDescriptor::SlotRegister(),
           Smi::FromInt(prop->PropertyFeedbackSlot()));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Push(x0);
  VisitForStackValue(super_ref->this_var());
  __ Push(key->value());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key and receiver in x0 and x1.
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  if (FLAG_vector_ics) {
    __ Mov(VectorLoadICDescriptor::SlotRegister(),
           Smi::FromInt(prop->PropertyFeedbackSlot()));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);

  // Perform combined smi check on both operands.
  __ Orr(x10, left, right);
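  // The smi tag bit is clear in x10 only if it is clear in both operands,
  // so a single test below covers left and right at once.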
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code used to mirror the smi-smi case of the (since
  // removed) type recording binary operation stub,
  // BinaryOpStub::GenerateSmiSmiOperation.
  //
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
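  // On arm64 a smi keeps its 32-bit payload in the upper word (kSmiShift is
  // 32); e.g. the smi for 5 has the bit pattern 0x0000000500000000. The Ubfx
  // instructions below thus extract bits [32, 37) of the tagged shift count,
  // i.e. the low five bits of the untagged value (illustrative note).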
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable in a
      // signed 32-bit smi.
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
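    // For MUL, both operands carry their payload in the upper word, so
    // left * right == (a << 32) * (b << 32) == (a * b) << 64 and the high 64
    // bits returned by Smulh are exactly the untagged product. A zero product
    // still needs the Eor sign check to reject -0, and Cls checks that the
    // product fits in the 32-bit smi payload (explanatory note).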
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ Pop(x1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  JumpPatchSite patch_site(masm_);    // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(StoreDescriptor::ReceiverRegister(), x0);
      __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(StoreDescriptor::NameRegister(), x0);
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::ValueRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ Mov(x1, Operand(var->name()));
      __ Push(x0, cp, x1);
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ Bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Mov(x11, Operand(var->name()));
      __ Mov(x10, Smi::FromInt(strict_mode()));
      // jssp[0]  : mode.
      // jssp[8]  : name.
      // jssp[16] : context.
      // jssp[24] : value.
      __ Push(x0, cp, x11, x10);
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  __ Pop(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // TODO(all): Could we pass this in registers rather than on the stack?
  __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(x0));

  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), x0);
      EmitNamedPropertyLoad(expr);
    } else {
      EmitNamedSuperPropertyLoad(expr);
    }
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(x0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ Move(LoadDescriptor::NameRegister(), x0);
    __ Pop(LoadDescriptor::ReceiverRegister());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(x0);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ Peek(LoadDescriptor::ReceiverRegister(), 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
  }

  EmitCall(expr, call_type);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Push(x0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(scratch, x0);
  __ Push(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - home_object <-- LoadFromSuper will pop here and below.
  //  - this (receiver)
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
  __ Move(LoadDescriptor::NameRegister(), x0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  EmitCall(expr, CallICState::METHOD);
}


void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());

  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x9, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
  }

  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  // Prepare to push the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));

  // Prepare to push the language mode.
  __ Mov(x12, Smi::FromInt(strict_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x13, Smi::FromInt(scope()->start_position()));
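  // Together with the copy of the function pushed by the caller, these form
  // the six arguments expected by the runtime call below: callee, first
  // argument (or undefined), enclosing function, receiver, language mode and
  // scope start position (explanatory note).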

  // Push.
  __ Push(x9, x10, x11, x12, x13);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    {
      PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      __ Push(x10);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ Peek(x10, (arg_count + 1) * kPointerSize);
      __ Push(x10);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in x0 (function) and
      // x1 (receiver). Touch up the stack with the right values.
      __ PokePair(x1, x0, arg_count * kPointerSize);
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());

    // Call the evaluated function.
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kXRegSize);
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, x0);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
    // and the object holding it (returned in x1).
    __ Mov(x10, Operand(proxy->name()));
    __ Push(context_register(), x10);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(x0, x1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x0, x1);
      __ Bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    // super.x() is handled in EmitSuperCallWithLoadIC.
    if (property->IsSuperAccess() && is_named_call) {
      EmitSuperCallWithLoadIC(expr);
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
    __ Push(x1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ LoadObject(x2, FeedbackVector());
  __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

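  // The combined mask below tests both the smi tag bit and the sign bit of
  // the smi payload; with kSmiShift == 32 and kSmiValueSize == 32 it is
  // kSmiTagMask | sign_mask == 0x8000000000000001 (illustrative).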
  uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tbnz(x11, Map::kIsUndetectable, if_false);
  __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ B(lt, if_false);
  __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ Tst(x11, 1 << Map::kIsUndetectable);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Register object = x0;
  __ AssertNotSmi(object);

  Register map = x10;
  Register bitfield2 = x11;
  __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object.
  Register props = x12;
  Register props_map = x12;
  Register hash_table_map = x13;
  __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
  __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);
  __ B(eq, if_false);

  // Look for valueOf name in the descriptor array, and indicate false if found.
  // Since we omit an enumeration index check, if it is added via a transition
  // that shares its descriptor array, this is a false positive.
  Label loop, done;

  // Skip loop if no descriptors are valid.
  Register descriptors = x12;
  Register descriptors_length = x13;
  __ NumberOfOwnDescriptors(descriptors_length, map);
  __ Cbz(descriptors_length, &done);

  __ LoadInstanceDescriptors(map, descriptors);

  // Compute the length of the descriptor array in pointer-sized slots.
  Register descriptors_end = x14;
  __ Mov(x15, DescriptorArray::kDescriptorSize);
  __ Mul(descriptors_length, descriptors_length, x15);
  // Calculate location of the first key name.
  __ Add(descriptors, descriptors,
         DescriptorArray::kFirstOffset - kHeapObjectTag);
  // Calculate the end of the descriptor array.
  __ Add(descriptors_end, descriptors,
         Operand(descriptors_length, LSL, kPointerSizeLog2));

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  Register valueof_string = x1;
  int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  __ Bind(&loop);
  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
  __ Cmp(x15, valueof_string);
  __ B(eq, if_false);
  __ Cmp(descriptors, descriptors_end);
  __ B(ne, &loop);

  __ Bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
  __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));

  __ Bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
  Register prototype = x1;
  Register global_idx = x2;
  Register native_context = x2;
  Register string_proto = x3;
  Register proto_map = x4;
  __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
  __ JumpIfSmi(prototype, if_false);
  __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
  __ Ldr(global_idx, GlobalObjectMemOperand());
  __ Ldr(native_context,
         FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
  __ Ldr(string_proto,
         ContextMemOperand(native_context,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ Cmp(proto_map, string_proto);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Only a HeapNumber can be -0.0, so return false if we have something else.
  __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);

  // Test the bit pattern.
  __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
  __ Cmp(x10, 1);   // Set V on 0x8000000000000000.
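  // -0.0 has the bit pattern 0x8000000000000000 (INT64_MIN), and x - 1
  // overflows the signed range, setting V, for exactly that input, so the
  // 'vs' split below selects -0.0 (explanatory note).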
2915
2916  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2917  Split(vs, if_true, if_false, fall_through);
2918
2919  context()->Plug(if_true, if_false);
2920}
2921
2922
2923void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2924  ZoneList<Expression*>* args = expr->arguments();
2925  DCHECK(args->length() == 1);
2926
2927  VisitForAccumulatorValue(args->at(0));
2928
2929  Label materialize_true, materialize_false;
2930  Label* if_true = NULL;
2931  Label* if_false = NULL;
2932  Label* fall_through = NULL;
2933  context()->PrepareTest(&materialize_true, &materialize_false,
2934                         &if_true, &if_false, &fall_through);
2935
2936  __ JumpIfSmi(x0, if_false);
2937  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2938  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2939  Split(eq, if_true, if_false, fall_through);
2940
2941  context()->Plug(if_true, if_false);
2942}
2943
2944
2945void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2946  ZoneList<Expression*>* args = expr->arguments();
2947  DCHECK(args->length() == 1);
2948
2949  VisitForAccumulatorValue(args->at(0));
2950
2951  Label materialize_true, materialize_false;
2952  Label* if_true = NULL;
2953  Label* if_false = NULL;
2954  Label* fall_through = NULL;
2955  context()->PrepareTest(&materialize_true, &materialize_false,
2956                         &if_true, &if_false, &fall_through);
2957
2958  __ JumpIfSmi(x0, if_false);
2959  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2960  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2961  Split(eq, if_true, if_false, fall_through);
2962
2963  context()->Plug(if_true, if_false);
2964}
2965
2966
2967
2968void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &check_frame_marker);
  __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ Bind(&check_frame_marker);
  __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Pop(x1);
  __ Cmp(x0, x1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in x1.
  VisitForAccumulatorValue(args->at(0));
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));

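  // If the caller does not go through an arguments adaptor frame, the
  // number of actual arguments matches the number of formal parameters,
  // so x0 already holds the correct result.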
  // Check if the calling frame is an arguments adaptor frame.
  __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ Bind(&exit);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(x0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
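  // Concretely, the STATIC_ASSERTs below guarantee that the two callable
  // types sit exactly at the two ends of the spec object type range, so a
  // value below the range is not an object at all, and an equality match at
  // either end of the range identifies a function.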
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
  // x10: object's map.
  // x11: object's type.
  __ B(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ B(eq, &function);

  __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ B(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
  __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
                         &non_function_constructor);

  // x12 now contains the constructor function. Grab the
  // instance class name from there.
  __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
  __ B(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitValueOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(x0, &done);
  // If the object is not a value type, return the object.
  __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = x0;
  Register result = x0;
  Register stamp_addr = x10;
  Register stamp_cache = x11;

  __ JumpIfSmi(object, &not_date_object);
  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);

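  // Date fields below JSDate::kFirstUncachedField are cached on the JSDate
  // object itself. The cached values are only valid while the object's cache
  // stamp matches the isolate's date cache stamp, which is changed whenever
  // the date cache is reset (e.g. on a timezone change); on a mismatch we
  // fall back to the C function.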
  if (index->value() == 0) {
    __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ B(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ Mov(x10, stamp);
      __ Ldr(stamp_addr, MemOperand(x10));
      __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Cmp(stamp_addr, stamp_cache);
      __ B(ne, &runtime);
      __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ B(&done);
    }

    __ Bind(&runtime);
    __ Mov(x1, index);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ B(&done);
  }

  __ Bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 one_byte_seq_type);
  }

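  // The store below writes to string + kHeaderSize - kHeapObjectTag + index,
  // i.e. the index'th byte of the string payload, once both smis have been
  // untagged.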
  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strb(value, MemOperand(scratch, index));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }

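  // As the one-byte case above, except that the index is scaled by two
  // (LSL 1): sequential two-byte strings store uint16 characters.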
  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the MathPow stub.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ Pop(x1);
  // x0 = value.
  // x1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(x1, &done);

  // If the object is not a value type, return the value.
  __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);

  // Store the value.
  __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ Mov(x10, x0);
  __ RecordWriteField(
      x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into x0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x3;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x0;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  x3,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ Pop(x1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(x0, &runtime);
  __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);

  // InvokeFunction requires the function in x1. Move it in there.
  __ Mov(x1, x0);
  ParameterCount count(arg_count);
  __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ B(&done);

  __ Bind(&runtime);
  __ Push(x0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ Bind(&done);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ Pop(x1, x2);
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    context()->Plug(x0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = x0;
  Register cache = x1;
  __ Ldr(cache, GlobalObjectMemOperand());
  __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ Ldr(cache, ContextMemOperand(cache,
                                  Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ Ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

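  // A JSFunctionResultCache is a FixedArray of (key, value) pairs, with a
  // "finger" index pointing at the most recently used pair. Only the entry
  // under the finger is checked inline; any other key falls through to the
  // runtime lookup below.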
  Label done;
  __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
                                       JSFunctionResultCache::kFingerOffset));
  __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));

  // Load the key and data from the cache.
  __ Ldp(x2, x3, MemOperand(x3));

  __ Cmp(key, x2);
  __ CmovX(x0, x3, eq);
  __ B(eq, &done);

  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ Tst(x10, String::kContainsCachedArrayIndexMask);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(x0);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ IndexFromHash(x10, x0);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");

  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  Register array = x0;
  Register result = x0;
  Register elements = x1;
  Register element = x2;
  Register separator = x3;
  Register array_length = x4;
  Register result_pos = x5;
  Register map = x6;
  Register string_length = x10;
  Register elements_end = x11;
  Register string = x12;
  Register scratch1 = x13;
  Register scratch2 = x14;
  Register scratch3 = x7;
  Register separator_length = x15;

  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
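  // What follows is, roughly, the following algorithm (a sketch; any failed
  // check bails out by returning undefined, which makes the caller fall
  // back to the generic join):
  //   total = 0;
  //   for each element s:
  //     if s is not a sequential one-byte string, bail;
  //     total += s.length;  // bail on smi overflow
  //   if n == 0, return ""; if n == 1, return elements[0];
  //   allocate a one-byte result of length total + (n - 1) * sep.length;
  //   copy, with loops specialized for empty/one-char/longer separators.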

  // The separator operand is on the stack.
  __ Pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(map, scratch1, &bailout);

  // If the array has length zero, return the empty string.
  // Load and untag the length of the array.
  // The length is a non-negative smi, so the sign extension performed by
  // Ldrsw is harmless. We assume little endianness.
  __ Ldrsw(array_length,
           UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
  __ Cbnz(array_length, &non_trivial_array);
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&non_trivial_array);
  // Get the FixedArray containing the array's elements.
  __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths.
  __ Mov(string_length, 0);
  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (not smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (FLAG_debug_code) {
    __ Cmp(array_length, 0);
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ Bind(&loop);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ Ldrsw(scratch1,
           UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ Adds(string_length, string_length, scratch1);
  __ B(vs, &bailout);
  __ Cmp(element, elements_end);
  __ B(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ Cmp(array_length, 1);
  __ B(ne, &not_size_one_array);
  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ B(&done);

  __ Bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array (not smi).
  //   string_length: Sum of string lengths (not smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string.
  // Load the separator length as untagged.
  // We assume little endianness, and that the length is positive.
  __ Ldrsw(separator_length,
           UntagSmiFieldMemOperand(separator,
                                   SeqOneByteString::kLengthOffset));
  __ Sub(string_length, string_length, separator_length);
  __ Umaddl(string_length, array_length.W(), separator_length.W(),
            string_length);
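  // string_length now holds
  //   sum(element lengths) + (array_length - 1) * separator_length,
  // the exact payload length of the result string.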

  // Get first element in the array.
  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array (not smi).
  __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
                           &bailout);

  // Prepare for looping. Set up elements_end to point to the end of the
  // array, and result_pos to the position in the result where the first
  // character will be written.
  // TODO(all): useless unless AllocateOneByteString trashes the register.
  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);

  // Check the length of the separator.
  __ Cmp(separator_length, 1);
  __ B(eq, &one_char_separator);
  __ B(gt, &long_separator);

  // Empty separator case.
  __ Bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  // One-character separator case.
  __ Bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ B(&one_char_separator_loop_entry);

  __ Bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ Strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ Bind(&one_char_separator_loop_entry);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  // Long separator case (separator is more than one character).
  // Entry is at the label long_separator below.
  __ Bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  // TODO(all): hoist next two instructions.
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(separator, String::kLengthOffset));
  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);

  __ Bind(&long_separator);
  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ Ldrsw(string_length,
           UntagSmiFieldMemOperand(string, String::kLengthOffset));
  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);  // End while (element < elements_end).
  __ B(&done);

  __ Bind(&bailout);
  // Returning undefined will force slower code to handle it.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);
  context()->Plug(x0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
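    // A JS runtime call, i.e. %foo(...) resolving to a JS builtin rather
    // than a C++ runtime function, is compiled as an ordinary method call
    // builtins.foo(...) with the builtins object as the receiver.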
    // Push the builtins object as the receiver.
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(LoadDescriptor::ReceiverRegister(),
           FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
    __ Push(LoadDescriptor::ReceiverRegister());

    // Load the function from the receiver.
    Handle<String> name = expr->name();
    __ Mov(LoadDescriptor::NameRegister(), Operand(name));
    if (FLAG_vector_ics) {
      __ Mov(VectorLoadICDescriptor::SlotRegister(),
             Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);

    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ Peek(x1, (arg_count + 1) * kPointerSize);
    __ CallStub(&stub);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, x0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Mov(x10, Smi::FromInt(strict_mode()));
        __ Push(x10);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ Ldr(x12, GlobalObjectMemOperand());
          __ Mov(x11, Operand(var->name()));
          __ Mov(x10, Smi::FromInt(SLOPPY));
          __ Push(x12, x11, x10);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ Push(context_register(), x2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(xzr);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the register.
      VisitForStackValue(prop->obj());
      __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      EmitNamedPropertyLoad(prop);
    } else {
      // KEYED_PROPERTY
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
      __ Peek(LoadDescriptor::NameRegister(), 0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property we
        // store the result under the receiver that is currently on top of the
        // stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
        }
      }
    }

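    // Inline smi increment/decrement: add the smi-tagged count value
    // directly and use the overflow flag to detect a result outside the smi
    // range, in which case the addition is undone and the generic
    // BinaryOpIC below is used instead.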
    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code =
        CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(x0);
        }
        // For all contexts except EffectContext, we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ Pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::NameRegister());
      __ Pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
    __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    if (FLAG_vector_ics) {
      __ Mov(VectorLoadICDescriptor::SlotRegister(),
             Smi::FromInt(proxy->VariableFeedbackSlot()));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(x0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    __ Mov(x0, Operand(proxy->name()));
    __ Push(cp, x0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ Bind(&done);

    context()->Plug(x0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

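  // Each recognized typeof string is handled with inline checks rather than
  // by materializing the typeof result and comparing strings; for example,
  // (typeof x == "number") becomes a smi check plus a heap number map check.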
  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => false.
    __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
    __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
                       fall_through);

  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    // Check for JS objects => true.
    Register map = x10;
    __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
                        if_false, lt);
    __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ B(gt, if_false);
    // Check for undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));

    __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
                    fall_through);

  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      __ Pop(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);

      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient here,
      // perhaps using Adr?
      __ Bind(&continuation);
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
      __ Mov(x1, Smi::FromInt(continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
      __ Push(x0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      __ Str(x1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
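
      // Delegating yield (yield* iter) is lowered to, roughly (a sketch):
      //   received = undefined; f = 'next';
      //   while (true) {
      //     result = iter[f](received);
      //     if (result.done) break;       // yield* evaluates to result.value
      //     f = 'next';
      //     received = yield result;      // yielded without re-boxing
      //   }
      // with an exception thrown into the generator resuming at the catch
      // handler below, which sets f = 'throw' and passes the exception along.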
4478
4479      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4480      Label l_next, l_call, l_loop;
4481      Register load_receiver = LoadDescriptor::ReceiverRegister();
4482      Register load_name = LoadDescriptor::NameRegister();
4483
4484      // Initial send value is undefined.
4485      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4486      __ B(&l_next);
4487
4488      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
4489      __ Bind(&l_catch);
4490      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4491      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
4492      __ Peek(x3, 1 * kPointerSize);                         // iter
4493      __ Push(load_name, x3, x0);                       // "throw", iter, except
4494      __ B(&l_call);
4495
4496      // try { received = %yield result }
4497      // Shuffle the received result above a try handler and yield it without
4498      // re-boxing.
4499      __ Bind(&l_try);
4500      __ Pop(x0);                                        // result
4501      __ PushTryHandler(StackHandler::CATCH, expr->index());
4502      const int handler_size = StackHandlerConstants::kSize;
4503      __ Push(x0);                                       // result
4504      __ B(&l_suspend);
4505
4506      // TODO(jbramley): This label is bound here because the following code
4507      // looks at its pos(). Is it possible to do something more efficient here,
4508      // perhaps using Adr?
4509      __ Bind(&l_continuation);
4510      __ B(&l_resume);
4511
4512      __ Bind(&l_suspend);
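      // The generator object lies beneath the pushed result (kPointerSize)
      // and the try handler (handler_size).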
4513      const int generator_object_depth = kPointerSize + handler_size;
4514      __ Peek(x0, generator_object_depth);
4515      __ Push(x0);                                       // g
4516      DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4517      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4518      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4519      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4520      __ Mov(x1, cp);
4521      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4522                          kLRHasBeenSaved, kDontSaveFPRegs);
4523      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4524      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4525      __ Pop(x0);                                        // result
4526      EmitReturnSequence();
4527      __ Bind(&l_resume);                                // received in x0
4528      __ PopTryHandler();
4529
4530      // receiver = iter; f = 'next'; arg = received;
4531      __ Bind(&l_next);
4532
4533      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
4534      __ Peek(x3, 1 * kPointerSize);                        // iter
4535      __ Push(load_name, x3, x0);                      // "next", iter, received
4536
4537      // result = receiver[f](arg);
4538      __ Bind(&l_call);
4539      __ Peek(load_receiver, 1 * kPointerSize);
4540      __ Peek(load_name, 2 * kPointerSize);
4541      if (FLAG_vector_ics) {
4542        __ Mov(VectorLoadICDescriptor::SlotRegister(),
4543               Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
4544      }
4545      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
4546      CallIC(ic, TypeFeedbackId::None());
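      // The loaded method is in x0. Move it to x1 (where CallFunctionStub
      // expects the function) and overwrite the name slot on the stack, so
      // the layout becomes [function, receiver, arg].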
4547      __ Mov(x1, x0);
4548      __ Poke(x1, 2 * kPointerSize);
4549      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
4550      __ CallStub(&stub);
4551
4552      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4553      __ Drop(1);  // The function is still on the stack; drop it.
4554
4555      // if (!result.done) goto l_try;
4556      __ Bind(&l_loop);
4557      __ Move(load_receiver, x0);
4558
4559      __ Push(load_receiver);                               // save result
4560      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
4561      if (FLAG_vector_ics) {
4562        __ Mov(VectorLoadICDescriptor::SlotRegister(),
4563               Smi::FromInt(expr->DoneFeedbackSlot()));
4564      }
4565      CallLoadIC(NOT_CONTEXTUAL);                           // x0=result.done
4566      // The ToBooleanStub argument (result.done) is in x0.
4567      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
4568      CallIC(bool_ic);
4569      __ Cbz(x0, &l_try);
4570
4571      // result.value
4572      __ Pop(load_receiver);                                 // result
4573      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
4574      if (FLAG_vector_ics) {
4575        __ Mov(VectorLoadICDescriptor::SlotRegister(),
4576               Smi::FromInt(expr->ValueFeedbackSlot()));
4577      }
4578      CallLoadIC(NOT_CONTEXTUAL);                            // x0=result.value
4579      context()->DropAndPlug(2, x0);                         // drop iter and g
4580      break;
4581    }
4582  }
4583}
4584
4585
void FullCodeGenerator::EmitGeneratorResume(
    Expression* generator, Expression* value,
    JSGeneratorObject::ResumeMode resume_mode) {
4589  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4590  Register value_reg = x0;
4591  Register generator_object = x1;
4592  Register the_hole = x2;
4593  Register operand_stack_size = w3;
4594  Register function = x4;
4595
  // The value stays in x0, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) had returned it, or
  // it is read to throw the value when the resumed generator is already
  // closed. x1 will hold the generator object until the activation has been
  // resumed.
4600  VisitForStackValue(generator);
4601  VisitForAccumulatorValue(value);
4602  __ Pop(generator_object);
4603
4604  // Check generator state.
4605  Label wrong_state, closed_state, done;
4606  __ Ldr(x10, FieldMemOperand(generator_object,
4607                              JSGeneratorObject::kContinuationOffset));
4608  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
4609  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
4610  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
4611  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);
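  // A positive continuation offset means the generator is suspended and can
  // be resumed; zero means it is closed; negative means it is already
  // executing.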
4612
4613  // Load suspended function and context.
4614  __ Ldr(cp, FieldMemOperand(generator_object,
4615                             JSGeneratorObject::kContextOffset));
4616  __ Ldr(function, FieldMemOperand(generator_object,
4617                                   JSGeneratorObject::kFunctionOffset));
4618
4619  // Load receiver and store as the first argument.
4620  __ Ldr(x10, FieldMemOperand(generator_object,
4621                              JSGeneratorObject::kReceiverOffset));
4622  __ Push(x10);
4623
4624  // Push holes for the rest of the arguments to the generator function.
4625  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
4626
  // The number of arguments is stored as an int32_t, and -1 is a marker
  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so sign extension
  // would normally be needed to handle it correctly. However, here we operate
  // on 32-bit W registers throughout, so no extension is required.
4631  __ Ldr(w10, FieldMemOperand(x10,
4632                              SharedFunctionInfo::kFormalParameterCountOffset));
4633  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4634  __ PushMultipleTimes(the_hole, w10);
4635
4636  // Enter a new JavaScript frame, and initialize its slots as they were when
4637  // the generator was suspended.
4638  Label resume_frame;
4639  __ Bl(&resume_frame);
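  // Bl sets lr to the address of the following branch, so when the resumed
  // generator eventually returns, control continues at &done.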
4640  __ B(&done);
4641
4642  __ Bind(&resume_frame);
4643  __ Push(lr,           // Return address.
4644          fp,           // Caller's frame pointer.
4645          cp,           // Callee's context.
4646          function);    // Callee's JS Function.
4647  __ Add(fp, __ StackPointer(), kPointerSize * 2);
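  // fp now points at the saved fp slot, giving the resumed activation a
  // standard JavaScript frame layout.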
4648
4649  // Load and untag the operand stack size.
4650  __ Ldr(x10, FieldMemOperand(generator_object,
4651                              JSGeneratorObject::kOperandStackOffset));
4652  __ Ldr(operand_stack_size,
4653         UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4654
4655  // If we are sending a value and there is no operand stack, we can jump back
4656  // in directly.
4657  if (resume_mode == JSGeneratorObject::NEXT) {
4658    Label slow_resume;
4659    __ Cbnz(operand_stack_size, &slow_resume);
4660    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4661    __ Ldrsw(x11,
4662             UntagSmiFieldMemOperand(generator_object,
4663                                     JSGeneratorObject::kContinuationOffset));
4664    __ Add(x10, x10, x11);
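    // x10 now holds the address of the suspend point inside the generator's
    // code.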
4665    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
4666    __ Str(x12, FieldMemOperand(generator_object,
4667                                JSGeneratorObject::kContinuationOffset));
4668    __ Br(x10);
4669
4670    __ Bind(&slow_resume);
4671  }
4672
4673  // Otherwise, we push holes for the operand stack and call the runtime to fix
4674  // up the stack and the handlers.
4675  __ PushMultipleTimes(the_hole, operand_stack_size);
4676
4677  __ Mov(x10, Smi::FromInt(resume_mode));
4678  __ Push(generator_object, result_register(), x10);
4679  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
4680  // Not reached: the runtime call returns elsewhere.
4681  __ Unreachable();
4682
4683  // Reach here when generator is closed.
4684  __ Bind(&closed_state);
4685  if (resume_mode == JSGeneratorObject::NEXT) {
4686    // Return completed iterator result when generator is closed.
4687    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
4688    __ Push(x10);
4689    // Pop value from top-of-stack slot; box result into result register.
4690    EmitCreateIteratorResult(true);
4691  } else {
4692    // Throw the provided value.
4693    __ Push(value_reg);
4694    __ CallRuntime(Runtime::kThrow, 1);
4695  }
4696  __ B(&done);
4697
4698  // Throw error if we attempt to operate on a running generator.
4699  __ Bind(&wrong_state);
4700  __ Push(generator_object);
4701  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
4702
4703  __ Bind(&done);
4704  context()->Plug(result_register());
4705}
4706
4707
4708void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
4709  Label gc_required;
4710  Label allocated;
4711
4712  Handle<Map> map(isolate()->native_context()->iterator_result_map());
4713
4714  // Allocate and populate an object with this form: { value: VAL, done: DONE }
4715
4716  Register result = x0;
4717  __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
4718  __ B(&allocated);
4719
4720  __ Bind(&gc_required);
4721  __ Push(Smi::FromInt(map->instance_size()));
4722  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
4723  __ Ldr(context_register(),
4724         MemOperand(fp, StandardFrameConstants::kContextOffset));
4725
4726  __ Bind(&allocated);
4727  Register map_reg = x1;
4728  Register result_value = x2;
4729  Register boolean_done = x3;
4730  Register empty_fixed_array = x4;
4731  Register untagged_result = x5;
4732  __ Mov(map_reg, Operand(map));
4733  __ Pop(result_value);
4734  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4735  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4736  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
4737  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
4738                JSObject::kElementsOffset);
4739  STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
4740                JSGeneratorObject::kResultDonePropertyOffset);
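  // The asserts above guarantee that the paired fields are adjacent, so each
  // Stp below initializes two consecutive slots with one instruction.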
4741  __ ObjectUntag(untagged_result, result);
4742  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
4743  __ Stp(empty_fixed_array, empty_fixed_array,
4744         MemOperand(untagged_result, JSObject::kPropertiesOffset));
4745  __ Stp(result_value, boolean_done,
4746         MemOperand(untagged_result,
4747                    JSGeneratorObject::kResultValuePropertyOffset));
4748
4749  // Only the value field needs a write barrier, as the other values are in the
4750  // root set.
4751  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
4752                      x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
4753}
4754
4755
// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used directly where this
// function should be used instead. Also, this function is not suitable for
// all places where x0 should be abstracted (e.g. when used as an argument).
// But some places assume that the first argument register is x0, and use this
// function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
4765Register FullCodeGenerator::result_register() {
4766  return x0;
4767}
4768
4769
4770Register FullCodeGenerator::context_register() {
4771  return cp;
4772}
4773
4774
4775void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4776  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4777  __ Str(value, MemOperand(fp, frame_offset));
4778}
4779
4780
4781void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4782  __ Ldr(dst, ContextMemOperand(cp, context_index));
4783}
4784
4785
4786void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4787  Scope* declaration_scope = scope()->DeclarationScope();
4788  if (declaration_scope->is_global_scope() ||
4789      declaration_scope->is_module_scope()) {
4790    // Contexts nested in the native context have a canonical empty function
4791    // as their closure, not the anonymous closure containing the global
4792    // code.  Pass a smi sentinel and let the runtime look up the empty
4793    // function.
4794    DCHECK(kSmiTag == 0);
4795    __ Push(xzr);
4796  } else if (declaration_scope->is_eval_scope()) {
4797    // Contexts created by a call to eval have the same closure as the
4798    // context calling eval, not the anonymous closure containing the eval
4799    // code.  Fetch it from the context.
4800    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4801    __ Push(x10);
4802  } else {
4803    DCHECK(declaration_scope->is_function_scope());
4804    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4805    __ Push(x10);
4806  }
4807}
4808
4809
4810void FullCodeGenerator::EnterFinallyBlock() {
4811  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4812  DCHECK(!result_register().is(x10));
  // Preserve the result register while executing the finally block.
  // Also "cook" the return address in lr onto the stack as a smi-encoded
  // offset from the code object, so it stays valid if the code object moves.
4815  __ Sub(x10, lr, Operand(masm_->CodeObject()));
4816  __ SmiTag(x10);
4817  __ Push(result_register(), x10);
4818
4819  // Store pending message while executing finally block.
4820  ExternalReference pending_message_obj =
4821      ExternalReference::address_of_pending_message_obj(isolate());
4822  __ Mov(x10, pending_message_obj);
4823  __ Ldr(x10, MemOperand(x10));
4824
4825  ExternalReference has_pending_message =
4826      ExternalReference::address_of_has_pending_message(isolate());
4827  STATIC_ASSERT(sizeof(bool) == 1);   // NOLINT(runtime/sizeof)
4828  __ Mov(x11, has_pending_message);
4829  __ Ldrb(x11, MemOperand(x11));
4830  __ SmiTag(x11);
4831
4832  __ Push(x10, x11);
4833
4834  ExternalReference pending_message_script =
4835      ExternalReference::address_of_pending_message_script(isolate());
4836  __ Mov(x10, pending_message_script);
4837  __ Ldr(x10, MemOperand(x10));
4838  __ Push(x10);
4839}
4840
4841
4842void FullCodeGenerator::ExitFinallyBlock() {
4843  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4844  DCHECK(!result_register().is(x10));
4845
4846  // Restore pending message from stack.
4847  __ Pop(x10, x11, x12);
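  // x10 = script, x11 = smi-tagged has-pending-message flag, x12 = message
  // object, as pushed by EnterFinallyBlock.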
4848  ExternalReference pending_message_script =
4849      ExternalReference::address_of_pending_message_script(isolate());
4850  __ Mov(x13, pending_message_script);
4851  __ Str(x10, MemOperand(x13));
4852
4853  __ SmiUntag(x11);
4854  ExternalReference has_pending_message =
4855      ExternalReference::address_of_has_pending_message(isolate());
4856  __ Mov(x13, has_pending_message);
4857  STATIC_ASSERT(sizeof(bool) == 1);   // NOLINT(runtime/sizeof)
4858  __ Strb(x11, MemOperand(x13));
4859
4860  ExternalReference pending_message_obj =
4861      ExternalReference::address_of_pending_message_obj(isolate());
4862  __ Mov(x13, pending_message_obj);
4863  __ Str(x12, MemOperand(x13));
4864
4865  // Restore result register and cooked return address from the stack.
4866  __ Pop(x10, result_register());
4867
4868  // Uncook the return address (see EnterFinallyBlock).
4869  __ SmiUntag(x10);
4870  __ Add(x11, x10, Operand(masm_->CodeObject()));
4871  __ Br(x11);
4872}
4873
4874
4875#undef __
4876
4877
4878void BackEdgeTable::PatchAt(Code* unoptimized_code,
4879                            Address pc,
4880                            BackEdgeState target_state,
4881                            Code* replacement_code) {
4882  // Turn the jump into a nop.
4883  Address branch_address = pc - 3 * kInstructionSize;
4884  PatchingAssembler patcher(branch_address, 1);
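  // The back-edge site is three instructions: a conditional branch (or nop
  // once patched), a literal load into x16 and a blr; pc points just past
  // the blr, so the branch is three instructions back.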
4885
4886  DCHECK(Instruction::Cast(branch_address)
4887             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
4888         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
4889          Instruction::Cast(branch_address)->ImmPCOffset() ==
4890              6 * kInstructionSize));
4891
4892  switch (target_state) {
4893    case INTERRUPT:
4894      //  <decrement profiling counter>
4895      //  .. .. .. ..       b.pl ok
4896      //  .. .. .. ..       ldr x16, pc+<interrupt stub address>
4897      //  .. .. .. ..       blr x16
4898      //  ... more instructions.
4899      //  ok-label
4900      // Jump offset is 6 instructions.
4901      patcher.b(6, pl);
4902      break;
4903    case ON_STACK_REPLACEMENT:
4904    case OSR_AFTER_STACK_CHECK:
4905      //  <decrement profiling counter>
4906      //  .. .. .. ..       mov x0, x0 (NOP)
4907      //  .. .. .. ..       ldr x16, pc+<on-stack replacement address>
4908      //  .. .. .. ..       blr x16
4909      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4910      break;
4911  }
4912
4913  // Replace the call address.
4914  Instruction* load = Instruction::Cast(pc)->preceding(2);
4915  Address interrupt_address_pointer =
4916      reinterpret_cast<Address>(load) + load->ImmPCOffset();
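  // 'load' is the pc-relative literal load into x16; adding its immediate
  // offset gives the address of the literal slot that holds the call target.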
4917  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
4918          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4919                                         ->builtins()
4920                                         ->OnStackReplacement()
4921                                         ->entry())) ||
4922         (Memory::uint64_at(interrupt_address_pointer) ==
4923          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4924                                         ->builtins()
4925                                         ->InterruptCheck()
4926                                         ->entry())) ||
4927         (Memory::uint64_at(interrupt_address_pointer) ==
4928          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4929                                         ->builtins()
4930                                         ->OsrAfterStackCheck()
                                         ->entry())));
4937  Memory::uint64_at(interrupt_address_pointer) =
4938      reinterpret_cast<uint64_t>(replacement_code->entry());
4939
4940  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4941      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4942}
4943
4944
4945BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4946    Isolate* isolate,
4947    Code* unoptimized_code,
4948    Address pc) {
4949  // TODO(jbramley): There should be some extra assertions here (as in the ARM
4950  // back-end), but this function is gone in bleeding_edge so it might not
4951  // matter anyway.
4952  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
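  // The first instruction of the back-edge site is either the original
  // conditional branch or the nop written by PatchAt.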
4953
4954  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
4955    Instruction* load = Instruction::Cast(pc)->preceding(2);
4956    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
4957                                       load->ImmPCOffset());
4958    if (entry == reinterpret_cast<uint64_t>(
4959        isolate->builtins()->OnStackReplacement()->entry())) {
4960      return ON_STACK_REPLACEMENT;
4961    } else if (entry == reinterpret_cast<uint64_t>(
4962        isolate->builtins()->OsrAfterStackCheck()->entry())) {
4963      return OSR_AFTER_STACK_CHECK;
4964    } else {
4965      UNREACHABLE();
4966    }
4967  }
4968
4969  return INTERRUPT;
4970}
4971
4972
4973#define __ ACCESS_MASM(masm())
4974
4975
4976FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4977    int* stack_depth,
4978    int* context_length) {
4979  ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
4980  // The macros used here must preserve the result register.
4981
4982  // Because the handler block contains the context of the finally
4983  // code, we can restore it directly from there for the finally code
4984  // rather than iteratively unwinding contexts via their previous
4985  // links.
4986  __ Drop(*stack_depth);  // Down to the handler block.
4987  if (*context_length > 0) {
4988    // Restore the context to its dedicated register and the stack.
4989    __ Peek(cp, StackHandlerConstants::kContextOffset);
4990    __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4991  }
4992  __ PopTryHandler();
4993  __ Bl(finally_entry_);
4994
4995  *stack_depth = 0;
4996  *context_length = 0;
4997  return previous_;
4998}
4999
5000
5001#undef __
5002
5003
5004} }  // namespace v8::internal
5005
5006#endif  // V8_TARGET_ARCH_ARM64
5007