full-codegen-arm.cc revision 8b112d2025046f85ef7f6be087c6129c872ebad2
1// Copyright 2011 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_ARM)
31
32#include "code-stubs.h"
33#include "codegen.h"
34#include "compiler.h"
35#include "debug.h"
36#include "full-codegen.h"
37#include "parser.h"
38#include "scopes.h"
39#include "stub-cache.h"
40
41#include "arm/code-stubs-arm.h"
42
43namespace v8 {
44namespace internal {
45
46#define __ ACCESS_MASM(masm_)
47
48
49// A patch site is a location in the code that can be patched. This class
50// has a number of methods to emit the patchable code and a method,
51// EmitPatchInfo, to record a marker back to the patchable code. The marker
52// is a cmp rx, #yyy instruction; x * 0x00000fff + yyy (where yyy is the raw
53// 12 bit immediate value) is the delta from the pc to the first instruction
54// of the patchable code.
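//
// Illustrative example (editor's sketch, assuming kOff12Mask == 0xfff as used
// by EmitPatchInfo below): a delta of 5000 instructions is recorded as
// "cmp r1, #905", because 5000 / 0xfff == 1 selects register r1 and
// 5000 % 0xfff == 905 is the immediate; the patcher recovers the delta as
// 1 * 0xfff + 905 == 5000.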
55class JumpPatchSite BASE_EMBEDDED {
56 public:
57  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
58#ifdef DEBUG
59    info_emitted_ = false;
60#endif
61  }
62
63  ~JumpPatchSite() {
64    ASSERT(patch_site_.is_bound() == info_emitted_);
65  }
66
67  // When initially emitting this, ensure that a jump is always generated to
68  // skip the inlined smi code.
69  void EmitJumpIfNotSmi(Register reg, Label* target) {
70    ASSERT(!patch_site_.is_bound() && !info_emitted_);
71    __ bind(&patch_site_);
72    __ cmp(reg, Operand(reg));
73    // Don't use b(al, ...) as that might emit the constant pool right after the
74    // branch. After patching, when the branch is no longer unconditional,
75    // execution can continue into the constant pool.
76    __ b(eq, target);  // Always taken before patched.
77  }
78
79  // When initially emitting this, ensure that a jump is never generated to
80  // skip the inlined smi code.
81  void EmitJumpIfSmi(Register reg, Label* target) {
82    ASSERT(!patch_site_.is_bound() && !info_emitted_);
83    __ bind(&patch_site_);
84    __ cmp(reg, Operand(reg));
85    __ b(ne, target);  // Never taken before patched.
86  }
87
88  void EmitPatchInfo() {
89    int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
90    Register reg;
91    reg.set_code(delta_to_patch_site / kOff12Mask);
92    __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
93#ifdef DEBUG
94    info_emitted_ = true;
95#endif
96  }
97
98  bool is_bound() const { return patch_site_.is_bound(); }
99
100 private:
101  MacroAssembler* masm_;
102  Label patch_site_;
103#ifdef DEBUG
104  bool info_emitted_;
105#endif
106};
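
// Typical use of JumpPatchSite (editor's orientation sketch, mirroring the
// switch-statement comparison code further down in this file):
//
//   JumpPatchSite patch_site(masm_);
//   __ orr(r2, r1, r0);                      // r2 is a smi iff both inputs are.
//   patch_site.EmitJumpIfNotSmi(r2, &slow_case);
//   ...inlined smi comparison...
//   __ bind(&slow_case);
//   Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
//   EmitCallIC(ic, &patch_site);             // Records the patch info marker.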
107
108
109// Generate code for a JS function.  On entry to the function the receiver
110// and arguments have been pushed on the stack left to right.  The actual
111// argument count matches the formal parameter count expected by the
112// function.
113//
114// The live registers are:
115//   o r1: the JS function object being called (i.e., ourselves)
116//   o cp: our context
117//   o fp: our caller's frame pointer
118//   o sp: stack pointer
119//   o lr: return address
120//
121// The function builds a JS frame.  Please see JavaScriptFrameConstants in
122// frames-arm.h for its layout.
123void FullCodeGenerator::Generate(CompilationInfo* info) {
124  ASSERT(info_ == NULL);
125  info_ = info;
126  SetFunctionPosition(function());
127  Comment cmnt(masm_, "[ function compiled by full code generator");
128
129#ifdef DEBUG
130  if (strlen(FLAG_stop_at) > 0 &&
131      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
132    __ stop("stop-at");
133  }
134#endif
135
136  int locals_count = scope()->num_stack_slots();
137
138  __ Push(lr, fp, cp, r1);
139  if (locals_count > 0) {
140    // Load undefined value here, so the value is ready for the loop
141    // below.
142    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
143  }
144  // Adjust fp to point to caller's fp.
145  __ add(fp, sp, Operand(2 * kPointerSize));
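  // Editor's sketch of the frame at this point (see JavaScriptFrameConstants
  // in frames-arm.h for the authoritative layout); higher addresses first:
  //   fp + 8 : caller's sp (the last argument pushed by the caller)
  //   fp + 4 : saved lr (return address)
  //   fp + 0 : caller's fp
  //   fp - 4 : cp (context)
  //   fp - 8 : r1 (the JS function)   <-- sp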
146
147  { Comment cmnt(masm_, "[ Allocate locals");
148    for (int i = 0; i < locals_count; i++) {
149      __ push(ip);
150    }
151  }
152
153  bool function_in_register = true;
154
155  // Possibly allocate a local context.
156  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
157  if (heap_slots > 0) {
158    Comment cmnt(masm_, "[ Allocate local context");
159    // Argument to NewContext is the function, which is in r1.
160    __ push(r1);
161    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
162      FastNewContextStub stub(heap_slots);
163      __ CallStub(&stub);
164    } else {
165      __ CallRuntime(Runtime::kNewContext, 1);
166    }
167    function_in_register = false;
168    // Context is returned in both r0 and cp.  It replaces the context
169    // passed to us.  It's saved in the stack and kept live in cp.
170    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
171    // Copy any necessary parameters into the context.
172    int num_parameters = scope()->num_parameters();
173    for (int i = 0; i < num_parameters; i++) {
174      Slot* slot = scope()->parameter(i)->AsSlot();
175      if (slot != NULL && slot->type() == Slot::CONTEXT) {
176        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
177            (num_parameters - 1 - i) * kPointerSize;
178        // Load parameter from stack.
179        __ ldr(r0, MemOperand(fp, parameter_offset));
180        // Store it in the context.
181        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
182        __ str(r0, MemOperand(cp, r1));
183        // Update the write barrier. This clobbers all involved
184        // registers, so we have to use two more registers to avoid
185        // clobbering cp.
186        __ mov(r2, Operand(cp));
187        __ RecordWrite(r2, Operand(r1), r3, r0);
188      }
189    }
190  }
191
192  Variable* arguments = scope()->arguments();
193  if (arguments != NULL) {
194    // Function uses arguments object.
195    Comment cmnt(masm_, "[ Allocate arguments object");
196    if (!function_in_register) {
197      // The function is no longer in r1; reload it from the frame.
198      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
199    } else {
200      __ mov(r3, r1);
201    }
202    // Receiver is just before the parameters on the caller's stack.
203    int offset = scope()->num_parameters() * kPointerSize;
204    __ add(r2, fp,
205           Operand(StandardFrameConstants::kCallerSPOffset + offset));
206    __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
207    __ Push(r3, r2, r1);
208
209    // Arguments to ArgumentsAccessStub:
210    //   function, receiver address, parameter count.
211    // The stub will rewrite the receiver and parameter count if the previous
212    // stack frame was an arguments adaptor frame.
213    ArgumentsAccessStub stub(
214        is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
215                         : ArgumentsAccessStub::NEW_NON_STRICT);
216    __ CallStub(&stub);
217
218    Variable* arguments_shadow = scope()->arguments_shadow();
219    if (arguments_shadow != NULL) {
220      // Duplicate the value; move-to-slot operation might clobber registers.
221      __ mov(r3, r0);
222      Move(arguments_shadow->AsSlot(), r3, r1, r2);
223    }
224    Move(arguments->AsSlot(), r0, r1, r2);
225  }
226
227  if (FLAG_trace) {
228    __ CallRuntime(Runtime::kTraceEnter, 0);
229  }
230
231  // Visit the declarations and body unless there is an illegal
232  // redeclaration.
233  if (scope()->HasIllegalRedeclaration()) {
234    Comment cmnt(masm_, "[ Declarations");
235    scope()->VisitIllegalRedeclaration(this);
236
237  } else {
238    { Comment cmnt(masm_, "[ Declarations");
239      // For named function expressions, declare the function name as a
240      // constant.
241      if (scope()->is_function_scope() && scope()->function() != NULL) {
242        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
243      }
244      VisitDeclarations(scope()->declarations());
245    }
246
247    { Comment cmnt(masm_, "[ Stack check");
248      PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
249      Label ok;
250      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
251      __ cmp(sp, Operand(ip));
252      __ b(hs, &ok);
253      StackCheckStub stub;
254      __ CallStub(&stub);
255      __ bind(&ok);
256    }
257
258    { Comment cmnt(masm_, "[ Body");
259      ASSERT(loop_depth() == 0);
260      VisitStatements(function()->body());
261      ASSERT(loop_depth() == 0);
262    }
263  }
264
265  // Always emit a 'return undefined' in case control fell off the end of
266  // the body.
267  { Comment cmnt(masm_, "[ return <undefined>;");
268    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
269  }
270  EmitReturnSequence();
271
272  // Force emit the constant pool, so it doesn't get emitted in the middle
273  // of the stack check table.
274  masm()->CheckConstPool(true, false);
275}
276
277
278void FullCodeGenerator::ClearAccumulator() {
279  __ mov(r0, Operand(Smi::FromInt(0)));
280}
281
282
283void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
284  Comment cmnt(masm_, "[ Stack check");
285  Label ok;
286  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
287  __ cmp(sp, Operand(ip));
288  __ b(hs, &ok);
289  StackCheckStub stub;
290  __ CallStub(&stub);
291  // Record a mapping of this PC offset to the OSR id.  This is used to find
292  // the AST id from the unoptimized code in order to use it as a key into
293  // the deoptimization input data found in the optimized code.
294  RecordStackCheck(stmt->OsrEntryId());
295
296  __ bind(&ok);
297  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
298  // Record a mapping of the OSR id to this PC.  This is used if the OSR
299  // entry becomes the target of a bailout.  We don't expect it to be, but
300  // we want it to work if it is.
301  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
302}
303
304
305void FullCodeGenerator::EmitReturnSequence() {
306  Comment cmnt(masm_, "[ Return sequence");
307  if (return_label_.is_bound()) {
308    __ b(&return_label_);
309  } else {
310    __ bind(&return_label_);
311    if (FLAG_trace) {
312      // Push the return value on the stack as the parameter.
313      // Runtime::TraceExit returns its parameter in r0.
314      __ push(r0);
315      __ CallRuntime(Runtime::kTraceExit, 1);
316    }
317
318#ifdef DEBUG
319    // Add a label for checking the size of the code used for returning.
320    Label check_exit_codesize;
321    masm_->bind(&check_exit_codesize);
322#endif
323    // Make sure that the constant pool is not emitted inside of the return
324    // sequence.
325    { Assembler::BlockConstPoolScope block_const_pool(masm_);
326      // Use masm_-> here instead of the __ macro to keep the code coverage
327      // tool from instrumenting this sequence; we rely on its exact size.
328      int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
329      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
330      __ RecordJSReturn();
331      masm_->mov(sp, fp);
332      masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
333      masm_->add(sp, sp, Operand(sp_delta));
334      masm_->Jump(lr);
335    }
336
337#ifdef DEBUG
338    // Check that the size of the code used for returning is large enough
339    // for the debugger's requirements.
340    ASSERT(Assembler::kJSReturnSequenceInstructions <=
341           masm_->InstructionsGeneratedSince(&check_exit_codesize));
342#endif
343  }
344}
345
346
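// The Plug/DropAndPlug methods below deliver a computed value to the current
// expression context (editor's summary of the code that follows):
//   - EffectContext:            the value is discarded.
//   - AccumulatorValueContext:  the value is left in the result register (r0).
//   - StackValueContext:        the value is pushed onto the stack.
//   - TestContext:              the value is converted to control flow via
//                               DoTest and the true/false/fall-through labels.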
347void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
348}
349
350
351void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
352  codegen()->Move(result_register(), slot);
353}
354
355
356void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
357  codegen()->Move(result_register(), slot);
358  __ push(result_register());
359}
360
361
362void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
363  // For simplicity we always test the accumulator register.
364  codegen()->Move(result_register(), slot);
365  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
366  codegen()->DoTest(true_label_, false_label_, fall_through_);
367}
368
369
370void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
371}
372
373
374void FullCodeGenerator::AccumulatorValueContext::Plug(
375    Heap::RootListIndex index) const {
376  __ LoadRoot(result_register(), index);
377}
378
379
380void FullCodeGenerator::StackValueContext::Plug(
381    Heap::RootListIndex index) const {
382  __ LoadRoot(result_register(), index);
383  __ push(result_register());
384}
385
386
387void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
388  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
389                                          true,
390                                          true_label_,
391                                          false_label_);
392  if (index == Heap::kUndefinedValueRootIndex ||
393      index == Heap::kNullValueRootIndex ||
394      index == Heap::kFalseValueRootIndex) {
395    if (false_label_ != fall_through_) __ b(false_label_);
396  } else if (index == Heap::kTrueValueRootIndex) {
397    if (true_label_ != fall_through_) __ b(true_label_);
398  } else {
399    __ LoadRoot(result_register(), index);
400    codegen()->DoTest(true_label_, false_label_, fall_through_);
401  }
402}
403
404
405void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
406}
407
408
409void FullCodeGenerator::AccumulatorValueContext::Plug(
410    Handle<Object> lit) const {
411  __ mov(result_register(), Operand(lit));
412}
413
414
415void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
416  // Immediates cannot be pushed directly.
417  __ mov(result_register(), Operand(lit));
418  __ push(result_register());
419}
420
421
422void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
423  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
424                                          true,
425                                          true_label_,
426                                          false_label_);
427  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
428  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
429    if (false_label_ != fall_through_) __ b(false_label_);
430  } else if (lit->IsTrue() || lit->IsJSObject()) {
431    if (true_label_ != fall_through_) __ b(true_label_);
432  } else if (lit->IsString()) {
433    if (String::cast(*lit)->length() == 0) {
434      if (false_label_ != fall_through_) __ b(false_label_);
435    } else {
436      if (true_label_ != fall_through_) __ b(true_label_);
437    }
438  } else if (lit->IsSmi()) {
439    if (Smi::cast(*lit)->value() == 0) {
440      if (false_label_ != fall_through_) __ b(false_label_);
441    } else {
442      if (true_label_ != fall_through_) __ b(true_label_);
443    }
444  } else {
445    // For simplicity we always test the accumulator register.
446    __ mov(result_register(), Operand(lit));
447    codegen()->DoTest(true_label_, false_label_, fall_through_);
448  }
449}
450
451
452void FullCodeGenerator::EffectContext::DropAndPlug(int count,
453                                                   Register reg) const {
454  ASSERT(count > 0);
455  __ Drop(count);
456}
457
458
459void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
460    int count,
461    Register reg) const {
462  ASSERT(count > 0);
463  __ Drop(count);
464  __ Move(result_register(), reg);
465}
466
467
468void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
469                                                       Register reg) const {
470  ASSERT(count > 0);
471  if (count > 1) __ Drop(count - 1);
472  __ str(reg, MemOperand(sp, 0));
473}
474
475
476void FullCodeGenerator::TestContext::DropAndPlug(int count,
477                                                 Register reg) const {
478  ASSERT(count > 0);
479  // For simplicity we always test the accumulator register.
480  __ Drop(count);
481  __ Move(result_register(), reg);
482  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
483  codegen()->DoTest(true_label_, false_label_, fall_through_);
484}
485
486
487void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
488                                            Label* materialize_false) const {
489  ASSERT(materialize_true == materialize_false);
490  __ bind(materialize_true);
491}
492
493
494void FullCodeGenerator::AccumulatorValueContext::Plug(
495    Label* materialize_true,
496    Label* materialize_false) const {
497  Label done;
498  __ bind(materialize_true);
499  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
500  __ jmp(&done);
501  __ bind(materialize_false);
502  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
503  __ bind(&done);
504}
505
506
507void FullCodeGenerator::StackValueContext::Plug(
508    Label* materialize_true,
509    Label* materialize_false) const {
510  Label done;
511  __ bind(materialize_true);
512  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
513  __ push(ip);
514  __ jmp(&done);
515  __ bind(materialize_false);
516  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
517  __ push(ip);
518  __ bind(&done);
519}
520
521
522void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
523                                          Label* materialize_false) const {
524  ASSERT(materialize_true == true_label_);
525  ASSERT(materialize_false == false_label_);
526}
527
528
529void FullCodeGenerator::EffectContext::Plug(bool flag) const {
530}
531
532
533void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
534  Heap::RootListIndex value_root_index =
535      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
536  __ LoadRoot(result_register(), value_root_index);
537}
538
539
540void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
541  Heap::RootListIndex value_root_index =
542      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
543  __ LoadRoot(ip, value_root_index);
544  __ push(ip);
545}
546
547
548void FullCodeGenerator::TestContext::Plug(bool flag) const {
549  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
550                                          true,
551                                          true_label_,
552                                          false_label_);
553  if (flag) {
554    if (true_label_ != fall_through_) __ b(true_label_);
555  } else {
556    if (false_label_ != fall_through_) __ b(false_label_);
557  }
558}
559
560
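// Editor's note on DoTest below: the inlined VFP3 fast path encodes the
// ToBoolean rules directly (undefined -> false, true -> true, false -> false,
// smi zero -> false, any other smi -> true) and falls back to ToBooleanStub
// for everything else; without VFP3 the value is passed to Runtime::kToBool
// and compared against the false value. In both configurations the final
// Split(ne, ...) branches to if_true when the result is true.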
561void FullCodeGenerator::DoTest(Label* if_true,
562                               Label* if_false,
563                               Label* fall_through) {
564  if (CpuFeatures::IsSupported(VFP3)) {
565    CpuFeatures::Scope scope(VFP3);
566    // Emit the inlined tests assumed by the stub.
567    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
568    __ cmp(result_register(), ip);
569    __ b(eq, if_false);
570    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
571    __ cmp(result_register(), ip);
572    __ b(eq, if_true);
573    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
574    __ cmp(result_register(), ip);
575    __ b(eq, if_false);
576    STATIC_ASSERT(kSmiTag == 0);
577    __ tst(result_register(), result_register());
578    __ b(eq, if_false);
579    __ JumpIfSmi(result_register(), if_true);
580
581    // Call the ToBoolean stub for all other cases.
582    ToBooleanStub stub(result_register());
583    __ CallStub(&stub);
584    __ tst(result_register(), result_register());
585  } else {
586    // Call the runtime to find the boolean value of the source and then
587    // translate it into control flow to the pair of labels.
588    __ push(result_register());
589    __ CallRuntime(Runtime::kToBool, 1);
590    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
591    __ cmp(r0, ip);
592  }
593
594  // In both cases the ne condition means the value is true.
595  Split(ne, if_true, if_false, fall_through);
596}
597
598
599void FullCodeGenerator::Split(Condition cond,
600                              Label* if_true,
601                              Label* if_false,
602                              Label* fall_through) {
603  if (if_false == fall_through) {
604    __ b(cond, if_true);
605  } else if (if_true == fall_through) {
606    __ b(NegateCondition(cond), if_false);
607  } else {
608    __ b(cond, if_true);
609    __ b(if_false);
610  }
611}
612
613
614MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
615  switch (slot->type()) {
616    case Slot::PARAMETER:
617    case Slot::LOCAL:
618      return MemOperand(fp, SlotOffset(slot));
619    case Slot::CONTEXT: {
620      int context_chain_length =
621          scope()->ContextChainLength(slot->var()->scope());
622      __ LoadContext(scratch, context_chain_length);
623      return ContextOperand(scratch, slot->index());
624    }
625    case Slot::LOOKUP:
626      UNREACHABLE();
627  }
628  UNREACHABLE();
629  return MemOperand(r0, 0);
630}
631
632
633void FullCodeGenerator::Move(Register destination, Slot* source) {
634  // Use destination as scratch.
635  MemOperand slot_operand = EmitSlotSearch(source, destination);
636  __ ldr(destination, slot_operand);
637}
638
639
640void FullCodeGenerator::Move(Slot* dst,
641                             Register src,
642                             Register scratch1,
643                             Register scratch2) {
644  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
645  ASSERT(!scratch1.is(src) && !scratch2.is(src));
646  MemOperand location = EmitSlotSearch(dst, scratch1);
647  __ str(src, location);
648  // Emit the write barrier code if the location is in the heap.
649  if (dst->type() == Slot::CONTEXT) {
650    __ RecordWrite(scratch1,
651                   Operand(Context::SlotOffset(dst->index())),
652                   scratch2,
653                   src);
654  }
655}
656
657
658void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
659                                                     bool should_normalize,
660                                                     Label* if_true,
661                                                     Label* if_false) {
662  // Only prepare for bailouts before splits if we're in a test
663  // context. Otherwise, we let the Visit function deal with the
664  // preparation to avoid preparing with the same AST id twice.
665  if (!context()->IsTest() || !info_->IsOptimizable()) return;
666
667  Label skip;
668  if (should_normalize) __ b(&skip);
669
670  ForwardBailoutStack* current = forward_bailout_stack_;
671  while (current != NULL) {
672    PrepareForBailout(current->expr(), state);
673    current = current->parent();
674  }
675
676  if (should_normalize) {
677    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
678    __ cmp(r0, ip);
679    Split(eq, if_true, if_false, NULL);
680    __ bind(&skip);
681  }
682}
683
684
685void FullCodeGenerator::EmitDeclaration(Variable* variable,
686                                        Variable::Mode mode,
687                                        FunctionLiteral* function) {
688  Comment cmnt(masm_, "[ Declaration");
689  ASSERT(variable != NULL);  // Must have been resolved.
690  Slot* slot = variable->AsSlot();
691  Property* prop = variable->AsProperty();
692
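  // Editor's note (summary of the cases below, hedged): PARAMETER and LOCAL
  // slots live in the stack frame and are written directly; CONTEXT slots
  // live in the heap-allocated function context (e.g. variables captured by
  // closures) and may need a write barrier; LOOKUP slots are resolved at
  // runtime via Runtime::kDeclareContextSlot (e.g. declarations made in code
  // that also calls eval). Declarations that rewrite to a property are
  // handled by the keyed-store IC path at the end of this function.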
693  if (slot != NULL) {
694    switch (slot->type()) {
695      case Slot::PARAMETER:
696      case Slot::LOCAL:
697        if (mode == Variable::CONST) {
698          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
699          __ str(ip, MemOperand(fp, SlotOffset(slot)));
700        } else if (function != NULL) {
701          VisitForAccumulatorValue(function);
702          __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
703        }
704        break;
705
706      case Slot::CONTEXT:
707        // We bypass the general EmitSlotSearch because we know more about
708        // this specific context.
709
710        // The variable in the decl always resides in the current function
711        // context.
712        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
713        if (FLAG_debug_code) {
714          // Check that we're not inside a 'with'.
715          __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
716          __ cmp(r1, cp);
717          __ Check(eq, "Unexpected declaration in current context.");
718        }
719        if (mode == Variable::CONST) {
720          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
721          __ str(ip, ContextOperand(cp, slot->index()));
722          // No write barrier since the_hole_value is in old space.
723        } else if (function != NULL) {
724          VisitForAccumulatorValue(function);
725          __ str(result_register(), ContextOperand(cp, slot->index()));
726          int offset = Context::SlotOffset(slot->index());
727          // We know that we have written a function, which is not a smi.
728          __ mov(r1, Operand(cp));
729          __ RecordWrite(r1, Operand(offset), r2, result_register());
730        }
731        break;
732
733      case Slot::LOOKUP: {
734        __ mov(r2, Operand(variable->name()));
735        // Declaration nodes are always introduced in one of two modes.
736        ASSERT(mode == Variable::VAR ||
737               mode == Variable::CONST);
738        PropertyAttributes attr =
739            (mode == Variable::VAR) ? NONE : READ_ONLY;
740        __ mov(r1, Operand(Smi::FromInt(attr)));
741        // Push initial value, if any.
742        // Note: For variables we must not push an initial value (such as
743        // 'undefined') because we may have a (legal) redeclaration and we
744        // must not destroy the current value.
745        if (mode == Variable::CONST) {
746          __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
747          __ Push(cp, r2, r1, r0);
748        } else if (function != NULL) {
749          __ Push(cp, r2, r1);
750          // Push initial value for function declaration.
751          VisitForStackValue(function);
752        } else {
753          __ mov(r0, Operand(Smi::FromInt(0)));  // No initial value!
754          __ Push(cp, r2, r1, r0);
755        }
756        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
757        break;
758      }
759    }
760
761  } else if (prop != NULL) {
762    if (function != NULL || mode == Variable::CONST) {
763      // We are declaring a function or constant that rewrites to a
764      // property.  Use (keyed) IC to set the initial value.  We
765      // cannot visit the rewrite because it's shared and we risk
766      // recording duplicate AST IDs for bailouts from optimized code.
767      ASSERT(prop->obj()->AsVariableProxy() != NULL);
768      { AccumulatorValueContext for_object(this);
769        EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
770      }
771      if (function != NULL) {
772        __ push(r0);
773        VisitForAccumulatorValue(function);
774        __ pop(r2);
775      } else {
776        __ mov(r2, r0);
777        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
778      }
779      ASSERT(prop->key()->AsLiteral() != NULL &&
780             prop->key()->AsLiteral()->handle()->IsSmi());
781      __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
782
783      Handle<Code> ic = is_strict_mode()
784          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
785          : isolate()->builtins()->KeyedStoreIC_Initialize();
786      EmitCallIC(ic, RelocInfo::CODE_TARGET);
787      // Value in r0 is ignored (declarations are statements).
788    }
789  }
790}
791
792
793void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
794  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
795}
796
797
798void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
799  // Call the runtime to declare the globals.
800  // The context is the first argument.
801  __ mov(r2, Operand(pairs));
802  __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
803  __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
804  __ Push(cp, r2, r1, r0);
805  __ CallRuntime(Runtime::kDeclareGlobals, 4);
806  // Return value is ignored.
807}
808
809
810void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
811  Comment cmnt(masm_, "[ SwitchStatement");
812  Breakable nested_statement(this, stmt);
813  SetStatementPosition(stmt);
814
815  // Keep the switch value on the stack until a case matches.
816  VisitForStackValue(stmt->tag());
817  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
818
819  ZoneList<CaseClause*>* clauses = stmt->cases();
820  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
821
822  Label next_test;  // Recycled for each test.
823  // Compile all the tests with branches to their bodies.
824  for (int i = 0; i < clauses->length(); i++) {
825    CaseClause* clause = clauses->at(i);
826    clause->body_target()->Unuse();
827
828    // The default is not a test, but remember it as the final fall-through.
829    if (clause->is_default()) {
830      default_clause = clause;
831      continue;
832    }
833
834    Comment cmnt(masm_, "[ Case comparison");
835    __ bind(&next_test);
836    next_test.Unuse();
837
838    // Compile the label expression.
839    VisitForAccumulatorValue(clause->label());
840
841    // Perform the comparison as if via '==='.
842    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
843    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
844    JumpPatchSite patch_site(masm_);
845    if (inline_smi_code) {
846      Label slow_case;
847      __ orr(r2, r1, r0);
848      patch_site.EmitJumpIfNotSmi(r2, &slow_case);
849
850      __ cmp(r1, r0);
851      __ b(ne, &next_test);
852      __ Drop(1);  // Switch value is no longer needed.
853      __ b(clause->body_target());
854      __ bind(&slow_case);
855    }
856
857    // Record position before stub call for type feedback.
858    SetSourcePosition(clause->position());
859    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
860    EmitCallIC(ic, &patch_site);
861    __ cmp(r0, Operand(0));
862    __ b(ne, &next_test);
863    __ Drop(1);  // Switch value is no longer needed.
864    __ b(clause->body_target());
865  }
866
867  // Discard the test value and jump to the default if present, otherwise to
868  // the end of the statement.
869  __ bind(&next_test);
870  __ Drop(1);  // Switch value is no longer needed.
871  if (default_clause == NULL) {
872    __ b(nested_statement.break_target());
873  } else {
874    __ b(default_clause->body_target());
875  }
876
877  // Compile all the case bodies.
878  for (int i = 0; i < clauses->length(); i++) {
879    Comment cmnt(masm_, "[ Case body");
880    CaseClause* clause = clauses->at(i);
881    __ bind(clause->body_target());
882    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
883    VisitStatements(clause->statements());
884  }
885
886  __ bind(nested_statement.break_target());
887  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
888}
889
890
891void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
892  Comment cmnt(masm_, "[ ForInStatement");
893  SetStatementPosition(stmt);
894
895  Label loop, exit;
896  ForIn loop_statement(this, stmt);
897  increment_loop_depth();
898
899  // Get the object to enumerate over. Both SpiderMonkey and JSC
900  // ignore null and undefined in contrast to the specification; see
901  // ECMA-262 section 12.6.4.
902  VisitForAccumulatorValue(stmt->enumerable());
903  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
904  __ cmp(r0, ip);
905  __ b(eq, &exit);
906  Register null_value = r5;
907  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
908  __ cmp(r0, null_value);
909  __ b(eq, &exit);
910
911  // Convert the object to a JS object.
912  Label convert, done_convert;
913  __ JumpIfSmi(r0, &convert);
914  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
915  __ b(hs, &done_convert);
916  __ bind(&convert);
917  __ push(r0);
918  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
919  __ bind(&done_convert);
920  __ push(r0);
921
922  // Check cache validity in generated code. This is a fast case for
923  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
924  // guarantee cache validity, call the runtime system to check cache
925  // validity or get the property names in a fixed array.
926  Label next, call_runtime;
927  // Preload a couple of values used in the loop.
928  Register empty_fixed_array_value = r6;
929  __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
930  Register empty_descriptor_array_value = r7;
931  __ LoadRoot(empty_descriptor_array_value,
932              Heap::kEmptyDescriptorArrayRootIndex);
933  __ mov(r1, r0);
934  __ bind(&next);
935
936  // Check that there are no elements.  Register r1 contains the
937  // current JS object we've reached through the prototype chain.
938  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
939  __ cmp(r2, empty_fixed_array_value);
940  __ b(ne, &call_runtime);
941
942  // Check that instance descriptors are not empty so that we can
943  // check for an enum cache.  Leave the map in r2 for the subsequent
944  // prototype load.
945  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
946  __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOffset));
947  __ cmp(r3, empty_descriptor_array_value);
948  __ b(eq, &call_runtime);
949
950  // Check that there is an enum cache in the non-empty instance
951  // descriptors (r3).  This is the case if the next enumeration
952  // index field does not contain a smi.
953  __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
954  __ JumpIfSmi(r3, &call_runtime);
955
956  // For all objects but the receiver, check that the cache is empty.
957  Label check_prototype;
958  __ cmp(r1, r0);
959  __ b(eq, &check_prototype);
960  __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
961  __ cmp(r3, empty_fixed_array_value);
962  __ b(ne, &call_runtime);
963
964  // Load the prototype from the map and loop if non-null.
965  __ bind(&check_prototype);
966  __ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
967  __ cmp(r1, null_value);
968  __ b(ne, &next);
969
970  // The enum cache is valid.  Load the map of the object being
971  // iterated over and use the cache for the iteration.
972  Label use_cache;
973  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
974  __ b(&use_cache);
975
976  // Get the set of properties to enumerate.
977  __ bind(&call_runtime);
978  __ push(r0);  // Duplicate the enumerable object on the stack.
979  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
980
981  // If we got a map from the runtime call, we can do a fast
982  // modification check. Otherwise, we got a fixed array, and we have
983  // to do a slow check.
984  Label fixed_array;
985  __ mov(r2, r0);
986  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
987  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
988  __ cmp(r1, ip);
989  __ b(ne, &fixed_array);
990
991  // We got a map in register r0. Get the enumeration cache from it.
992  __ bind(&use_cache);
993  __ ldr(r1, FieldMemOperand(r0, Map::kInstanceDescriptorsOffset));
994  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
995  __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
996
997  // Set up the four remaining stack slots.
998  __ push(r0);  // Map.
999  __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
1000  __ mov(r0, Operand(Smi::FromInt(0)));
1001  // Push enumeration cache, enumeration cache length (as smi) and zero.
1002  __ Push(r2, r1, r0);
1003  __ jmp(&loop);
1004
1005  // We got a fixed array in register r0. Iterate through that.
1006  __ bind(&fixed_array);
1007  __ mov(r1, Operand(Smi::FromInt(0)));  // Map (0) - force slow check.
1008  __ Push(r1, r0);
1009  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1010  __ mov(r0, Operand(Smi::FromInt(0)));
1011  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
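
  // Editor's sketch of the five stack slots set up above for the loop below
  // (they are removed with Drop(5) when the loop exits):
  //   sp[0 * kPointerSize] : current index (smi)
  //   sp[1 * kPointerSize] : length of the cache array / fixed array (smi)
  //   sp[2 * kPointerSize] : enum cache array or fixed array of keys
  //   sp[3 * kPointerSize] : map of the enumerable, or smi 0 for the slow path
  //   sp[4 * kPointerSize] : the enumerable object itself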
1012
1013  // Generate code for doing the condition check.
1014  __ bind(&loop);
1015  // Load the current count to r0, load the length to r1.
1016  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1017  __ cmp(r0, r1);  // Compare to the array length.
1018  __ b(hs, loop_statement.break_target());
1019
1020  // Get the current entry of the array into register r3.
1021  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1022  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1023  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1024
1025  // Get the expected map from the stack or a zero map in the
1026  // permanent slow case into register r2.
1027  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1028
1029  // Check if the expected map still matches that of the enumerable.
1030  // If not, we have to filter the key.
1031  Label update_each;
1032  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1033  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1034  __ cmp(r4, Operand(r2));
1035  __ b(eq, &update_each);
1036
1037  // Convert the entry to a string or (smi) 0 if it isn't a property
1038  // any more. If the property has been removed while iterating, we
1039  // just skip it.
1040  __ push(r1);  // Enumerable.
1041  __ push(r3);  // Current entry.
1042  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
1043  __ mov(r3, Operand(r0), SetCC);
1044  __ b(eq, loop_statement.continue_target());
1045
1046  // Update the 'each' property or variable from the possibly filtered
1047  // entry in register r3.
1048  __ bind(&update_each);
1049  __ mov(result_register(), r3);
1050  // Perform the assignment as if via '='.
1051  { EffectContext context(this);
1052    EmitAssignment(stmt->each(), stmt->AssignmentId());
1053  }
1054
1055  // Generate code for the body of the loop.
1056  Visit(stmt->body());
1057
1058  // Generate code for going to the next element by incrementing
1059  // the index (smi) stored on top of the stack.
1060  __ bind(loop_statement.continue_target());
1061  __ pop(r0);
1062  __ add(r0, r0, Operand(Smi::FromInt(1)));
1063  __ push(r0);
1064
1065  EmitStackCheck(stmt);
1066  __ b(&loop);
1067
1068  // Remove the pointers stored on the stack.
1069  __ bind(loop_statement.break_target());
1070  __ Drop(5);
1071
1072  // Exit and decrement the loop depth.
1073  __ bind(&exit);
1074  decrement_loop_depth();
1075}
1076
1077
1078void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1079                                       bool pretenure) {
1080  // Use the fast case closure allocation code that allocates in new
1081  // space for nested functions that don't need literals cloning. If
1082  // we're running with the --always-opt or the --prepare-always-opt
1083  // flag, we need to use the runtime function so that the new function
1084  // we are creating here gets a chance to have its code optimized and
1085  // doesn't just get a copy of the existing unoptimized code.
1086  if (!FLAG_always_opt &&
1087      !FLAG_prepare_always_opt &&
1088      !pretenure &&
1089      scope()->is_function_scope() &&
1090      info->num_literals() == 0) {
1091    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
1092    __ mov(r0, Operand(info));
1093    __ push(r0);
1094    __ CallStub(&stub);
1095  } else {
1096    __ mov(r0, Operand(info));
1097    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1098                              : Heap::kFalseValueRootIndex);
1099    __ Push(cp, r0, r1);
1100    __ CallRuntime(Runtime::kNewClosure, 3);
1101  }
1102  context()->Plug(r0);
1103}
1104
1105
1106void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1107  Comment cmnt(masm_, "[ VariableProxy");
1108  EmitVariableLoad(expr->var());
1109}
1110
1111
1112MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
1113    Slot* slot,
1114    Label* slow) {
1115  ASSERT(slot->type() == Slot::CONTEXT);
1116  Register context = cp;
1117  Register next = r3;
1118  Register temp = r4;
1119
1120  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
1121    if (s->num_heap_slots() > 0) {
1122      if (s->calls_eval()) {
1123        // Check that extension is NULL.
1124        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1125        __ tst(temp, temp);
1126        __ b(ne, slow);
1127      }
1128      __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
1129      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1130      // Walk the rest of the chain without clobbering cp.
1131      context = next;
1132    }
1133  }
1134  // Check that last extension is NULL.
1135  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1136  __ tst(temp, temp);
1137  __ b(ne, slow);
1138
1139  // This function is used only for loads, not stores, so it's safe to
1140  // return a cp-based operand (the write barrier cannot be allowed to
1141  // destroy the cp register).
1142  return ContextOperand(context, slot->index());
1143}
1144
1145
1146void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1147    Slot* slot,
1148    TypeofState typeof_state,
1149    Label* slow,
1150    Label* done) {
1151  // Generate fast-case code for variables that might be shadowed by
1152  // eval-introduced variables.  Eval is used a lot without
1153  // introducing variables.  In those cases, we do not want to
1154  // perform a runtime call for all variables in the scope
1155  // containing the eval.
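  //
  // Illustrative JavaScript (editor's example): in
  //   function f(x) { eval(s); return x; }
  // the load of 'x' cannot be resolved statically, because eval(s) might have
  // introduced a shadowing binding for 'x'. The fast cases below assume no
  // shadowing occurred and bail out to 'slow' otherwise.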
1156  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
1157    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
1158    __ jmp(done);
1159  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
1160    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
1161    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
1162    if (potential_slot != NULL) {
1163      // Generate fast case for locals that rewrite to slots.
1164      __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
1165      if (potential_slot->var()->mode() == Variable::CONST) {
1166        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1167        __ cmp(r0, ip);
1168        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1169      }
1170      __ jmp(done);
1171    } else if (rewrite != NULL) {
1172      // Generate fast case for calls of an argument function.
1173      Property* property = rewrite->AsProperty();
1174      if (property != NULL) {
1175        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1176        Literal* key_literal = property->key()->AsLiteral();
1177        if (obj_proxy != NULL &&
1178            key_literal != NULL &&
1179            obj_proxy->IsArguments() &&
1180            key_literal->handle()->IsSmi()) {
1181          // Load arguments object if there are no eval-introduced
1182          // variables. Then load the argument from the arguments
1183          // object using keyed load.
1184          __ ldr(r1,
1185                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1186                                                   slow));
1187          __ mov(r0, Operand(key_literal->handle()));
1188          Handle<Code> ic =
1189              isolate()->builtins()->KeyedLoadIC_Initialize();
1190          EmitCallIC(ic, RelocInfo::CODE_TARGET);
1191          __ jmp(done);
1192        }
1193      }
1194    }
1195  }
1196}
1197
1198
1199void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1200    Slot* slot,
1201    TypeofState typeof_state,
1202    Label* slow) {
1203  Register current = cp;
1204  Register next = r1;
1205  Register temp = r2;
1206
1207  Scope* s = scope();
1208  while (s != NULL) {
1209    if (s->num_heap_slots() > 0) {
1210      if (s->calls_eval()) {
1211        // Check that extension is NULL.
1212        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1213        __ tst(temp, temp);
1214        __ b(ne, slow);
1215      }
1216      // Load next context in chain.
1217      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
1218      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1219      // Walk the rest of the chain without clobbering cp.
1220      current = next;
1221    }
1222    // If no outer scope calls eval, we do not need to check more
1223    // context extensions.
1224    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
1225    s = s->outer_scope();
1226  }
1227
1228  if (s->is_eval_scope()) {
1229    Label loop, fast;
1230    if (!current.is(next)) {
1231      __ Move(next, current);
1232    }
1233    __ bind(&loop);
1234    // Terminate at global context.
1235    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1236    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
1237    __ cmp(temp, ip);
1238    __ b(eq, &fast);
1239    // Check that extension is NULL.
1240    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1241    __ tst(temp, temp);
1242    __ b(ne, slow);
1243    // Load next context in chain.
1244    __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
1245    __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1246    __ b(&loop);
1247    __ bind(&fast);
1248  }
1249
1250  __ ldr(r0, GlobalObjectOperand());
1251  __ mov(r2, Operand(slot->var()->name()));
1252  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1253      ? RelocInfo::CODE_TARGET
1254      : RelocInfo::CODE_TARGET_CONTEXT;
1255  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1256  EmitCallIC(ic, mode);
1257}
1258
1259
1260void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1261  // Four cases: non-this global variables, lookup slots, all other
1262  // types of slots, and parameters that rewrite to explicit property
1263  // accesses on the arguments object.
1264  Slot* slot = var->AsSlot();
1265  Property* property = var->AsProperty();
1266
1267  if (var->is_global() && !var->is_this()) {
1268    Comment cmnt(masm_, "Global variable");
1269    // Use inline caching. Variable name is passed in r2 and the global
1270    // object (receiver) in r0.
1271    __ ldr(r0, GlobalObjectOperand());
1272    __ mov(r2, Operand(var->name()));
1273    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1274    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1275    context()->Plug(r0);
1276
1277  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
1278    Label done, slow;
1279
1280    // Generate code for loading from variables potentially shadowed
1281    // by eval-introduced variables.
1282    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
1283
1284    __ bind(&slow);
1285    Comment cmnt(masm_, "Lookup slot");
1286    __ mov(r1, Operand(var->name()));
1287    __ Push(cp, r1);  // Context and name.
1288    __ CallRuntime(Runtime::kLoadContextSlot, 2);
1289    __ bind(&done);
1290
1291    context()->Plug(r0);
1292
1293  } else if (slot != NULL) {
1294    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1295                            ? "Context slot"
1296                            : "Stack slot");
1297    if (var->mode() == Variable::CONST) {
1298      // Constants may be the hole value if they have not been initialized.
1299      // Unhole them.
1300      MemOperand slot_operand = EmitSlotSearch(slot, r0);
1301      __ ldr(r0, slot_operand);
1302      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1303      __ cmp(r0, ip);
1304      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1305      context()->Plug(r0);
1306    } else {
1307      context()->Plug(slot);
1308    }
1309  } else {
1310    Comment cmnt(masm_, "Rewritten parameter");
1311    ASSERT_NOT_NULL(property);
1312    // Rewritten parameter accesses are of the form "slot[literal]".
1313
1314    // Assert that the object is in a slot.
1315    Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
1316    ASSERT_NOT_NULL(object_var);
1317    Slot* object_slot = object_var->AsSlot();
1318    ASSERT_NOT_NULL(object_slot);
1319
1320    // Load the object.
1321    Move(r1, object_slot);
1322
1323    // Assert that the key is a smi.
1324    Literal* key_literal = property->key()->AsLiteral();
1325    ASSERT_NOT_NULL(key_literal);
1326    ASSERT(key_literal->handle()->IsSmi());
1327
1328    // Load the key.
1329    __ mov(r0, Operand(key_literal->handle()));
1330
1331    // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1332    Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1333    EmitCallIC(ic, RelocInfo::CODE_TARGET);
1334    context()->Plug(r0);
1335  }
1336}
1337
1338
1339void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1340  Comment cmnt(masm_, "[ RegExpLiteral");
1341  Label materialized;
1342  // Registers will be used as follows:
1343  // r5 = materialized value (RegExp literal)
1344  // r4 = JS function, literals array
1345  // r3 = literal index
1346  // r2 = RegExp pattern
1347  // r1 = RegExp flags
1348  // r0 = RegExp literal clone
1349  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1350  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1351  int literal_offset =
1352      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1353  __ ldr(r5, FieldMemOperand(r4, literal_offset));
1354  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1355  __ cmp(r5, ip);
1356  __ b(ne, &materialized);
1357
1358  // Create regexp literal using runtime function.
1359  // Result will be in r0.
1360  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1361  __ mov(r2, Operand(expr->pattern()));
1362  __ mov(r1, Operand(expr->flags()));
1363  __ Push(r4, r3, r2, r1);
1364  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1365  __ mov(r5, r0);
1366
1367  __ bind(&materialized);
1368  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1369  Label allocated, runtime_allocate;
1370  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1371  __ jmp(&allocated);
1372
1373  __ bind(&runtime_allocate);
1374  __ push(r5);
1375  __ mov(r0, Operand(Smi::FromInt(size)));
1376  __ push(r0);
1377  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1378  __ pop(r5);
1379
1380  __ bind(&allocated);
1381  // After this, registers are used as follows:
1382  // r0: Newly allocated regexp.
1383  // r5: Materialized regexp.
1384  // r2: temp.
1385  __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
1386  context()->Plug(r0);
1387}
1388
1389
1390void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1391  Comment cmnt(masm_, "[ ObjectLiteral");
1392  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1393  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1394  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1395  __ mov(r1, Operand(expr->constant_properties()));
1396  int flags = expr->fast_elements()
1397      ? ObjectLiteral::kFastElements
1398      : ObjectLiteral::kNoFlags;
1399  flags |= expr->has_function()
1400      ? ObjectLiteral::kHasFunction
1401      : ObjectLiteral::kNoFlags;
1402  __ mov(r0, Operand(Smi::FromInt(flags)));
1403  __ Push(r3, r2, r1, r0);
1404  if (expr->depth() > 1) {
1405    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1406  } else {
1407    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1408  }
1409
1410  // If result_saved is true the result is on top of the stack.  If
1411  // result_saved is false the result is in r0.
1412  bool result_saved = false;
1413
1414  // Mark all computed expressions that are bound to a key that
1415  // is shadowed by a later occurrence of the same key. For the
1416  // marked expressions, no store code is emitted.
1417  expr->CalculateEmitStore();
1418
1419  for (int i = 0; i < expr->properties()->length(); i++) {
1420    ObjectLiteral::Property* property = expr->properties()->at(i);
1421    if (property->IsCompileTimeValue()) continue;
1422
1423    Literal* key = property->key();
1424    Expression* value = property->value();
1425    if (!result_saved) {
1426      __ push(r0);  // Save result on stack
1427      result_saved = true;
1428    }
1429    switch (property->kind()) {
1430      case ObjectLiteral::Property::CONSTANT:
1431        UNREACHABLE();
1432      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1433        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1434        // Fall through.
1435      case ObjectLiteral::Property::COMPUTED:
1436        if (key->handle()->IsSymbol()) {
1437          if (property->emit_store()) {
1438            VisitForAccumulatorValue(value);
1439            __ mov(r2, Operand(key->handle()));
1440            __ ldr(r1, MemOperand(sp));
1441            Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
1442            EmitCallIC(ic, RelocInfo::CODE_TARGET);
1443            PrepareForBailoutForId(key->id(), NO_REGISTERS);
1444          } else {
1445            VisitForEffect(value);
1446          }
1447          break;
1448        }
1449        // Fall through.
1450      case ObjectLiteral::Property::PROTOTYPE:
1451        // Duplicate receiver on stack.
1452        __ ldr(r0, MemOperand(sp));
1453        __ push(r0);
1454        VisitForStackValue(key);
1455        VisitForStackValue(value);
1456        if (property->emit_store()) {
1457          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
1458          __ push(r0);
1459          __ CallRuntime(Runtime::kSetProperty, 4);
1460        } else {
1461          __ Drop(3);
1462        }
1463        break;
1464      case ObjectLiteral::Property::GETTER:
1465      case ObjectLiteral::Property::SETTER:
1466        // Duplicate receiver on stack.
1467        __ ldr(r0, MemOperand(sp));
1468        __ push(r0);
1469        VisitForStackValue(key);
1470        __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
1471                           Smi::FromInt(1) :
1472                           Smi::FromInt(0)));
1473        __ push(r1);
1474        VisitForStackValue(value);
1475        __ CallRuntime(Runtime::kDefineAccessor, 4);
1476        break;
1477    }
1478  }
1479
1480  if (expr->has_function()) {
1481    ASSERT(result_saved);
1482    __ ldr(r0, MemOperand(sp));
1483    __ push(r0);
1484    __ CallRuntime(Runtime::kToFastProperties, 1);
1485  }
1486
1487  if (result_saved) {
1488    context()->PlugTOS();
1489  } else {
1490    context()->Plug(r0);
1491  }
1492}
1493
1494
1495void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1496  Comment cmnt(masm_, "[ ArrayLiteral");
1497
1498  ZoneList<Expression*>* subexprs = expr->values();
1499  int length = subexprs->length();
1500
1501  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1502  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1503  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1504  __ mov(r1, Operand(expr->constant_elements()));
1505  __ Push(r3, r2, r1);
1506  if (expr->constant_elements()->map() ==
1507      isolate()->heap()->fixed_cow_array_map()) {
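        // Constant elements backed by the copy-on-write array map can be shared
        // by all clones until one of them is written to, so use the COW stub.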
1508    FastCloneShallowArrayStub stub(
1509        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1510    __ CallStub(&stub);
1511    __ IncrementCounter(
1512        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1513  } else if (expr->depth() > 1) {
1514    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1515  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1516    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1517  } else {
1518    FastCloneShallowArrayStub stub(
1519        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
1520    __ CallStub(&stub);
1521  }
1522
1523  bool result_saved = false;  // Is the result saved to the stack?
1524
1525  // Emit code to evaluate all the non-constant subexpressions and to store
1526  // them into the newly cloned array.
1527  for (int i = 0; i < length; i++) {
1528    Expression* subexpr = subexprs->at(i);
1529    // If the subexpression is a literal or a simple materialized literal it
1530    // is already set in the cloned array.
1531    if (subexpr->AsLiteral() != NULL ||
1532        CompileTimeValue::IsCompileTimeValue(subexpr)) {
1533      continue;
1534    }
1535
1536    if (!result_saved) {
1537      __ push(r0);
1538      result_saved = true;
1539    }
1540    VisitForAccumulatorValue(subexpr);
1541
1542    // Store the subexpression value in the array's elements.
1543    __ ldr(r1, MemOperand(sp));  // Copy of array literal.
1544    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
1545    int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1546    __ str(result_register(), FieldMemOperand(r1, offset));
1547
1548    // Update the write barrier for the array store with r0 as the scratch
1549    // register.
1550    __ RecordWrite(r1, Operand(offset), r2, result_register());
1551
1552    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1553  }
1554
1555  if (result_saved) {
1556    context()->PlugTOS();
1557  } else {
1558    context()->Plug(r0);
1559  }
1560}
1561
1562
1563void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1564  Comment cmnt(masm_, "[ Assignment");
1565  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1566  // on the left-hand side.
1567  if (!expr->target()->IsValidLeftHandSide()) {
1568    VisitForEffect(expr->target());
1569    return;
1570  }
1571
1572  // Left-hand side can only be a property, a global or a (parameter or local)
1573  // slot. Variables rewritten to .arguments are treated as KEYED_PROPERTY.
1574  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1575  LhsKind assign_type = VARIABLE;
1576  Property* property = expr->target()->AsProperty();
1577  if (property != NULL) {
1578    assign_type = (property->key()->IsPropertyName())
1579        ? NAMED_PROPERTY
1580        : KEYED_PROPERTY;
1581  }
1582
1583  // Evaluate LHS expression.
1584  switch (assign_type) {
1585    case VARIABLE:
1586      // Nothing to do here.
1587      break;
1588    case NAMED_PROPERTY:
1589      if (expr->is_compound()) {
1590        // We need the receiver both on the stack and in the accumulator.
1591        VisitForAccumulatorValue(property->obj());
1592        __ push(result_register());
1593      } else {
1594        VisitForStackValue(property->obj());
1595      }
1596      break;
1597    case KEYED_PROPERTY:
1598      if (expr->is_compound()) {
1599        if (property->is_arguments_access()) {
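              // A parameter rewritten to an arguments access loads the arguments
              // object from its slot and uses the literal index as the key, so
              // the subexpressions are not visited.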
1600          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1601          __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
1602          __ push(r0);
1603          __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
1604        } else {
1605          VisitForStackValue(property->obj());
1606          VisitForAccumulatorValue(property->key());
1607        }
1608        __ ldr(r1, MemOperand(sp, 0));
1609        __ push(r0);
1610      } else {
1611        if (property->is_arguments_access()) {
1612          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1613          __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
1614          __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
1615          __ Push(r1, r0);
1616        } else {
1617          VisitForStackValue(property->obj());
1618          VisitForStackValue(property->key());
1619        }
1620      }
1621      break;
1622  }
1623
1624  // For compound assignments we need another deoptimization point after the
1625  // variable/property load.
1626  if (expr->is_compound()) {
1627    { AccumulatorValueContext context(this);
1628      switch (assign_type) {
1629        case VARIABLE:
1630          EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1631          PrepareForBailout(expr->target(), TOS_REG);
1632          break;
1633        case NAMED_PROPERTY:
1634          EmitNamedPropertyLoad(property);
1635          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1636          break;
1637        case KEYED_PROPERTY:
1638          EmitKeyedPropertyLoad(property);
1639          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1640          break;
1641      }
1642    }
1643
1644    Token::Value op = expr->binary_op();
1645    __ push(r0);  // Left operand goes on the stack.
1646    VisitForAccumulatorValue(expr->value());
1647
1648    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1649        ? OVERWRITE_RIGHT
1650        : NO_OVERWRITE;
1651    SetSourcePosition(expr->position() + 1);
1652    AccumulatorValueContext context(this);
1653    if (ShouldInlineSmiCase(op)) {
1654      EmitInlineSmiBinaryOp(expr,
1655                            op,
1656                            mode,
1657                            expr->target(),
1658                            expr->value());
1659    } else {
1660      EmitBinaryOp(op, mode);
1661    }
1662
1663    // Deoptimization point in case the binary operation may have side effects.
1664    PrepareForBailout(expr->binary_operation(), TOS_REG);
1665  } else {
1666    VisitForAccumulatorValue(expr->value());
1667  }
1668
1669  // Record source position before possible IC call.
1670  SetSourcePosition(expr->position());
1671
1672  // Store the value.
1673  switch (assign_type) {
1674    case VARIABLE:
1675      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1676                             expr->op());
1677      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1678      context()->Plug(r0);
1679      break;
1680    case NAMED_PROPERTY:
1681      EmitNamedPropertyAssignment(expr);
1682      break;
1683    case KEYED_PROPERTY:
1684      EmitKeyedPropertyAssignment(expr);
1685      break;
1686  }
1687}
1688
1689
1690void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1691  SetSourcePosition(prop->position());
1692  Literal* key = prop->key()->AsLiteral();
1693  __ mov(r2, Operand(key->handle()));
1694  // Call the load IC. It expects the receiver in r0 and the property name in r2.
1695  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1696  EmitCallIC(ic, RelocInfo::CODE_TARGET);
1697}
1698
1699
1700void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1701  SetSourcePosition(prop->position());
1702  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1703  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1704  EmitCallIC(ic, RelocInfo::CODE_TARGET);
1705}
1706
1707
1708void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
1709                                              Token::Value op,
1710                                              OverwriteMode mode,
1711                                              Expression* left_expr,
1712                                              Expression* right_expr) {
1713  Label done, smi_case, stub_call;
1714
1715  Register scratch1 = r2;
1716  Register scratch2 = r3;
1717
1718  // Get the arguments.
1719  Register left = r1;
1720  Register right = r0;
1721  __ pop(left);
1722
1723  // Perform combined smi check on both operands.
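      // Or-ing the operands leaves the low (smi tag) bit set iff at least one of
      // them is a heap object, so a single tag test covers both.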
1724  __ orr(scratch1, left, Operand(right));
1725  STATIC_ASSERT(kSmiTag == 0);
1726  JumpPatchSite patch_site(masm_);
1727  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1728
1729  __ bind(&stub_call);
1730  TypeRecordingBinaryOpStub stub(op, mode);
1731  EmitCallIC(stub.GetCode(), &patch_site);
1732  __ jmp(&done);
1733
1734  __ bind(&smi_case);
1735  // Smi case. This code works the same way as the smi-smi case in the type
1736  // recording binary operation stub, see
1737  // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for comments.
1738  switch (op) {
1739    case Token::SAR:
1740      __ b(&stub_call);
1741      __ GetLeastBitsFromSmi(scratch1, right, 5);
1742      __ mov(right, Operand(left, ASR, scratch1));
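          // The arithmetic shift of the tagged value may leave the tag bit set,
          // so clear it to form a valid smi.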
1743      __ bic(right, right, Operand(kSmiTagMask));
1744      break;
1745    case Token::SHL: {
1746      __ b(&stub_call);
1747      __ SmiUntag(scratch1, left);
1748      __ GetLeastBitsFromSmi(scratch2, right, 5);
1749      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
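          // Adding 2^30 leaves the sign bit clear exactly when the shifted value
          // is in the 31-bit smi range, so a negative result means overflow.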
1750      __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
1751      __ b(mi, &stub_call);
1752      __ SmiTag(right, scratch1);
1753      break;
1754    }
1755    case Token::SHR: {
1756      __ b(&stub_call);
1757      __ SmiUntag(scratch1, left);
1758      __ GetLeastBitsFromSmi(scratch2, right, 5);
1759      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
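          // The result is only a valid smi if it is in the non-negative smi
          // range, i.e. the top two bits are clear.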
1760      __ tst(scratch1, Operand(0xc0000000));
1761      __ b(ne, &stub_call);
1762      __ SmiTag(right, scratch1);
1763      break;
1764    }
1765    case Token::ADD:
1766      __ add(scratch1, left, Operand(right), SetCC);
1767      __ b(vs, &stub_call);
1768      __ mov(right, scratch1);
1769      break;
1770    case Token::SUB:
1771      __ sub(scratch1, left, Operand(right), SetCC);
1772      __ b(vs, &stub_call);
1773      __ mov(right, scratch1);
1774      break;
1775    case Token::MUL: {
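          // One factor is untagged so the product of the tagged left operand and
          // the untagged right operand is already a tagged smi. It fits in 32 bits
          // iff the high word of the 64-bit product equals the sign extension of
          // the low word. A zero product needs care: if the other operand is
          // negative the result is -0, which is not a smi, so use the stub.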
1776      __ SmiUntag(ip, right);
1777      __ smull(scratch1, scratch2, left, ip);
1778      __ mov(ip, Operand(scratch1, ASR, 31));
1779      __ cmp(ip, Operand(scratch2));
1780      __ b(ne, &stub_call);
1781      __ tst(scratch1, Operand(scratch1));
1782      __ mov(right, Operand(scratch1), LeaveCC, ne);
1783      __ b(ne, &done);
1784      __ add(scratch2, right, Operand(left), SetCC);
1785      __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
1786      __ b(mi, &stub_call);
1787      break;
1788    }
1789    case Token::BIT_OR:
1790      __ orr(right, left, Operand(right));
1791      break;
1792    case Token::BIT_AND:
1793      __ and_(right, left, Operand(right));
1794      break;
1795    case Token::BIT_XOR:
1796      __ eor(right, left, Operand(right));
1797      break;
1798    default:
1799      UNREACHABLE();
1800  }
1801
1802  __ bind(&done);
1803  context()->Plug(r0);
1804}
1805
1806
1807void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1808                                     OverwriteMode mode) {
1809  __ pop(r1);
1810  TypeRecordingBinaryOpStub stub(op, mode);
1811  EmitCallIC(stub.GetCode(), NULL);
1812  context()->Plug(r0);
1813}
1814
1815
1816void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1817  // Invalid left-hand sides are rewritten to have a 'throw
1818  // ReferenceError' on the left-hand side.
1819  if (!expr->IsValidLeftHandSide()) {
1820    VisitForEffect(expr);
1821    return;
1822  }
1823
1824  // Left-hand side can only be a property, a global or a (parameter or local)
1825  // slot. Variables rewritten to .arguments are treated as KEYED_PROPERTY.
1826  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1827  LhsKind assign_type = VARIABLE;
1828  Property* prop = expr->AsProperty();
1829  if (prop != NULL) {
1830    assign_type = (prop->key()->IsPropertyName())
1831        ? NAMED_PROPERTY
1832        : KEYED_PROPERTY;
1833  }
1834
1835  switch (assign_type) {
1836    case VARIABLE: {
1837      Variable* var = expr->AsVariableProxy()->var();
1838      EffectContext context(this);
1839      EmitVariableAssignment(var, Token::ASSIGN);
1840      break;
1841    }
1842    case NAMED_PROPERTY: {
1843      __ push(r0);  // Preserve value.
1844      VisitForAccumulatorValue(prop->obj());
1845      __ mov(r1, r0);
1846      __ pop(r0);  // Restore value.
1847      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1848      Handle<Code> ic = is_strict_mode()
1849          ? isolate()->builtins()->StoreIC_Initialize_Strict()
1850          : isolate()->builtins()->StoreIC_Initialize();
1851      EmitCallIC(ic, RelocInfo::CODE_TARGET);
1852      break;
1853    }
1854    case KEYED_PROPERTY: {
1855      __ push(r0);  // Preserve value.
1856      if (prop->is_synthetic()) {
1857        ASSERT(prop->obj()->AsVariableProxy() != NULL);
1858        ASSERT(prop->key()->AsLiteral() != NULL);
1859        { AccumulatorValueContext for_object(this);
1860          EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1861        }
1862        __ mov(r2, r0);
1863        __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
1864      } else {
1865        VisitForStackValue(prop->obj());
1866        VisitForAccumulatorValue(prop->key());
1867        __ mov(r1, r0);
1868        __ pop(r2);
1869      }
1870      __ pop(r0);  // Restore value.
1871      Handle<Code> ic = is_strict_mode()
1872          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1873          : isolate()->builtins()->KeyedStoreIC_Initialize();
1874      EmitCallIC(ic, RelocInfo::CODE_TARGET);
1875      break;
1876    }
1877  }
1878  PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1879  context()->Plug(r0);
1880}
1881
1882
1883void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1884                                               Token::Value op) {
1885  // Left-hand sides that rewrite to explicit property accesses do not reach
1886  // here.
1887  ASSERT(var != NULL);
1888  ASSERT(var->is_global() || var->AsSlot() != NULL);
1889
1890  if (var->is_global()) {
1891    ASSERT(!var->is_this());
1892    // Assignment to a global variable.  Use inline caching for the
1893    // assignment.  Right-hand-side value is passed in r0, variable name in
1894    // r2, and the global object in r1.
1895    __ mov(r2, Operand(var->name()));
1896    __ ldr(r1, GlobalObjectOperand());
1897    Handle<Code> ic = is_strict_mode()
1898        ? isolate()->builtins()->StoreIC_Initialize_Strict()
1899        : isolate()->builtins()->StoreIC_Initialize();
1900    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1901
1902  } else if (op == Token::INIT_CONST) {
1903    // Like var declarations, const declarations are hoisted to function
1904    // scope.  However, unlike var initializers, const initializers are able
1905    // to drill a hole to that function context, even from inside a 'with'
1906    // context.  We thus bypass the normal static scope lookup.
1907    Slot* slot = var->AsSlot();
1908    Label skip;
1909    switch (slot->type()) {
1910      case Slot::PARAMETER:
1911        // No const parameters.
1912        UNREACHABLE();
1913        break;
1914      case Slot::LOCAL:
1915        // Detect const reinitialization by checking for the hole value.
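            // The slot holds the hole until the const is initialized, so any
            // other value means it was already initialized and the store is
            // skipped.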
1916        __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
1917        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1918        __ cmp(r1, ip);
1919        __ b(ne, &skip);
1920        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
1921        break;
1922      case Slot::CONTEXT: {
1923        __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
1924        __ ldr(r2, ContextOperand(r1, slot->index()));
1925        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1926        __ cmp(r2, ip);
1927        __ b(ne, &skip);
1928        __ str(r0, ContextOperand(r1, slot->index()));
1929        int offset = Context::SlotOffset(slot->index());
1930        __ mov(r3, r0);  // Preserve the stored value in r0.
1931        __ RecordWrite(r1, Operand(offset), r3, r2);
1932        break;
1933      }
1934      case Slot::LOOKUP:
1935        __ push(r0);
1936        __ mov(r0, Operand(slot->var()->name()));
1937        __ Push(cp, r0);  // Context and name.
1938        __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1939        break;
1940    }
1941    __ bind(&skip);
1942
1943  } else if (var->mode() != Variable::CONST) {
1944    // Perform the assignment for non-const variables.  Const assignments
1945    // are simply skipped.
1946    Slot* slot = var->AsSlot();
1947    switch (slot->type()) {
1948      case Slot::PARAMETER:
1949      case Slot::LOCAL:
1950        // Perform the assignment.
1951        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
1952        break;
1953
1954      case Slot::CONTEXT: {
1955        MemOperand target = EmitSlotSearch(slot, r1);
1956        // Perform the assignment and issue the write barrier.
1957        __ str(result_register(), target);
1958        // RecordWrite may destroy all its register arguments.
1959        __ mov(r3, result_register());
1960        int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
1961        __ RecordWrite(r1, Operand(offset), r2, r3);
1962        break;
1963      }
1964
1965      case Slot::LOOKUP:
1966        // Call the runtime for the assignment.
1967        __ push(r0);  // Value.
1968        __ mov(r1, Operand(slot->var()->name()));
1969        __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
1970        __ Push(cp, r1, r0);  // Context, name, strict mode.
1971        __ CallRuntime(Runtime::kStoreContextSlot, 4);
1972        break;
1973    }
1974  }
1975}
1976
1977
1978void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1979  // Assignment to a property, using a named store IC.
1980  Property* prop = expr->target()->AsProperty();
1981  ASSERT(prop != NULL);
1982  ASSERT(prop->key()->AsLiteral() != NULL);
1983
1984  // If the assignment starts a block of assignments to the same object,
1985  // change to slow case to avoid the quadratic behavior of repeatedly
1986  // adding fast properties.
1987  if (expr->starts_initialization_block()) {
1988    __ push(result_register());
1989    __ ldr(ip, MemOperand(sp, kPointerSize));  // Receiver is now under value.
1990    __ push(ip);
1991    __ CallRuntime(Runtime::kToSlowProperties, 1);
1992    __ pop(result_register());
1993  }
1994
1995  // Record source code position before IC call.
1996  SetSourcePosition(expr->position());
1997  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1998  // Load the receiver into r1. Leave a copy on the stack if it is needed for
1999  // turning the receiver into the fast case.
2000  if (expr->ends_initialization_block()) {
2001    __ ldr(r1, MemOperand(sp));
2002  } else {
2003    __ pop(r1);
2004  }
2005
2006  Handle<Code> ic = is_strict_mode()
2007      ? isolate()->builtins()->StoreIC_Initialize_Strict()
2008      : isolate()->builtins()->StoreIC_Initialize();
2009  EmitCallIC(ic, RelocInfo::CODE_TARGET);
2010
2011  // If the assignment ends an initialization block, revert to fast case.
2012  if (expr->ends_initialization_block()) {
2013    __ push(r0);  // Result of assignment, saved even if not needed.
2014    // Receiver is under the result value.
2015    __ ldr(ip, MemOperand(sp, kPointerSize));
2016    __ push(ip);
2017    __ CallRuntime(Runtime::kToFastProperties, 1);
2018    __ pop(r0);
2019    __ Drop(1);
2020  }
2021  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2022  context()->Plug(r0);
2023}
2024
2025
2026void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2027  // Assignment to a property, using a keyed store IC.
2028
2029  // If the assignment starts a block of assignments to the same object,
2030  // change to slow case to avoid the quadratic behavior of repeatedly
2031  // adding fast properties.
2032  if (expr->starts_initialization_block()) {
2033    __ push(result_register());
2034    // Receiver is now under the key and value.
2035    __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
2036    __ push(ip);
2037    __ CallRuntime(Runtime::kToSlowProperties, 1);
2038    __ pop(result_register());
2039  }
2040
2041  // Record source code position before IC call.
2042  SetSourcePosition(expr->position());
2043  __ pop(r1);  // Key.
2044  // Load the receiver into r2. Leave a copy on the stack if it is needed for
2045  // turning the receiver into the fast case.
2046  if (expr->ends_initialization_block()) {
2047    __ ldr(r2, MemOperand(sp));
2048  } else {
2049    __ pop(r2);
2050  }
2051
2052  Handle<Code> ic = is_strict_mode()
2053      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
2054      : isolate()->builtins()->KeyedStoreIC_Initialize();
2055  EmitCallIC(ic, RelocInfo::CODE_TARGET);
2056
2057  // If the assignment ends an initialization block, revert to fast case.
2058  if (expr->ends_initialization_block()) {
2059    __ push(r0);  // Result of assignment, saved even if not needed.
2060    // Receiver is under the result value.
2061    __ ldr(ip, MemOperand(sp, kPointerSize));
2062    __ push(ip);
2063    __ CallRuntime(Runtime::kToFastProperties, 1);
2064    __ pop(r0);
2065    __ Drop(1);
2066  }
2067  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2068  context()->Plug(r0);
2069}
2070
2071
2072void FullCodeGenerator::VisitProperty(Property* expr) {
2073  Comment cmnt(masm_, "[ Property");
2074  Expression* key = expr->key();
2075
2076  if (key->IsPropertyName()) {
2077    VisitForAccumulatorValue(expr->obj());
2078    EmitNamedPropertyLoad(expr);
2079    context()->Plug(r0);
2080  } else {
2081    VisitForStackValue(expr->obj());
2082    VisitForAccumulatorValue(expr->key());
2083    __ pop(r1);
2084    EmitKeyedPropertyLoad(expr);
2085    context()->Plug(r0);
2086  }
2087}
2088
2089void FullCodeGenerator::EmitCallWithIC(Call* expr,
2090                                       Handle<Object> name,
2091                                       RelocInfo::Mode mode) {
2092  // Code common for calls using the IC.
2093  ZoneList<Expression*>* args = expr->arguments();
2094  int arg_count = args->length();
2095  { PreservePositionScope scope(masm()->positions_recorder());
2096    for (int i = 0; i < arg_count; i++) {
2097      VisitForStackValue(args->at(i));
2098    }
2099    __ mov(r2, Operand(name));
2100  }
2101  // Record source position for debugger.
2102  SetSourcePosition(expr->position());
2103  // Call the IC initialization code.
2104  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2105  Handle<Code> ic =
2106      isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
2107  EmitCallIC(ic, mode);
2108  RecordJSReturnSite(expr);
2109  // Restore context register.
2110  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2111  context()->Plug(r0);
2112}
2113
2114
2115void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2116                                            Expression* key,
2117                                            RelocInfo::Mode mode) {
2118  // Load the key.
2119  VisitForAccumulatorValue(key);
2120
2121  // Swap the name of the function and the receiver on the stack to follow
2122  // the calling convention for call ICs.
2123  __ pop(r1);
2124  __ push(r0);
2125  __ push(r1);
2126
2127  // Code common for calls using the IC.
2128  ZoneList<Expression*>* args = expr->arguments();
2129  int arg_count = args->length();
2130  { PreservePositionScope scope(masm()->positions_recorder());
2131    for (int i = 0; i < arg_count; i++) {
2132      VisitForStackValue(args->at(i));
2133    }
2134  }
2135  // Record source position for debugger.
2136  SetSourcePosition(expr->position());
2137  // Call the IC initialization code.
2138  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2139  Handle<Code> ic =
2140      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
2141  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
2142  EmitCallIC(ic, mode);
2143  RecordJSReturnSite(expr);
2144  // Restore context register.
2145  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2146  context()->DropAndPlug(1, r0);  // Drop the key still on the stack.
2147}
2148
2149
2150void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2151  // Code common for calls using the call stub.
2152  ZoneList<Expression*>* args = expr->arguments();
2153  int arg_count = args->length();
2154  { PreservePositionScope scope(masm()->positions_recorder());
2155    for (int i = 0; i < arg_count; i++) {
2156      VisitForStackValue(args->at(i));
2157    }
2158  }
2159  // Record source position for debugger.
2160  SetSourcePosition(expr->position());
2161  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2162  CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2163  __ CallStub(&stub);
2164  RecordJSReturnSite(expr);
2165  // Restore context register.
2166  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2167  context()->DropAndPlug(1, r0);
2168}
2169
2170
2171void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2172                                                      int arg_count) {
2173  // Push copy of the first argument or undefined if it doesn't exist.
2174  if (arg_count > 0) {
2175    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2176  } else {
2177    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2178  }
2179  __ push(r1);
2180
2181  // Push the receiver of the enclosing function and do runtime call.
2182  __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
2183  __ push(r1);
2184  // Push the strict mode flag.
2185  __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
2186  __ push(r1);
2187
2188  __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2189                 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2190                 : Runtime::kResolvePossiblyDirectEval, 4);
2191}
2192
2193
2194void FullCodeGenerator::VisitCall(Call* expr) {
2195#ifdef DEBUG
2196  // We want to verify that RecordJSReturnSite gets called on all paths
2197  // through this function.  Avoid early returns.
2198  expr->return_is_recorded_ = false;
2199#endif
2200
2201  Comment cmnt(masm_, "[ Call");
2202  Expression* fun = expr->expression();
2203  Variable* var = fun->AsVariableProxy()->AsVariable();
2204
2205  if (var != NULL && var->is_possibly_eval()) {
2206    // In a call to eval, we first call %ResolvePossiblyDirectEval to
2207    // resolve the function we need to call and the receiver of the
2208    // call.  Then we call the resolved function using the given
2209    // arguments.
2210    ZoneList<Expression*>* args = expr->arguments();
2211    int arg_count = args->length();
2212
2213    { PreservePositionScope pos_scope(masm()->positions_recorder());
2214      VisitForStackValue(fun);
2215      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2216      __ push(r2);  // Reserved receiver slot.
2217
2218      // Push the arguments.
2219      for (int i = 0; i < arg_count; i++) {
2220        VisitForStackValue(args->at(i));
2221      }
2222
2223      // If we know that eval can only be shadowed by eval-introduced
2224      // variables we attempt to load the global eval function directly
2225      // in generated code. If we succeed, there is no need to perform a
2226      // context lookup in the runtime system.
2227      Label done;
2228      if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2229        Label slow;
2230        EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2231                                          NOT_INSIDE_TYPEOF,
2232                                          &slow);
2233        // Push the function and resolve eval.
2234        __ push(r0);
2235        EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2236        __ jmp(&done);
2237        __ bind(&slow);
2238      }
2239
2240      // Push copy of the function (found below the arguments) and
2241      // resolve eval.
2242      __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2243      __ push(r1);
2244      EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2245      if (done.is_linked()) {
2246        __ bind(&done);
2247      }
2248
2249      // The runtime call returns a pair of values in r0 (function) and
2250      // r1 (receiver). Touch up the stack with the right values.
2251      __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2252      __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2253    }
2254
2255    // Record source position for debugger.
2256    SetSourcePosition(expr->position());
2257    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2258    CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2259    __ CallStub(&stub);
2260    RecordJSReturnSite(expr);
2261    // Restore context register.
2262    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2263    context()->DropAndPlug(1, r0);
2264  } else if (var != NULL && !var->is_this() && var->is_global()) {
2265    // Push global object as receiver for the call IC.
2266    __ ldr(r0, GlobalObjectOperand());
2267    __ push(r0);
2268    EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
2269  } else if (var != NULL && var->AsSlot() != NULL &&
2270             var->AsSlot()->type() == Slot::LOOKUP) {
2271    // Call to a lookup slot (dynamically introduced variable).
2272    Label slow, done;
2273
2274    { PreservePositionScope scope(masm()->positions_recorder());
2275      // Generate code for loading from variables potentially shadowed
2276      // by eval-introduced variables.
2277      EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
2278                                      NOT_INSIDE_TYPEOF,
2279                                      &slow,
2280                                      &done);
2281    }
2282
2283    __ bind(&slow);
2284    // Call the runtime to find the function to call (returned in r0)
2285    // and the object holding it (returned in r1).
2286    __ push(context_register());
2287    __ mov(r2, Operand(var->name()));
2288    __ push(r2);
2289    __ CallRuntime(Runtime::kLoadContextSlot, 2);
2290    __ Push(r0, r1);  // Function, receiver.
2291
2292    // If fast case code has been generated, emit code to push the
2293    // function and receiver and have the slow path jump around this
2294    // code.
2295    if (done.is_linked()) {
2296      Label call;
2297      __ b(&call);
2298      __ bind(&done);
2299      // Push function.
2300      __ push(r0);
2301      // Push global receiver.
2302      __ ldr(r1, GlobalObjectOperand());
2303      __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2304      __ push(r1);
2305      __ bind(&call);
2306    }
2307
2308    EmitCallWithStub(expr);
2309  } else if (fun->AsProperty() != NULL) {
2310    // Call to an object property.
2311    Property* prop = fun->AsProperty();
2312    Literal* key = prop->key()->AsLiteral();
2313    if (key != NULL && key->handle()->IsSymbol()) {
2314      // Call to a named property, use call IC.
2315      { PreservePositionScope scope(masm()->positions_recorder());
2316        VisitForStackValue(prop->obj());
2317      }
2318      EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2319    } else {
2320      // Call to a keyed property.
2321      // For a synthetic property, use a keyed load IC followed by a function
2322      // call; for a regular property, use a keyed call IC.
2323      if (prop->is_synthetic()) {
2324        // Do not visit the object and key subexpressions (they are shared
2325        // by all occurrences of the same rewritten parameter).
2326        ASSERT(prop->obj()->AsVariableProxy() != NULL);
2327        ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2328        Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2329        MemOperand operand = EmitSlotSearch(slot, r1);
2330        __ ldr(r1, operand);
2331
2332        ASSERT(prop->key()->AsLiteral() != NULL);
2333        ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2334        __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
2335
2336        // Record source code position for IC call.
2337        SetSourcePosition(prop->position());
2338
2339        Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2340        EmitCallIC(ic, RelocInfo::CODE_TARGET);
2341        __ ldr(r1, GlobalObjectOperand());
2342        __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2343        __ Push(r0, r1);  // Function, receiver.
2344        EmitCallWithStub(expr);
2345      } else {
2346        { PreservePositionScope scope(masm()->positions_recorder());
2347          VisitForStackValue(prop->obj());
2348        }
2349        EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
2350      }
2351    }
2352  } else {
2353    { PreservePositionScope scope(masm()->positions_recorder());
2354      VisitForStackValue(fun);
2355    }
2356    // Load global receiver object.
2357    __ ldr(r1, GlobalObjectOperand());
2358    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2359    __ push(r1);
2360    // Emit function call.
2361    EmitCallWithStub(expr);
2362  }
2363
2364#ifdef DEBUG
2365  // RecordJSReturnSite should have been called.
2366  ASSERT(expr->return_is_recorded_);
2367#endif
2368}
2369
2370
2371void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2372  Comment cmnt(masm_, "[ CallNew");
2373  // According to ECMA-262, section 11.2.2, page 44, the function
2374  // expression in new calls must be evaluated before the
2375  // arguments.
2376
2377  // Push constructor on the stack.  If it's not a function it's used as
2378  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2379  // ignored.
2380  VisitForStackValue(expr->expression());
2381
2382  // Push the arguments ("left-to-right") on the stack.
2383  ZoneList<Expression*>* args = expr->arguments();
2384  int arg_count = args->length();
2385  for (int i = 0; i < arg_count; i++) {
2386    VisitForStackValue(args->at(i));
2387  }
2388
2389  // Call the construct call builtin that handles allocation and
2390  // constructor invocation.
2391  SetSourcePosition(expr->position());
2392
2393  // Load function and argument count into r1 and r0.
2394  __ mov(r0, Operand(arg_count));
2395  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2396
2397  Handle<Code> construct_builtin =
2398      isolate()->builtins()->JSConstructCall();
2399  __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2400  context()->Plug(r0);
2401}
2402
2403
2404void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
2405  ASSERT(args->length() == 1);
2406
2407  VisitForAccumulatorValue(args->at(0));
2408
2409  Label materialize_true, materialize_false;
2410  Label* if_true = NULL;
2411  Label* if_false = NULL;
2412  Label* fall_through = NULL;
2413  context()->PrepareTest(&materialize_true, &materialize_false,
2414                         &if_true, &if_false, &fall_through);
2415
2416  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2417  __ tst(r0, Operand(kSmiTagMask));
2418  Split(eq, if_true, if_false, fall_through);
2419
2420  context()->Plug(if_true, if_false);
2421}
2422
2423
2424void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2425  ASSERT(args->length() == 1);
2426
2427  VisitForAccumulatorValue(args->at(0));
2428
2429  Label materialize_true, materialize_false;
2430  Label* if_true = NULL;
2431  Label* if_false = NULL;
2432  Label* fall_through = NULL;
2433  context()->PrepareTest(&materialize_true, &materialize_false,
2434                         &if_true, &if_false, &fall_through);
2435
2436  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
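      // A non-negative smi has both the tag bit and the sign bit clear, so one
      // mask test covers both conditions.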
2437  __ tst(r0, Operand(kSmiTagMask | 0x80000000));
2438  Split(eq, if_true, if_false, fall_through);
2439
2440  context()->Plug(if_true, if_false);
2441}
2442
2443
2444void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2445  ASSERT(args->length() == 1);
2446
2447  VisitForAccumulatorValue(args->at(0));
2448
2449  Label materialize_true, materialize_false;
2450  Label* if_true = NULL;
2451  Label* if_false = NULL;
2452  Label* fall_through = NULL;
2453  context()->PrepareTest(&materialize_true, &materialize_false,
2454                         &if_true, &if_false, &fall_through);
2455
2456  __ JumpIfSmi(r0, if_false);
2457  __ LoadRoot(ip, Heap::kNullValueRootIndex);
2458  __ cmp(r0, ip);
2459  __ b(eq, if_true);
2460  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2461  // Undetectable objects behave like undefined when tested with typeof.
2462  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
2463  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2464  __ b(ne, if_false);
2465  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2466  __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
2467  __ b(lt, if_false);
2468  __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
2469  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2470  Split(le, if_true, if_false, fall_through);
2471
2472  context()->Plug(if_true, if_false);
2473}
2474
2475
2476void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2477  ASSERT(args->length() == 1);
2478
2479  VisitForAccumulatorValue(args->at(0));
2480
2481  Label materialize_true, materialize_false;
2482  Label* if_true = NULL;
2483  Label* if_false = NULL;
2484  Label* fall_through = NULL;
2485  context()->PrepareTest(&materialize_true, &materialize_false,
2486                         &if_true, &if_false, &fall_through);
2487
2488  __ JumpIfSmi(r0, if_false);
2489  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2490  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2491  Split(ge, if_true, if_false, fall_through);
2492
2493  context()->Plug(if_true, if_false);
2494}
2495
2496
2497void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2498  ASSERT(args->length() == 1);
2499
2500  VisitForAccumulatorValue(args->at(0));
2501
2502  Label materialize_true, materialize_false;
2503  Label* if_true = NULL;
2504  Label* if_false = NULL;
2505  Label* fall_through = NULL;
2506  context()->PrepareTest(&materialize_true, &materialize_false,
2507                         &if_true, &if_false, &fall_through);
2508
2509  __ JumpIfSmi(r0, if_false);
2510  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2511  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
2512  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2513  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2514  Split(ne, if_true, if_false, fall_through);
2515
2516  context()->Plug(if_true, if_false);
2517}
2518
2519
2520void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2521    ZoneList<Expression*>* args) {
2522
2523  ASSERT(args->length() == 1);
2524
2525  VisitForAccumulatorValue(args->at(0));
2526
2527  Label materialize_true, materialize_false;
2528  Label* if_true = NULL;
2529  Label* if_false = NULL;
2530  Label* fall_through = NULL;
2531  context()->PrepareTest(&materialize_true, &materialize_false,
2532                         &if_true, &if_false, &fall_through);
2533
2534  if (FLAG_debug_code) __ AbortIfSmi(r0);
2535
2536  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2537  __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
2538  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2539  __ b(ne, if_true);
2540
2541  // Check for fast case object. Generate false result for slow case object.
2542  __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2543  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2544  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
2545  __ cmp(r2, ip);
2546  __ b(eq, if_false);
2547
2548  // Look for valueOf symbol in the descriptor array, and indicate false if
2549  // found. The type is not checked, so if it is a transition it is a false
2550  // negative.
2551  __ ldr(r4, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
2552  __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
2553  // r4: descriptor array
2554  // r3: length of descriptor array
2555  // Calculate the end of the descriptor array.
2556  STATIC_ASSERT(kSmiTag == 0);
2557  STATIC_ASSERT(kSmiTagSize == 1);
2558  STATIC_ASSERT(kPointerSize == 4);
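      // The length is a smi (the value shifted left by the tag size), so shifting
      // it left by kPointerSizeLog2 - kSmiTagSize converts it to a byte offset.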
2559  __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2560  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
2561
2562  // Calculate location of the first key name.
2563  __ add(r4,
2564         r4,
2565         Operand(FixedArray::kHeaderSize - kHeapObjectTag +
2566                 DescriptorArray::kFirstIndex * kPointerSize));
2567  // Loop through all the keys in the descriptor array. If one of these is the
2568  // symbol valueOf the result is false.
2569  Label entry, loop;
2570  // The use of ip to store the valueOf symbol assumes that it is not otherwise
2571  // used in the loop below.
2572  __ mov(ip, Operand(FACTORY->value_of_symbol()));
2573  __ jmp(&entry);
2574  __ bind(&loop);
2575  __ ldr(r3, MemOperand(r4, 0));
2576  __ cmp(r3, ip);
2577  __ b(eq, if_false);
2578  __ add(r4, r4, Operand(kPointerSize));
2579  __ bind(&entry);
2580  __ cmp(r4, Operand(r2));
2581  __ b(ne, &loop);
2582
2583  // If a valueOf property is not found on the object, check that its
2584  // prototype is the unmodified String prototype. If not, the result is false.
2585  __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
2586  __ tst(r2, Operand(kSmiTagMask));
2587  __ b(eq, if_false);
2588  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2589  __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
2590  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
2591  __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2592  __ cmp(r2, r3);
2593  __ b(ne, if_false);
2594
2595  // Set the bit in the map to indicate that the object has been checked as
2596  // safe for default valueOf, and set the true result.
2597  __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
2598  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2599  __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
2600  __ jmp(if_true);
2601
2602  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2603  context()->Plug(if_true, if_false);
2604}
2605
2606
2607void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2608  ASSERT(args->length() == 1);
2609
2610  VisitForAccumulatorValue(args->at(0));
2611
2612  Label materialize_true, materialize_false;
2613  Label* if_true = NULL;
2614  Label* if_false = NULL;
2615  Label* fall_through = NULL;
2616  context()->PrepareTest(&materialize_true, &materialize_false,
2617                         &if_true, &if_false, &fall_through);
2618
2619  __ JumpIfSmi(r0, if_false);
2620  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2621  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2622  Split(eq, if_true, if_false, fall_through);
2623
2624  context()->Plug(if_true, if_false);
2625}
2626
2627
2628void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2629  ASSERT(args->length() == 1);
2630
2631  VisitForAccumulatorValue(args->at(0));
2632
2633  Label materialize_true, materialize_false;
2634  Label* if_true = NULL;
2635  Label* if_false = NULL;
2636  Label* fall_through = NULL;
2637  context()->PrepareTest(&materialize_true, &materialize_false,
2638                         &if_true, &if_false, &fall_through);
2639
2640  __ JumpIfSmi(r0, if_false);
2641  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2642  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2643  Split(eq, if_true, if_false, fall_through);
2644
2645  context()->Plug(if_true, if_false);
2646}
2647
2648
2649void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2650  ASSERT(args->length() == 1);
2651
2652  VisitForAccumulatorValue(args->at(0));
2653
2654  Label materialize_true, materialize_false;
2655  Label* if_true = NULL;
2656  Label* if_false = NULL;
2657  Label* fall_through = NULL;
2658  context()->PrepareTest(&materialize_true, &materialize_false,
2659                         &if_true, &if_false, &fall_through);
2660
2661  __ JumpIfSmi(r0, if_false);
2662  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2663  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2664  Split(eq, if_true, if_false, fall_through);
2665
2666  context()->Plug(if_true, if_false);
2667}
2668
2669
2670
2671void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2672  ASSERT(args->length() == 0);
2673
2674  Label materialize_true, materialize_false;
2675  Label* if_true = NULL;
2676  Label* if_false = NULL;
2677  Label* fall_through = NULL;
2678  context()->PrepareTest(&materialize_true, &materialize_false,
2679                         &if_true, &if_false, &fall_through);
2680
2681  // Get the frame pointer for the calling frame.
2682  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2683
2684  // Skip the arguments adaptor frame if it exists.
2685  Label check_frame_marker;
2686  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
2687  __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2688  __ b(ne, &check_frame_marker);
2689  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
2690
2691  // Check the marker in the calling frame.
2692  __ bind(&check_frame_marker);
2693  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
2694  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
2695  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2696  Split(eq, if_true, if_false, fall_through);
2697
2698  context()->Plug(if_true, if_false);
2699}
2700
2701
2702void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2703  ASSERT(args->length() == 2);
2704
2705  // Load the two objects into registers and perform the comparison.
2706  VisitForStackValue(args->at(0));
2707  VisitForAccumulatorValue(args->at(1));
2708
2709  Label materialize_true, materialize_false;
2710  Label* if_true = NULL;
2711  Label* if_false = NULL;
2712  Label* fall_through = NULL;
2713  context()->PrepareTest(&materialize_true, &materialize_false,
2714                         &if_true, &if_false, &fall_through);
2715
2716  __ pop(r1);
2717  __ cmp(r0, r1);
2718  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2719  Split(eq, if_true, if_false, fall_through);
2720
2721  context()->Plug(if_true, if_false);
2722}
2723
2724
2725void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2726  ASSERT(args->length() == 1);
2727
2728  // ArgumentsAccessStub expects the key in r1 and the formal
2729  // parameter count in r0.
2730  VisitForAccumulatorValue(args->at(0));
2731  __ mov(r1, r0);
2732  __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
2733  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2734  __ CallStub(&stub);
2735  context()->Plug(r0);
2736}
2737
2738
2739void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2740  ASSERT(args->length() == 0);
2741
2742  Label exit;
2743  // Get the number of formal parameters.
2744  __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
2745
2746  // Check if the calling frame is an arguments adaptor frame.
2747  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2748  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
2749  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2750  __ b(ne, &exit);
2751
2752  // Arguments adaptor case: Read the arguments length from the
2753  // adaptor frame.
2754  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
2755
2756  __ bind(&exit);
2757  context()->Plug(r0);
2758}
2759
2760
2761void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2762  ASSERT(args->length() == 1);
2763  Label done, null, function, non_function_constructor;
2764
2765  VisitForAccumulatorValue(args->at(0));
2766
2767  // If the object is a smi, we return null.
2768  __ JumpIfSmi(r0, &null);
2769
2770  // Check that the object is a JS object but take special care of JS
2771  // functions to make sure they have 'Function' as their class.
2772  __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);  // Map is now in r0.
2773  __ b(lt, &null);
2774
2775  // As long as JS_FUNCTION_TYPE is the last instance type and it is
2776  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2777  // LAST_JS_OBJECT_TYPE.
2778  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2779  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2780  __ cmp(r1, Operand(JS_FUNCTION_TYPE));
2781  __ b(eq, &function);
2782
2783  // Check if the constructor in the map is a function.
2784  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
2785  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2786  __ b(ne, &non_function_constructor);
2787
2788  // r0 now contains the constructor function. Grab the
2789  // instance class name from there.
2790  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2791  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2792  __ b(&done);
2793
2794  // Functions have class 'Function'.
2795  __ bind(&function);
2796  __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2797  __ jmp(&done);
2798
2799  // Objects with a non-function constructor have class 'Object'.
2800  __ bind(&non_function_constructor);
2801  __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
2802  __ jmp(&done);
2803
2804  // Non-JS objects have class null.
2805  __ bind(&null);
2806  __ LoadRoot(r0, Heap::kNullValueRootIndex);
2807
2808  // All done.
2809  __ bind(&done);
2810
2811  context()->Plug(r0);
2812}
2813
2814
2815void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2816  // Conditionally generate a log call.
2817  // Args:
2818  //   0 (literal string): The type of logging (corresponds to the flags).
2819  //     This is used to determine whether or not to generate the log call.
2820  //   1 (string): Format string.  Access the string at argument index 2
2821  //     with '%2s' (see Logger::LogRuntime for all the formats).
2822  //   2 (array): Arguments to the format string.
2823  ASSERT_EQ(args->length(), 3);
2824#ifdef ENABLE_LOGGING_AND_PROFILING
2825  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2826    VisitForStackValue(args->at(1));
2827    VisitForStackValue(args->at(2));
2828    __ CallRuntime(Runtime::kLog, 2);
2829  }
2830#endif
2831  // Finally, we're expected to leave a value on the top of the stack.
2832  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2833  context()->Plug(r0);
2834}
2835
2836
2837void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2838  ASSERT(args->length() == 0);
2839
2840  Label slow_allocate_heapnumber;
2841  Label heapnumber_allocated;
2842
2843  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2844  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
2845  __ jmp(&heapnumber_allocated);
2846
2847  __ bind(&slow_allocate_heapnumber);
2848  // Allocate a heap number.
2849  __ CallRuntime(Runtime::kNumberAlloc, 0);
2850  __ mov(r4, Operand(r0));
2851
2852  __ bind(&heapnumber_allocated);
2853
2854  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
2855  // by computing:
2856  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
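      // The high word 0x41300000 encodes exponent 2^20 with the top mantissa
      // bits zero, so with the random bits as the low word the double equals
      // 2^20 + random * 2^-32. Subtracting 1.0 x 2^20 leaves random * 2^-32,
      // a value in [0, 1).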
2857  if (CpuFeatures::IsSupported(VFP3)) {
2858    __ PrepareCallCFunction(1, r0);
2859    __ mov(r0, Operand(ExternalReference::isolate_address()));
2860    __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2861
2862    CpuFeatures::Scope scope(VFP3);
2863    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
2864    // Create this constant using mov/orr to avoid PC relative load.
2865    __ mov(r1, Operand(0x41000000));
2866    __ orr(r1, r1, Operand(0x300000));
2867    // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
2868    __ vmov(d7, r0, r1);
2869    // Move 0x4130000000000000 to VFP.
2870    __ mov(r0, Operand(0, RelocInfo::NONE));
2871    __ vmov(d8, r0, r1);
2872    // Subtract and store the result in the heap number.
2873    __ vsub(d7, d7, d8);
2874    __ sub(r0, r4, Operand(kHeapObjectTag));
2875    __ vstr(d7, r0, HeapNumber::kValueOffset);
2876    __ mov(r0, r4);
2877  } else {
2878    __ PrepareCallCFunction(2, r0);
2879    __ mov(r0, Operand(r4));
2880    __ mov(r1, Operand(ExternalReference::isolate_address()));
2881    __ CallCFunction(
2882        ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
2883  }
2884
2885  context()->Plug(r0);
2886}
2887
2888
2889void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2890  // Load the arguments on the stack and call the stub.
2891  SubStringStub stub;
2892  ASSERT(args->length() == 3);
2893  VisitForStackValue(args->at(0));
2894  VisitForStackValue(args->at(1));
2895  VisitForStackValue(args->at(2));
2896  __ CallStub(&stub);
2897  context()->Plug(r0);
2898}
2899
2900
2901void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2902  // Load the arguments on the stack and call the stub.
2903  RegExpExecStub stub;
2904  ASSERT(args->length() == 4);
2905  VisitForStackValue(args->at(0));
2906  VisitForStackValue(args->at(1));
2907  VisitForStackValue(args->at(2));
2908  VisitForStackValue(args->at(3));
2909  __ CallStub(&stub);
2910  context()->Plug(r0);
2911}
2912
2913
2914void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2915  ASSERT(args->length() == 1);
2916
2917  VisitForAccumulatorValue(args->at(0));  // Load the object.
2918
2919  Label done;
2920  // If the object is a smi return the object.
2921  __ JumpIfSmi(r0, &done);
2922  // If the object is not a value type, return the object.
2923  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
2924  __ b(ne, &done);
2925  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
2926
2927  __ bind(&done);
2928  context()->Plug(r0);
2929}
2930
2931
2932void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2933  // Load the arguments on the stack and call the runtime function.
2934  ASSERT(args->length() == 2);
2935  VisitForStackValue(args->at(0));
2936  VisitForStackValue(args->at(1));
2937  MathPowStub stub;
2938  __ CallStub(&stub);
2939  context()->Plug(r0);
2940}
2941
2942
2943void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2944  ASSERT(args->length() == 2);
2945
2946  VisitForStackValue(args->at(0));  // Load the object.
2947  VisitForAccumulatorValue(args->at(1));  // Load the value.
2948  __ pop(r1);  // r0 = value. r1 = object.
2949
2950  Label done;
2951  // If the object is a smi, return the value.
2952  __ JumpIfSmi(r1, &done);
2953
2954  // If the object is not a value type, return the value.
2955  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
2956  __ b(ne, &done);
2957
2958  // Store the value.
2959  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
2960  // Update the write barrier.  Save the value as it will be
2961  // overwritten by the write barrier code and is needed afterward.
2962  __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
2963
2964  __ bind(&done);
2965  context()->Plug(r0);
2966}
2967
2968
2969void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2970  ASSERT_EQ(args->length(), 1);
2971
2972  // Load the argument on the stack and call the stub.
2973  VisitForStackValue(args->at(0));
2974
2975  NumberToStringStub stub;
2976  __ CallStub(&stub);
2977  context()->Plug(r0);
2978}
2979
2980
2981void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
2982  ASSERT(args->length() == 1);
2983
2984  VisitForAccumulatorValue(args->at(0));
2985
2986  Label done;
2987  StringCharFromCodeGenerator generator(r0, r1);
2988  generator.GenerateFast(masm_);
2989  __ jmp(&done);
2990
2991  NopRuntimeCallHelper call_helper;
2992  generator.GenerateSlow(masm_, call_helper);
2993
2994  __ bind(&done);
2995  context()->Plug(r1);
2996}
2997
2998
2999void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
3000  ASSERT(args->length() == 2);
3001
3002  VisitForStackValue(args->at(0));
3003  VisitForAccumulatorValue(args->at(1));
3004
3005  Register object = r1;
3006  Register index = r0;
3007  Register scratch = r2;
3008  Register result = r3;
3009
3010  __ pop(object);
3011
3012  Label need_conversion;
3013  Label index_out_of_range;
3014  Label done;
3015  StringCharCodeAtGenerator generator(object,
3016                                      index,
3017                                      scratch,
3018                                      result,
3019                                      &need_conversion,
3020                                      &need_conversion,
3021                                      &index_out_of_range,
3022                                      STRING_INDEX_IS_NUMBER);
3023  generator.GenerateFast(masm_);
3024  __ jmp(&done);
3025
3026  __ bind(&index_out_of_range);
3027  // When the index is out of range, the spec requires us to return
3028  // NaN.
3029  __ LoadRoot(result, Heap::kNanValueRootIndex);
3030  __ jmp(&done);
3031
3032  __ bind(&need_conversion);
3033  // Load the undefined value into the result register, which will
3034  // trigger conversion.
3035  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3036  __ jmp(&done);
3037
3038  NopRuntimeCallHelper call_helper;
3039  generator.GenerateSlow(masm_, call_helper);
3040
3041  __ bind(&done);
3042  context()->Plug(result);
3043}
3044
3045
3046void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
3047  ASSERT(args->length() == 2);
3048
3049  VisitForStackValue(args->at(0));
3050  VisitForAccumulatorValue(args->at(1));
3051
3052  Register object = r1;
3053  Register index = r0;
3054  Register scratch1 = r2;
3055  Register scratch2 = r3;
3056  Register result = r0;
3057
3058  __ pop(object);
3059
3060  Label need_conversion;
3061  Label index_out_of_range;
3062  Label done;
3063  StringCharAtGenerator generator(object,
3064                                  index,
3065                                  scratch1,
3066                                  scratch2,
3067                                  result,
3068                                  &need_conversion,
3069                                  &need_conversion,
3070                                  &index_out_of_range,
3071                                  STRING_INDEX_IS_NUMBER);
3072  generator.GenerateFast(masm_);
3073  __ jmp(&done);
3074
3075  __ bind(&index_out_of_range);
3076  // When the index is out of range, the spec requires us to return
3077  // the empty string.
3078  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3079  __ jmp(&done);
3080
3081  __ bind(&need_conversion);
3082  // Move smi zero into the result register, which will trigger
3083  // conversion.
3084  __ mov(result, Operand(Smi::FromInt(0)));
3085  __ jmp(&done);
3086
3087  NopRuntimeCallHelper call_helper;
3088  generator.GenerateSlow(masm_, call_helper);
3089
3090  __ bind(&done);
3091  context()->Plug(result);
3092}
3093
3094
3095void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
3096  ASSERT_EQ(2, args->length());
3097
3098  VisitForStackValue(args->at(0));
3099  VisitForStackValue(args->at(1));
3100
3101  StringAddStub stub(NO_STRING_ADD_FLAGS);
3102  __ CallStub(&stub);
3103  context()->Plug(r0);
3104}
3105
3106
3107void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
3108  ASSERT_EQ(2, args->length());
3109
3110  VisitForStackValue(args->at(0));
3111  VisitForStackValue(args->at(1));
3112
3113  StringCompareStub stub;
3114  __ CallStub(&stub);
3115  context()->Plug(r0);
3116}
3117
3118
3119void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
3120  // Load the argument on the stack and call the stub.
3121  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3122                               TranscendentalCacheStub::TAGGED);
3123  ASSERT(args->length() == 1);
3124  VisitForStackValue(args->at(0));
3125  __ CallStub(&stub);
3126  context()->Plug(r0);
3127}
3128
3129
3130void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
3131  // Load the argument on the stack and call the stub.
3132  TranscendentalCacheStub stub(TranscendentalCache::COS,
3133                               TranscendentalCacheStub::TAGGED);
3134  ASSERT(args->length() == 1);
3135  VisitForStackValue(args->at(0));
3136  __ CallStub(&stub);
3137  context()->Plug(r0);
3138}
3139
3140
3141void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
3142  // Load the argument on the stack and call the stub.
3143  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3144                               TranscendentalCacheStub::TAGGED);
3145  ASSERT(args->length() == 1);
3146  VisitForStackValue(args->at(0));
3147  __ CallStub(&stub);
3148  context()->Plug(r0);
3149}
3150
3151
3152void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
3153  // Load the argument on the stack and call the runtime function.
3154  ASSERT(args->length() == 1);
3155  VisitForStackValue(args->at(0));
3156  __ CallRuntime(Runtime::kMath_sqrt, 1);
3157  context()->Plug(r0);
3158}
3159
3160
3161void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
3162  ASSERT(args->length() >= 2);
3163
3164  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
3165  for (int i = 0; i < arg_count + 1; i++) {
3166    VisitForStackValue(args->at(i));
3167  }
3168  VisitForAccumulatorValue(args->last());  // Function.
3169
3170  // InvokeFunction requires the function in r1. Move it in there.
3171  __ mov(r1, result_register());
3172  ParameterCount count(arg_count);
3173  __ InvokeFunction(r1, count, CALL_FUNCTION);
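  // The callee may have installed a different context; restore cp from the
  // current frame.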
3174  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3175  context()->Plug(r0);
3176}
3177
3178
3179void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
3180  RegExpConstructResultStub stub;
3181  ASSERT(args->length() == 3);
3182  VisitForStackValue(args->at(0));
3183  VisitForStackValue(args->at(1));
3184  VisitForStackValue(args->at(2));
3185  __ CallStub(&stub);
3186  context()->Plug(r0);
3187}
3188
3189
3190void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
3191  ASSERT(args->length() == 3);
3192  VisitForStackValue(args->at(0));
3193  VisitForStackValue(args->at(1));
3194  VisitForStackValue(args->at(2));
3195  Label done;
3196  Label slow_case;
3197  Register object = r0;
3198  Register index1 = r1;
3199  Register index2 = r2;
3200  Register elements = r3;
3201  Register scratch1 = r4;
3202  Register scratch2 = r5;
3203
3204  __ ldr(object, MemOperand(sp, 2 * kPointerSize));
3205  // Fetch the map and check if array is in fast case.
3206  // Check that object doesn't require security checks and
3207  // has no indexed interceptor.
3208  __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
3209  __ b(ne, &slow_case);
3210  // Map is now in scratch1.
3211
3212  __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
3213  __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
3214  __ b(ne, &slow_case);
3215
3216  // Check the object's elements are in fast case and writable.
3217  __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
3218  __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
3219  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
3220  __ cmp(scratch1, ip);
3221  __ b(ne, &slow_case);
3222
3223  // Check that both indices are smis.
3224  __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
3225  __ ldr(index2, MemOperand(sp, 0));
3226  __ JumpIfNotBothSmi(index1, index2, &slow_case);
3227
3228  // Check that both indices are valid.
3229  __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
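  // The second cmp is conditional (hi): it only executes when length > index1
  // (unsigned), so the ls branch below is taken unless length > index1 and
  // length > index2. Unsigned compares also reject negative smi indices.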
3230  __ cmp(scratch1, index1);
3231  __ cmp(scratch1, index2, hi);
3232  __ b(ls, &slow_case);
3233
3234  // Bring the address of the elements into index1 and index2.
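  // The indices are smis (value << kSmiTagSize), so shifting left by
  // kPointerSizeLog2 - kSmiTagSize scales them directly to byte offsets.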
3235  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3236  __ add(index1,
3237         scratch1,
3238         Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
3239  __ add(index2,
3240         scratch1,
3241         Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
3242
3243  // Swap elements.
3244  __ ldr(scratch1, MemOperand(index1, 0));
3245  __ ldr(scratch2, MemOperand(index2, 0));
3246  __ str(scratch1, MemOperand(index2, 0));
3247  __ str(scratch2, MemOperand(index1, 0));
3248
3249  Label new_space;
3250  __ InNewSpace(elements, scratch1, eq, &new_space);
3251  // Possible optimization: do a check that both values are Smis
3252  // (or them together and test against the smi mask).
3253
3254  __ mov(scratch1, elements);
3255  __ RecordWriteHelper(elements, index1, scratch2);
3256  __ RecordWriteHelper(scratch1, index2, scratch2);  // scratch1 holds elements.
3257
3258  __ bind(&new_space);
3259  // We are done. Drop elements from the stack, and return undefined.
3260  __ Drop(3);
3261  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3262  __ jmp(&done);
3263
3264  __ bind(&slow_case);
3265  __ CallRuntime(Runtime::kSwapElements, 3);
3266
3267  __ bind(&done);
3268  context()->Plug(r0);
3269}
3270
3271
3272void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
3273  ASSERT_EQ(2, args->length());
3274
3275  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3276  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3277
3278  Handle<FixedArray> jsfunction_result_caches(
3279      isolate()->global_context()->jsfunction_result_caches());
3280  if (jsfunction_result_caches->length() <= cache_id) {
3281    __ Abort("Attempt to use undefined cache.");
3282    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3283    context()->Plug(r0);
3284    return;
3285  }
3286
3287  VisitForAccumulatorValue(args->at(1));
3288
3289  Register key = r0;
3290  Register cache = r1;
3291  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3292  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3293  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3294  __ ldr(cache,
3295         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3296
3298  Label done, not_found;
3299  // The cache's finger field holds the offset of the last-found entry, as a smi.
3300  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3301  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3302  // r2 now holds finger offset as a smi.
3303  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3304  // r3 now points to the start of fixed array elements.
3305  __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
3306  // Note side effect of PreIndex: r3 now points to the key of the pair.
3307  __ cmp(key, r2);
3308  __ b(ne, &not_found);
3309
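  // Cache hit. Entries are (key, value) pairs, so the value sits one word
  // after the key that r3 points at.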
3310  __ ldr(r0, MemOperand(r3, kPointerSize));
3311  __ b(&done);
3312
3313  __ bind(&not_found);
3314  // Call runtime to perform the lookup.
3315  __ Push(cache, key);
3316  __ CallRuntime(Runtime::kGetFromCache, 2);
3317
3318  __ bind(&done);
3319  context()->Plug(r0);
3320}
3321
3322
3323void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
3324  ASSERT_EQ(2, args->length());
3325
3326  Register right = r0;
3327  Register left = r1;
3328  Register tmp = r2;
3329  Register tmp2 = r3;
3330
3331  VisitForStackValue(args->at(0));
3332  VisitForAccumulatorValue(args->at(1));
3333  __ pop(left);
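  // The regexps are considered equivalent if they are the same object, or if
  // both are JSRegExps sharing the same map and the same data array.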
3334
3335  Label done, fail, ok;
3336  __ cmp(left, Operand(right));
3337  __ b(eq, &ok);
3338  // Fail if either is a non-HeapObject.
3339  __ and_(tmp, left, Operand(right));
3340  __ tst(tmp, Operand(kSmiTagMask));
3341  __ b(eq, &fail);
3342  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3343  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3344  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
3345  __ b(ne, &fail);
3346  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3347  __ cmp(tmp, Operand(tmp2));
3348  __ b(ne, &fail);
3349  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3350  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3351  __ cmp(tmp, tmp2);
3352  __ b(eq, &ok);
3353  __ bind(&fail);
3354  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3355  __ jmp(&done);
3356  __ bind(&ok);
3357  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3358  __ bind(&done);
3359
3360  context()->Plug(r0);
3361}
3362
3363
3364void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
3365  VisitForAccumulatorValue(args->at(0));
3366
3367  Label materialize_true, materialize_false;
3368  Label* if_true = NULL;
3369  Label* if_false = NULL;
3370  Label* fall_through = NULL;
3371  context()->PrepareTest(&materialize_true, &materialize_false,
3372                         &if_true, &if_false, &fall_through);
3373
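  // The mask bits are all clear exactly when the hash field holds a cached
  // array index, so eq selects the true branch below.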
3374  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3375  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3376  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3377  Split(eq, if_true, if_false, fall_through);
3378
3379  context()->Plug(if_true, if_false);
3380}
3381
3382
3383void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
3384  ASSERT(args->length() == 1);
3385  VisitForAccumulatorValue(args->at(0));
3386
3387  if (FLAG_debug_code) {
3388    __ AbortIfNotString(r0);
3389  }
3390
3391  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3392  __ IndexFromHash(r0, r0);
3393
3394  context()->Plug(r0);
3395}
3396
3397
3398void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3399  Label bailout, done, one_char_separator, long_separator,
3400      non_trivial_array, not_size_one_array, loop,
3401      empty_separator_loop, one_char_separator_loop,
3402      one_char_separator_loop_entry, long_separator_loop;
3403
3404  ASSERT(args->length() == 2);
3405  VisitForStackValue(args->at(1));
3406  VisitForAccumulatorValue(args->at(0));
3407
3408  // All aliases of the same register have disjoint lifetimes.
3409  Register array = r0;
3410  Register elements = no_reg;  // Will be r0.
3411  Register result = no_reg;  // Will be r0.
3412  Register separator = r1;
3413  Register array_length = r2;
3414  Register result_pos = no_reg;  // Will be r2.
3415  Register string_length = r3;
3416  Register string = r4;
3417  Register element = r5;
3418  Register elements_end = r6;
3419  Register scratch1 = r7;
3420  Register scratch2 = r9;
3421
3422  // Separator operand is on the stack.
3423  __ pop(separator);
3424
3425  // Check that the array is a JSArray.
3426  __ JumpIfSmi(array, &bailout);
3427  __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3428  __ b(ne, &bailout);
3429
3430  // Check that the array has fast elements.
3431  __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
3432  __ tst(scratch2, Operand(1 << Map::kHasFastElements));
3433  __ b(eq, &bailout);
3434
3435  // If the array has length zero, return the empty string.
3436  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3437  __ SmiUntag(array_length, SetCC);
3438  __ b(ne, &non_trivial_array);
3439  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
3440  __ b(&done);
3441
3442  __ bind(&non_trivial_array);
3443
3444  // Get the FixedArray containing array's elements.
3445  elements = array;
3446  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3447  array = no_reg;  // End of array's live range.
3448
3449  // Check that all array elements are sequential ASCII strings, and
3450  // accumulate the sum of their lengths, as a smi-encoded value.
3451  __ mov(string_length, Operand(0));
3452  __ add(element,
3453         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3454  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3455  // Loop condition: while (element < elements_end).
3456  // Live values in registers:
3457  //   elements: Fixed array of strings.
3458  //   array_length: Length of the fixed array of strings (not smi)
3459  //   separator: Separator string
3460  //   string_length: Accumulated sum of string lengths (smi).
3461  //   element: Current array element.
3462  //   elements_end: Array end.
3463  if (FLAG_debug_code) {
3464    __ cmp(array_length, Operand(0));
3465    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
3466  }
3467  __ bind(&loop);
3468  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3469  __ JumpIfSmi(string, &bailout);
3470  __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3471  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3472  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3473  __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3474  __ add(string_length, string_length, Operand(scratch1), SetCC);
3475  __ b(vs, &bailout);
3476  __ cmp(element, elements_end);
3477  __ b(lt, &loop);
3478
3479  // If array_length is 1, return elements[0], a string.
3480  __ cmp(array_length, Operand(1));
3481  __ b(ne, &not_size_one_array);
3482  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3483  __ b(&done);
3484
3485  __ bind(&not_size_one_array);
3486
3487  // Live values in registers:
3488  //   separator: Separator string
3489  //   array_length: Length of the array.
3490  //   string_length: Sum of string lengths (smi).
3491  //   elements: FixedArray of strings.
3492
3493  // Check that the separator is a flat ASCII string.
3494  __ JumpIfSmi(separator, &bailout);
3495  __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3496  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3497  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3498
3499  // Add (separator length times array_length) - separator length to the
3500  // string_length to get the length of the result string. array_length is not
3501  // a smi but the other values are, so the result is a smi.
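  // That is, result_length = sum_of_string_lengths
  //                          + (array_length - 1) * separator_length,
  // computed as (sum - separator_length) + array_length * separator_length.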
3502  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3503  __ sub(string_length, string_length, Operand(scratch1));
3504  __ smull(scratch2, ip, array_length, scratch1);
3505  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3506  // zero.
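  // (The product must fit a non-negative smi: the high word and the sign bit
  // of the low word must both be zero.)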
3507  __ cmp(ip, Operand(0));
3508  __ b(ne, &bailout);
3509  __ tst(scratch2, Operand(0x80000000));
3510  __ b(ne, &bailout);
3511  __ add(string_length, string_length, Operand(scratch2), SetCC);
3512  __ b(vs, &bailout);
3513  __ SmiUntag(string_length);
3514
3515  // Get first element in the array to free up the elements register to be used
3516  // for the result.
3517  __ add(element,
3518         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3519  result = elements;  // End of live range for elements.
3520  elements = no_reg;
3521  // Live values in registers:
3522  //   element: First array element
3523  //   separator: Separator string
3524  //   string_length: Length of result string (not smi)
3525  //   array_length: Length of the array.
3526  __ AllocateAsciiString(result,
3527                         string_length,
3528                         scratch1,
3529                         scratch2,
3530                         elements_end,
3531                         &bailout);
3532  // Prepare for looping. Set up elements_end to end of the array. Set
3533  // result_pos to the position of the result where to write the first
3534  // character.
3535  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3536  result_pos = array_length;  // End of live range for array_length.
3537  array_length = no_reg;
3538  __ add(result_pos,
3539         result,
3540         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3541
3542  // Check the length of the separator.
3543  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3544  __ cmp(scratch1, Operand(Smi::FromInt(1)));
3545  __ b(eq, &one_char_separator);
3546  __ b(gt, &long_separator);
3547
3548  // Empty separator case
3549  __ bind(&empty_separator_loop);
3550  // Live values in registers:
3551  //   result_pos: the position to which we are currently copying characters.
3552  //   element: Current array element.
3553  //   elements_end: Array end.
3554
3555  // Copy next array element to the result.
3556  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3557  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3558  __ SmiUntag(string_length);
3559  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3560  __ CopyBytes(string, result_pos, string_length, scratch1);
3561  __ cmp(element, elements_end);
3562  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
3563  ASSERT(result.is(r0));
3564  __ b(&done);
3565
3566  // One-character separator case
3567  __ bind(&one_char_separator);
3568  // Replace separator with its ascii character value.
3569  __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3570  // Jump into the loop after the code that copies the separator, so the first
3571  // element is not preceded by a separator.
3572  __ jmp(&one_char_separator_loop_entry);
3573
3574  __ bind(&one_char_separator_loop);
3575  // Live values in registers:
3576  //   result_pos: the position to which we are currently copying characters.
3577  //   element: Current array element.
3578  //   elements_end: Array end.
3579  //   separator: Single separator ascii char (in lower byte).
3580
3581  // Copy the separator character to the result.
3582  __ strb(separator, MemOperand(result_pos, 1, PostIndex));
3583
3584  // Copy next array element to the result.
3585  __ bind(&one_char_separator_loop_entry);
3586  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3587  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3588  __ SmiUntag(string_length);
3589  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3590  __ CopyBytes(string, result_pos, string_length, scratch1);
3591  __ cmp(element, elements_end);
3592  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
3593  ASSERT(result.is(r0));
3594  __ b(&done);
3595
3596  // Long separator case (separator is more than one character). Entry is at the
3597  // label long_separator below.
3598  __ bind(&long_separator_loop);
3599  // Live values in registers:
3600  //   result_pos: the position to which we are currently copying characters.
3601  //   element: Current array element.
3602  //   elements_end: Array end.
3603  //   separator: Separator string.
3604
3605  // Copy the separator to the result.
3606  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
3607  __ SmiUntag(string_length);
3608  __ add(string,
3609         separator,
3610         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3611  __ CopyBytes(string, result_pos, string_length, scratch1);
3612
3613  __ bind(&long_separator);
3614  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3615  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3616  __ SmiUntag(string_length);
3617  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3618  __ CopyBytes(string, result_pos, string_length, scratch1);
3619  __ cmp(element, elements_end);
3620  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
3621  ASSERT(result.is(r0));
3622  __ b(&done);
3623
3624  __ bind(&bailout);
3625  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3626  __ bind(&done);
3627  context()->Plug(r0);
3628}
3629
3630
3631void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3632  Handle<String> name = expr->name();
3633  if (name->length() > 0 && name->Get(0) == '_') {
3634    Comment cmnt(masm_, "[ InlineRuntimeCall");
3635    EmitInlineRuntimeCall(expr);
3636    return;
3637  }
3638
3639  Comment cmnt(masm_, "[ CallRuntime");
3640  ZoneList<Expression*>* args = expr->arguments();
3641
3642  if (expr->is_jsruntime()) {
3643    // Prepare for calling JS runtime function.
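    // JS runtime functions are properties of the builtins object, which is
    // pushed here as the receiver for the call IC below.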
3644    __ ldr(r0, GlobalObjectOperand());
3645    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
3646    __ push(r0);
3647  }
3648
3649  // Push the arguments ("left-to-right").
3650  int arg_count = args->length();
3651  for (int i = 0; i < arg_count; i++) {
3652    VisitForStackValue(args->at(i));
3653  }
3654
3655  if (expr->is_jsruntime()) {
3656    // Call the JS runtime function.
3657    __ mov(r2, Operand(expr->name()));
3658    Handle<Code> ic =
3659        isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
3660    EmitCallIC(ic, RelocInfo::CODE_TARGET);
3661    // Restore context register.
3662    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3663  } else {
3664    // Call the C runtime function.
3665    __ CallRuntime(expr->function(), arg_count);
3666  }
3667  context()->Plug(r0);
3668}
3669
3670
3671void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3672  switch (expr->op()) {
3673    case Token::DELETE: {
3674      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3675      Property* prop = expr->expression()->AsProperty();
3676      Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3677
3678      if (prop != NULL) {
3679        if (prop->is_synthetic()) {
3680          // Result of deleting parameters is false, even when they rewrite
3681          // to accesses on the arguments object.
3682          context()->Plug(false);
3683        } else {
3684          VisitForStackValue(prop->obj());
3685          VisitForStackValue(prop->key());
3686          __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
3687          __ push(r1);
3688          __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
3689          context()->Plug(r0);
3690        }
3691      } else if (var != NULL) {
3692        // Delete of an unqualified identifier is disallowed in strict mode
3693        // but "delete this" is allowed.
3694        ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3695        if (var->is_global()) {
3696          __ ldr(r2, GlobalObjectOperand());
3697          __ mov(r1, Operand(var->name()));
3698          __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
3699          __ Push(r2, r1, r0);
3700          __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
3701          context()->Plug(r0);
3702        } else if (var->AsSlot() != NULL &&
3703                   var->AsSlot()->type() != Slot::LOOKUP) {
3704          // Result of deleting non-global, non-dynamic variables is false.
3705          // The subexpression does not have side effects.
3706          context()->Plug(false);
3707        } else {
3708          // Non-global variable.  Call the runtime to try to delete from the
3709          // context where the variable was introduced.
3710          __ push(context_register());
3711          __ mov(r2, Operand(var->name()));
3712          __ push(r2);
3713          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3714          context()->Plug(r0);
3715        }
3716      } else {
3717        // Result of deleting non-property, non-variable reference is true.
3718        // The subexpression may have side effects.
3719        VisitForEffect(expr->expression());
3720        context()->Plug(true);
3721      }
3722      break;
3723    }
3724
3725    case Token::VOID: {
3726      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3727      VisitForEffect(expr->expression());
3728      context()->Plug(Heap::kUndefinedValueRootIndex);
3729      break;
3730    }
3731
3732    case Token::NOT: {
3733      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3734      if (context()->IsEffect()) {
3735        // Unary NOT has no side effects so it's only necessary to visit the
3736        // subexpression.  Match the optimizing compiler by not branching.
3737        VisitForEffect(expr->expression());
3738      } else {
3739        Label materialize_true, materialize_false;
3740        Label* if_true = NULL;
3741        Label* if_false = NULL;
3742        Label* fall_through = NULL;
3743
3744        // Notice that the labels are swapped.
3745        context()->PrepareTest(&materialize_true, &materialize_false,
3746                               &if_false, &if_true, &fall_through);
3747        if (context()->IsTest()) ForwardBailoutToChild(expr);
3748        VisitForControl(expr->expression(), if_true, if_false, fall_through);
3749        context()->Plug(if_false, if_true);  // Labels swapped.
3750      }
3751      break;
3752    }
3753
3754    case Token::TYPEOF: {
3755      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3756      { StackValueContext context(this);
3757        VisitForTypeofValue(expr->expression());
3758      }
3759      __ CallRuntime(Runtime::kTypeof, 1);
3760      context()->Plug(r0);
3761      break;
3762    }
3763
3764    case Token::ADD: {
3765      Comment cmt(masm_, "[ UnaryOperation (ADD)");
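      // Unary '+' only performs ToNumber; a smi is already a number, so the
      // conversion stub is skipped for smis.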
3766      VisitForAccumulatorValue(expr->expression());
3767      Label no_conversion;
3768      __ tst(result_register(), Operand(kSmiTagMask));
3769      __ b(eq, &no_conversion);
3770      ToNumberStub convert_stub;
3771      __ CallStub(&convert_stub);
3772      __ bind(&no_conversion);
3773      context()->Plug(result_register());
3774      break;
3775    }
3776
3777    case Token::SUB: {
3778      Comment cmt(masm_, "[ UnaryOperation (SUB)");
3779      bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3780      UnaryOverwriteMode overwrite =
3781          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3782      GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
3783      // GenericUnaryOpStub expects the argument to be in the
3784      // accumulator register r0.
3785      VisitForAccumulatorValue(expr->expression());
3786      __ CallStub(&stub);
3787      context()->Plug(r0);
3788      break;
3789    }
3790
3791    case Token::BIT_NOT: {
3792      Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
3793      // The generic unary operation stub expects the argument to be
3794      // in the accumulator register r0.
3795      VisitForAccumulatorValue(expr->expression());
3796      Label done;
3797      bool inline_smi_code = ShouldInlineSmiCase(expr->op());
3798      if (inline_smi_code) {
3799        Label call_stub;
3800        __ JumpIfNotSmi(r0, &call_stub);
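        // For a smi, mvn also inverts the (zero) tag bit; clearing it again
        // below leaves the smi encoding of ~value.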
3801        __ mvn(r0, Operand(r0));
3802        // Bit-clear inverted smi-tag.
3803        __ bic(r0, r0, Operand(kSmiTagMask));
3804        __ b(&done);
3805        __ bind(&call_stub);
3806      }
3807      bool overwrite = expr->expression()->ResultOverwriteAllowed();
3808      UnaryOpFlags flags = inline_smi_code
3809          ? NO_UNARY_SMI_CODE_IN_STUB
3810          : NO_UNARY_FLAGS;
3811      UnaryOverwriteMode mode =
3812          overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3813      GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
3814      __ CallStub(&stub);
3815      __ bind(&done);
3816      context()->Plug(r0);
3817      break;
3818    }
3819
3820    default:
3821      UNREACHABLE();
3822  }
3823}
3824
3825
3826void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3827  Comment cmnt(masm_, "[ CountOperation");
3828  SetSourcePosition(expr->position());
3829
3830  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3831  // as the left-hand side.
3832  if (!expr->expression()->IsValidLeftHandSide()) {
3833    VisitForEffect(expr->expression());
3834    return;
3835  }
3836
3837  // Expression can only be a property, a global or a (parameter or local)
3838  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
3839  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3840  LhsKind assign_type = VARIABLE;
3841  Property* prop = expr->expression()->AsProperty();
3842  // In case of a property we use the uninitialized expression context
3843  // of the key to detect a named property.
3844  if (prop != NULL) {
3845    assign_type =
3846        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3847  }
3848
3849  // Evaluate expression and get value.
3850  if (assign_type == VARIABLE) {
3851    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3852    AccumulatorValueContext context(this);
3853    EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3854  } else {
3855    // Reserve space for result of postfix operation.
3856    if (expr->is_postfix() && !context()->IsEffect()) {
3857      __ mov(ip, Operand(Smi::FromInt(0)));
3858      __ push(ip);
3859    }
3860    if (assign_type == NAMED_PROPERTY) {
3861      // Put the object both on the stack and in the accumulator.
3862      VisitForAccumulatorValue(prop->obj());
3863      __ push(r0);
3864      EmitNamedPropertyLoad(prop);
3865    } else {
3866      if (prop->is_arguments_access()) {
3867        VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3868        __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
3869        __ push(r0);
3870        __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
3871      } else {
3872        VisitForStackValue(prop->obj());
3873        VisitForAccumulatorValue(prop->key());
3874      }
3875      __ ldr(r1, MemOperand(sp, 0));
3876      __ push(r0);
3877      EmitKeyedPropertyLoad(prop);
3878    }
3879  }
3880
3881  // We need a second deoptimization point after loading the value
3882  // in case evaluating the property load may have a side effect.
3883  if (assign_type == VARIABLE) {
3884    PrepareForBailout(expr->expression(), TOS_REG);
3885  } else {
3886    PrepareForBailoutForId(expr->CountId(), TOS_REG);
3887  }
3888
3889  // Call ToNumber only if operand is not a smi.
3890  Label no_conversion;
3891  __ JumpIfSmi(r0, &no_conversion);
3892  ToNumberStub convert_stub;
3893  __ CallStub(&convert_stub);
3894  __ bind(&no_conversion);
3895
3896  // Save result for postfix expressions.
3897  if (expr->is_postfix()) {
3898    if (!context()->IsEffect()) {
3899      // Save the result on the stack. If we have a named or keyed property
3900      // we store the result under the receiver that is currently on top
3901      // of the stack.
3902      switch (assign_type) {
3903        case VARIABLE:
3904          __ push(r0);
3905          break;
3906        case NAMED_PROPERTY:
3907          __ str(r0, MemOperand(sp, kPointerSize));
3908          break;
3909        case KEYED_PROPERTY:
3910          __ str(r0, MemOperand(sp, 2 * kPointerSize));
3911          break;
3912      }
3913    }
3914  }
3915
3917  // Inline smi case if we are in a loop.
3918  Label stub_call, done;
3919  JumpPatchSite patch_site(masm_);
3920
3921  int count_value = expr->op() == Token::INC ? 1 : -1;
3922  if (ShouldInlineSmiCase(expr->op())) {
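    // The addition is speculative: if r0 is not a smi after all, or the add
    // overflows (V flag), the operation is undone below and the stub is used.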
3923    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
3924    __ b(vs, &stub_call);
3925    // We could eliminate this smi check if we split the code at
3926    // the first smi check before calling ToNumber.
3927    patch_site.EmitJumpIfSmi(r0, &done);
3928
3929    __ bind(&stub_call);
3930    // Call stub. Undo operation first.
3931    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
3932  }
3933  __ mov(r1, Operand(Smi::FromInt(count_value)));
3934
3935  // Record position before stub call.
3936  SetSourcePosition(expr->position());
3937
3938  TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
3939  EmitCallIC(stub.GetCode(), &patch_site);
3940  __ bind(&done);
3941
3942  // Store the value returned in r0.
3943  switch (assign_type) {
3944    case VARIABLE:
3945      if (expr->is_postfix()) {
3946        { EffectContext context(this);
3947          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3948                                 Token::ASSIGN);
3949          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3950          context.Plug(r0);
3951        }
3952        // For all contexts except EffectContext we have the result on
3953        // top of the stack.
3954        if (!context()->IsEffect()) {
3955          context()->PlugTOS();
3956        }
3957      } else {
3958        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3959                               Token::ASSIGN);
3960        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3961        context()->Plug(r0);
3962      }
3963      break;
3964    case NAMED_PROPERTY: {
3965      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
3966      __ pop(r1);
3967      Handle<Code> ic = is_strict_mode()
3968          ? isolate()->builtins()->StoreIC_Initialize_Strict()
3969          : isolate()->builtins()->StoreIC_Initialize();
3970      EmitCallIC(ic, RelocInfo::CODE_TARGET);
3971      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3972      if (expr->is_postfix()) {
3973        if (!context()->IsEffect()) {
3974          context()->PlugTOS();
3975        }
3976      } else {
3977        context()->Plug(r0);
3978      }
3979      break;
3980    }
3981    case KEYED_PROPERTY: {
3982      __ pop(r1);  // Key.
3983      __ pop(r2);  // Receiver.
3984      Handle<Code> ic = is_strict_mode()
3985          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3986          : isolate()->builtins()->KeyedStoreIC_Initialize();
3987      EmitCallIC(ic, RelocInfo::CODE_TARGET);
3988      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3989      if (expr->is_postfix()) {
3990        if (!context()->IsEffect()) {
3991          context()->PlugTOS();
3992        }
3993      } else {
3994        context()->Plug(r0);
3995      }
3996      break;
3997    }
3998  }
3999}
4000
4001
4002void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4003  ASSERT(!context()->IsEffect());
4004  ASSERT(!context()->IsTest());
4005  VariableProxy* proxy = expr->AsVariableProxy();
4006  if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
4007    Comment cmnt(masm_, "Global variable");
4008    __ ldr(r0, GlobalObjectOperand());
4009    __ mov(r2, Operand(proxy->name()));
4010    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4011    // Use a regular load, not a contextual load, to avoid a reference
4012    // error.
4013    EmitCallIC(ic, RelocInfo::CODE_TARGET);
4014    PrepareForBailout(expr, TOS_REG);
4015    context()->Plug(r0);
4016  } else if (proxy != NULL &&
4017             proxy->var()->AsSlot() != NULL &&
4018             proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
4019    Label done, slow;
4020
4021    // Generate code for loading from variables potentially shadowed
4022    // by eval-introduced variables.
4023    Slot* slot = proxy->var()->AsSlot();
4024    EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
4025
4026    __ bind(&slow);
4027    __ mov(r0, Operand(proxy->name()));
4028    __ Push(cp, r0);
4029    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4030    PrepareForBailout(expr, TOS_REG);
4031    __ bind(&done);
4032
4033    context()->Plug(r0);
4034  } else {
4035    // This expression cannot throw a reference error at the top level.
4036    context()->HandleExpression(expr);
4037  }
4038}
4039
4040
4041bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4042                                          Expression* left,
4043                                          Expression* right,
4044                                          Label* if_true,
4045                                          Label* if_false,
4046                                          Label* fall_through) {
4047  if (op != Token::EQ && op != Token::EQ_STRICT) return false;
4048
4049  // Check for the pattern: typeof <expression> == <string literal>.
4050  Literal* right_literal = right->AsLiteral();
4051  if (right_literal == NULL) return false;
4052  Handle<Object> right_literal_value = right_literal->handle();
4053  if (!right_literal_value->IsString()) return false;
4054  UnaryOperation* left_unary = left->AsUnaryOperation();
4055  if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
4056  Handle<String> check = Handle<String>::cast(right_literal_value);
4057
4058  { AccumulatorValueContext context(this);
4059    VisitForTypeofValue(left_unary->expression());
4060  }
4061  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4062
4063  if (check->Equals(isolate()->heap()->number_symbol())) {
4064    __ JumpIfSmi(r0, if_true);
4065    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4066    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4067    __ cmp(r0, ip);
4068    Split(eq, if_true, if_false, fall_through);
4069  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4070    __ JumpIfSmi(r0, if_false);
4071    // Check for undetectable objects => false.
4072    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4073    __ b(ge, if_false);
4074    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4075    __ tst(r1, Operand(1 << Map::kIsUndetectable));
4076    Split(eq, if_true, if_false, fall_through);
4077  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4078    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4079    __ b(eq, if_true);
4080    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4081    Split(eq, if_true, if_false, fall_through);
4082  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4083    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4084    __ b(eq, if_true);
4085    __ JumpIfSmi(r0, if_false);
4086    // Check for undetectable objects => true.
4087    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4088    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4089    __ tst(r1, Operand(1 << Map::kIsUndetectable));
4090    Split(ne, if_true, if_false, fall_through);
4091
4092  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4093    __ JumpIfSmi(r0, if_false);
4094    __ CompareObjectType(r0, r1, r0, FIRST_FUNCTION_CLASS_TYPE);
4095    Split(ge, if_true, if_false, fall_through);
4096
4097  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4098    __ JumpIfSmi(r0, if_false);
4099    __ CompareRoot(r0, Heap::kNullValueRootIndex);
4100    __ b(eq, if_true);
4101    // Check for JS objects => true.
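    // typeof reports "object" only for instance types in the range
    // [FIRST_JS_OBJECT_TYPE, FIRST_FUNCTION_CLASS_TYPE) that are not
    // undetectable.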
4102    __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
4103    __ b(lo, if_false);
4104    __ CompareInstanceType(r0, r1, FIRST_FUNCTION_CLASS_TYPE);
4105    __ b(hs, if_false);
4106    // Check for undetectable objects => false.
4107    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4108    __ tst(r1, Operand(1 << Map::kIsUndetectable));
4109    Split(eq, if_true, if_false, fall_through);
4110  } else {
4111    if (if_false != fall_through) __ jmp(if_false);
4112  }
4113
4114  return true;
4115}
4116
4117
4118void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4119  Comment cmnt(masm_, "[ CompareOperation");
4120  SetSourcePosition(expr->position());
4121
4122  // Always perform the comparison for its control flow.  Pack the result
4123  // into the expression's context after the comparison is performed.
4124
4125  Label materialize_true, materialize_false;
4126  Label* if_true = NULL;
4127  Label* if_false = NULL;
4128  Label* fall_through = NULL;
4129  context()->PrepareTest(&materialize_true, &materialize_false,
4130                         &if_true, &if_false, &fall_through);
4131
4132  // First we try a fast inlined version of the compare when one of
4133  // the operands is a literal.
4134  Token::Value op = expr->op();
4135  Expression* left = expr->left();
4136  Expression* right = expr->right();
4137  if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
4138    context()->Plug(if_true, if_false);
4139    return;
4140  }
4141
4142  VisitForStackValue(expr->left());
4143  switch (op) {
4144    case Token::IN:
4145      VisitForStackValue(expr->right());
4146      __ InvokeBuiltin(Builtins::IN, CALL_JS);
4147      PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
4148      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4149      __ cmp(r0, ip);
4150      Split(eq, if_true, if_false, fall_through);
4151      break;
4152
4153    case Token::INSTANCEOF: {
4154      VisitForStackValue(expr->right());
4155      InstanceofStub stub(InstanceofStub::kNoFlags);
4156      __ CallStub(&stub);
4157      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4158      // The stub returns 0 for true.
4159      __ tst(r0, r0);
4160      Split(eq, if_true, if_false, fall_through);
4161      break;
4162    }
4163
4164    default: {
4165      VisitForAccumulatorValue(expr->right());
4166      Condition cond = eq;
4167      bool strict = false;
4168      switch (op) {
4169        case Token::EQ_STRICT:
4170          strict = true;
4171          // Fall through
4172        case Token::EQ:
4173          cond = eq;
4174          __ pop(r1);
4175          break;
4176        case Token::LT:
4177          cond = lt;
4178          __ pop(r1);
4179          break;
4180        case Token::GT:
4181          // Reverse left and right sides to obtain ECMA-262 conversion order.
4182          cond = lt;
4183          __ mov(r1, result_register());
4184          __ pop(r0);
4185          break;
4186        case Token::LTE:
4187          // Reverse left and right sides to obtain ECMA-262 conversion order.
4188          cond = ge;
4189          __ mov(r1, result_register());
4190          __ pop(r0);
4191          break;
4192        case Token::GTE:
4193          cond = ge;
4194          __ pop(r1);
4195          break;
4196        case Token::IN:
4197        case Token::INSTANCEOF:
4198        default:
4199          UNREACHABLE();
4200      }
4201
4202      bool inline_smi_code = ShouldInlineSmiCase(op);
4203      JumpPatchSite patch_site(masm_);
4204      if (inline_smi_code) {
4205        Label slow_case;
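        // Or the operands together: the combined value has a clear smi tag
        // only if both operands are smis.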
4206        __ orr(r2, r0, Operand(r1));
4207        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4208        __ cmp(r1, r0);
4209        Split(cond, if_true, if_false, NULL);
4210        __ bind(&slow_case);
4211      }
4212
4213      // Record position and call the compare IC.
4214      SetSourcePosition(expr->position());
4215      Handle<Code> ic = CompareIC::GetUninitialized(op);
4216      EmitCallIC(ic, &patch_site);
4217      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4218      __ cmp(r0, Operand(0));
4219      Split(cond, if_true, if_false, fall_through);
4220    }
4221  }
4222
4223  // Convert the result of the comparison into one expected for this
4224  // expression's context.
4225  context()->Plug(if_true, if_false);
4226}
4227
4228
4229void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4230  Comment cmnt(masm_, "[ CompareToNull");
4231  Label materialize_true, materialize_false;
4232  Label* if_true = NULL;
4233  Label* if_false = NULL;
4234  Label* fall_through = NULL;
4235  context()->PrepareTest(&materialize_true, &materialize_false,
4236                         &if_true, &if_false, &fall_through);
4237
4238  VisitForAccumulatorValue(expr->expression());
4239  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4240  __ LoadRoot(r1, Heap::kNullValueRootIndex);
4241  __ cmp(r0, r1);
4242  if (expr->is_strict()) {
4243    Split(eq, if_true, if_false, fall_through);
4244  } else {
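    // In the non-strict case, null also compares equal to undefined and to
    // undetectable objects.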
4245    __ b(eq, if_true);
4246    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
4247    __ cmp(r0, r1);
4248    __ b(eq, if_true);
4249    __ tst(r0, Operand(kSmiTagMask));
4250    __ b(eq, if_false);
4251    // It can be an undetectable object.
4252    __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
4253    __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
4254    __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
4255    __ cmp(r1, Operand(1 << Map::kIsUndetectable));
4256    Split(eq, if_true, if_false, fall_through);
4257  }
4258  context()->Plug(if_true, if_false);
4259}
4260
4261
4262void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4263  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4264  context()->Plug(r0);
4265}
4266
4267
4268Register FullCodeGenerator::result_register() {
4269  return r0;
4270}
4271
4272
4273Register FullCodeGenerator::context_register() {
4274  return cp;
4275}
4276
4277
4278void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
4279  ASSERT(mode == RelocInfo::CODE_TARGET ||
4280         mode == RelocInfo::CODE_TARGET_CONTEXT);
4281  Counters* counters = isolate()->counters();
4282  switch (ic->kind()) {
4283    case Code::LOAD_IC:
4284      __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
4285      break;
4286    case Code::KEYED_LOAD_IC:
4287      __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
4288      break;
4289    case Code::STORE_IC:
4290      __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
4291      break;
4292    case Code::KEYED_STORE_IC:
4293      __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
      break;
4294    default:
4295      break;
4296  }
4297  __ Call(ic, mode);
4298}
4299
4300
4301void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
4302  Counters* counters = isolate()->counters();
4303  switch (ic->kind()) {
4304    case Code::LOAD_IC:
4305      __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
4306      break;
4307    case Code::KEYED_LOAD_IC:
4308      __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
4309      break;
4310    case Code::STORE_IC:
4311      __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
4312      break;
4313    case Code::KEYED_STORE_IC:
4314      __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
      break;
4315    default:
4316      break;
4317  }
4318  __ Call(ic, RelocInfo::CODE_TARGET);
4319  if (patch_site != NULL && patch_site->is_bound()) {
4320    patch_site->EmitPatchInfo();
4321  } else {
4322    __ nop();  // Signals no inlined code.
4323  }
4324}
4325
4326
4327void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4328  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4329  __ str(value, MemOperand(fp, frame_offset));
4330}
4331
4332
4333void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4334  __ ldr(dst, ContextOperand(cp, context_index));
4335}
4336
4337
4338// ----------------------------------------------------------------------------
4339// Non-local control flow support.
4340
4341void FullCodeGenerator::EnterFinallyBlock() {
4342  ASSERT(!result_register().is(r1));
4343  // Store result register while executing finally block.
4344  __ push(result_register());
4345  // Cook the return address in lr onto the stack (smi-encoded Code* delta).
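  // Storing a delta instead of a raw code address keeps the saved return
  // address valid even if the GC moves the code object while the finally
  // block runs.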
4346  __ sub(r1, lr, Operand(masm_->CodeObject()));
4347  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4348  ASSERT_EQ(0, kSmiTag);
4349  __ add(r1, r1, Operand(r1));  // Convert to smi.
4350  __ push(r1);
4351}
4352
4353
4354void FullCodeGenerator::ExitFinallyBlock() {
4355  ASSERT(!result_register().is(r1));
4356  // Restore the cooked return address from the stack.
4357  __ pop(r1);
4358  // Restore the result register, then uncook the return address and return.
4359  __ pop(result_register());
4360  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4361  __ mov(r1, Operand(r1, ASR, 1));  // Un-smi-tag value.
4362  __ add(pc, r1, Operand(masm_->CodeObject()));
4363}
4364
4365
4366#undef __
4367
4368} }  // namespace v8::internal
4369
4370#endif  // V8_TARGET_ARCH_ARM
4371