full-codegen-x64.cc revision 8a31eba00023874d4a1dcdc5f411cc4336776874
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "code-stubs.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e., ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif
  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = scope()->num_stack_slots();
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(rdx);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    function_in_register = false;
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved on the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use a third register to avoid
        // clobbering rsi.
        __ movq(rcx, rsi);
        __ RecordWrite(rcx, context_offset, rax, rbx);
      }
    }
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(rdi);
    } else {
      __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int offset = scope()->num_parameters() * kPointerSize;
    __ lea(rdx,
           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(rdx);
    __ Push(Smi::FromInt(scope()->num_parameters()));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ CallStub(&stub);
    // Store new arguments object in both "arguments" and ".arguments" slots.
    __ movq(rcx, rax);
    Move(arguments->AsSlot(), rax, rbx, rdx);
    Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
    Move(dot_arguments_slot, rcx, rbx, rdx);
  }

  { Comment cmnt(masm_, "[ Declarations");
    // For named function expressions, declare the function name as a
    // constant.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      EmitDeclaration(scope()->function(), Variable::CONST, NULL);
    }
    // Visit all the explicit declarations unless there is an illegal
    // redeclaration.
    if (scope()->HasIllegalRedeclaration()) {
      scope()->VisitIllegalRedeclaration(this);
    } else {
      VisitDeclarations(scope()->declarations());
    }
  }

  { Comment cmnt(masm_, "[ Stack check");
    NearLabel ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok);
    StackCheckStub stub;
    __ CallStub(&stub);
    __ bind(&ok);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);
  }

  { Comment cmnt(masm_, "[ return <undefined>;");
    // Emit a 'return undefined' in case control fell off the end of the body.
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


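// Emit the common return sequence.  It is generated only once per
// function; subsequent return sites simply jump to return_label_.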
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movq(rsp, rbp);
    __ pop(rbp);
    __ ret((scope()->num_parameters() + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Add padding that will be overwritten by a debugger breakpoint.  We
    // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
    // (3 + 1 + 3).
    const int kPadding = Assembler::kJSReturnSequenceLength - 7;
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning matches what is
    // expected by the debugger.
    ASSERT_EQ(Assembler::kJSReturnSequenceLength,
              masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}


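// Decide whether one of the operands of an inlined smi operation can be
// used as an immediate constant.  Constant operands are not used by the
// x64 inlined smi code yet, so this always answers kNoConstants.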
FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
    Token::Value op, Expression* left, Expression* right) {
  ASSERT(ShouldInlineSmiCase(op));
  return kNoConstants;
}


void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  __ movq(result_register(), slot_operand);
}


void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  __ push(slot_operand);
}


void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  codegen()->Move(result_register(), slot);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Move(result_register(), lit);
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  __ Push(lit);
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      __ jmp(false_label_);
    } else {
      __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      __ jmp(false_label_);
    } else {
      __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movq(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT_EQ(materialize_true, materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
  __ Move(result_register(), Factory::true_value());
  __ jmp(&done);
  __ bind(materialize_false);
  __ Move(result_register(), Factory::false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
  __ Push(Factory::true_value());
  __ jmp(&done);
  __ bind(materialize_false);
  __ Push(Factory::false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_false == false_label_);
  ASSERT(materialize_true == true_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


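// Convert the value in the result register to a boolean and branch to
// if_true or if_false.  Undefined, true, false, and smis are handled
// inline; everything else falls back on the ToBooleanStub.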
void FullCodeGenerator::DoTest(Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  // Emit the inlined tests assumed by the stub.
  __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
  __ j(equal, if_false);
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  __ j(equal, if_true);
  __ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
  __ j(equal, if_false);
  ASSERT_EQ(0, kSmiTag);
  __ SmiCompare(result_register(), Smi::FromInt(0));
  __ j(equal, if_false);
  Condition is_smi = masm_->CheckSmi(result_register());
  __ j(is_smi, if_true);

  // Call the ToBoolean stub for all other cases.
  ToBooleanStub stub;
  __ push(result_register());
  __ CallStub(&stub);
  __ testq(rax, rax);

  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


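// Emit a branch on condition cc to if_true and a jump to if_false,
// omitting whichever of the two targets is the natural fall-through.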
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


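// Return a memory operand addressing the given slot.  For context slots
// the scratch register is clobbered to hold the context containing the
// slot.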
MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return Operand(rbp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      UNREACHABLE();
  }
  UNREACHABLE();
  return Operand(rax, 0);
}


void FullCodeGenerator::Move(Register destination, Slot* source) {
  MemOperand location = EmitSlotSearch(source, destination);
  __ movq(destination, location);
}


void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ movq(location, src);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
    __ RecordWrite(scratch1, offset, src, scratch2);
  }
}


void FullCodeGenerator::EmitDeclaration(Variable* variable,
                                        Variable::Mode mode,
                                        FunctionLiteral* function) {
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL);  // Must have been resolved.
  Slot* slot = variable->AsSlot();
  Property* prop = variable->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (mode == Variable::CONST) {
          __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
          __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ movq(Operand(rbp, SlotOffset(slot)), result_register());
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.

        // The variable in the decl always resides in the current context.
        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
        if (FLAG_debug_code) {
          // Check if we have the correct context pointer.
          __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
          __ cmpq(rbx, rsi);
          __ Check(equal, "Unexpected declaration in current context.");
        }
        if (mode == Variable::CONST) {
          __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
          __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
          // No write barrier since the hole value is in old space.
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ movq(ContextOperand(rsi, slot->index()), result_register());
          int offset = Context::SlotOffset(slot->index());
          __ movq(rbx, rsi);
          __ RecordWrite(rbx, offset, result_register(), rcx);
        }
        break;

      case Slot::LOOKUP: {
        __ push(rsi);
        __ Push(variable->name());
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(mode == Variable::VAR || mode == Variable::CONST);
        PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
        __ Push(Smi::FromInt(attr));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (mode == Variable::CONST) {
          __ PushRoot(Heap::kTheHoleValueRootIndex);
        } else if (function != NULL) {
          VisitForStackValue(function);
        } else {
          __ Push(Smi::FromInt(0));  // no initial value!
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    if (function != NULL || mode == Variable::CONST) {
      // We are declaring a function or constant that rewrites to a
      // property.  Use (keyed) IC to set the initial value.
      VisitForStackValue(prop->obj());
      if (function != NULL) {
        VisitForStackValue(prop->key());
        VisitForAccumulatorValue(function);
        __ pop(rcx);
      } else {
        VisitForAccumulatorValue(prop->key());
        __ movq(rcx, result_register());
        __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
      }
      __ pop(rdx);

      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
    }
  }
}


void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(is_eval() ? 1 : 0));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movq(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    if (inline_smi_code) {
      Label slow_case;
      __ JumpIfNotBothSmi(rdx, rax, &slow_case);
      __ SmiCompare(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target()->entry_label());
      __ bind(&slow_case);
    }

    CompareFlags flags = inline_smi_code
        ? NO_SMI_COMPARE_IN_STUB
        : NO_COMPARE_FLAGS;
    CompareStub stub(equal, true, flags);
    __ CallStub(&stub);
    __ testq(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target()->entry_label());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_target());
  } else {
    __ jmp(default_clause->body_target()->entry_label());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target()->entry_label());
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_target());
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &exit);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(rax);

  // BUG(867): Check cache validity in generated code. This is a fast
  // case for the JSObject::IsSimpleEnum cache validity checks. If we
  // cannot guarantee cache validity, call the runtime system to check
  // cache validity or get the property names in a fixed array.

  // Get the set of properties to enumerate.
  __ push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  NearLabel fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
  __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
752
753  // Setup the four remaining stack slots.
754  __ push(rax);  // Map.
755  __ push(rdx);  // Enumeration cache.
756  __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
757  __ push(rax);  // Enumeration cache length (as smi).
758  __ Push(Smi::FromInt(0));  // Initial index.
759  __ jmp(&loop);
760
761  // We got a fixed array in register rax. Iterate through that.
762  __ bind(&fixed_array);
763  __ Push(Smi::FromInt(0));  // Map (0) - force slow check.
764  __ push(rax);
765  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
766  __ push(rax);  // Fixed array length (as smi).
767  __ Push(Smi::FromInt(0));  // Initial index.
768
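  // In both cases the loop below operates on five stack slots, from top:
  // the current index (smi), the array length (smi), the enum cache or
  // fixed array of keys, the map (or a smi zero forcing the slow path),
  // and the enumerable object itself.
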
  // Generate code for doing the condition check.
  __ bind(&loop);
  __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_target());

  // Get the current entry of the array into register rbx.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case into register rdx.
  __ movq(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  NearLabel update_each;
  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(rcx);  // Enumerable.
  __ push(rbx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ SmiCompare(rax, Smi::FromInt(0));
  __ j(equal, loop_statement.continue_target());
  __ movq(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movq(result_register(), rbx);
  // Perform the assignment as if via '='.
  EmitAssignment(stmt->each());

  // Generate code for the body of the loop.
  Label stack_limit_hit, stack_check_done;
  Visit(stmt->body());

  __ StackLimitCheck(&stack_limit_hit);
  __ bind(&stack_check_done);

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_target());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
  __ jmp(&loop);

  // Slow case for the stack limit check.
  StackCheckStub stack_check_stub;
  __ bind(&stack_limit_hit);
  __ CallStub(&stack_check_stub);
  __ jmp(&stack_check_done);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_target());
  __ addq(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() &&
      info->num_literals() == 0 &&
      !pretenure) {
    FastNewClosureStub stub;
    __ Push(info);
    __ CallStub(&stub);
  } else {
    __ push(rsi);
    __ Push(info);
    __ Push(pretenure ? Factory::true_value() : Factory::false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var());
}


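// Load a global variable after checking that no context between the
// current one and the global context has an extension object that could
// shadow it (eval may introduce such extensions).  Jumps to slow if an
// extension object is found.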
void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    NearLabel next, fast;
    if (!context.is(temp)) {
      __ movq(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
    __ bind(&next);
    // Terminate at global context.
    __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
    __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ movq(rax, GlobalObjectOperand());
  __ Move(rcx, slot->var()->name());
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  EmitCallIC(ic, mode);
}


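// Return the operand for a context slot after checking, in every scope on
// the way that calls eval, that the context has no extension object.
// Jumps to slow if an extension object is found.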
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Label* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);
  __ movq(temp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(temp, slot->index());
}


void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow,
    Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
    __ jmp(done);
  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ movq(rax,
              ContextSlotOperandCheckExtensions(potential_slot, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
        __ j(not_equal, done);
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      }
      __ jmp(done);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          __ movq(rdx,
                  ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                    slow));
          __ Move(rax, key_literal->handle());
          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
          EmitCallIC(ic, RelocInfo::CODE_TARGET);
          __ jmp(done);
        }
      }
    }
  }
}


void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  // Four cases: non-this global variables, lookup slots, all other
  // types of slots, and parameters that rewrite to explicit property
  // accesses on the arguments object.
  Slot* slot = var->AsSlot();
  Property* property = var->AsProperty();

  if (var->is_global() && !var->is_this()) {
    Comment cmnt(masm_, "Global variable");
    // Use inline caching. Variable name is passed in rcx and the global
    // object in rax.
    __ Move(rcx, var->name());
    __ movq(rax, GlobalObjectOperand());
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
    context()->Plug(rax);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    Comment cmnt(masm_, "Lookup slot");
    __ push(rsi);  // Context.
    __ Push(var->name());
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ bind(&done);

    context()->Plug(rax);

  } else if (slot != NULL) {
    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
                            ? "Context slot"
                            : "Stack slot");
    if (var->mode() == Variable::CONST) {
      // Constants may be the hole value if they have not been initialized.
      // Unhole them.
      NearLabel done;
      MemOperand slot_operand = EmitSlotSearch(slot, rax);
      __ movq(rax, slot_operand);
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &done);
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      __ bind(&done);
      context()->Plug(rax);
    } else {
      context()->Plug(slot);
    }

  } else {
    Comment cmnt(masm_, "Rewritten parameter");
    ASSERT_NOT_NULL(property);
    // Rewritten parameter accesses are of the form "slot[literal]".

    // Assert that the object is in a slot.
    Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
    ASSERT_NOT_NULL(object_var);
    Slot* object_slot = object_var->AsSlot();
    ASSERT_NOT_NULL(object_slot);

    // Load the object.
    MemOperand object_loc = EmitSlotSearch(object_slot, rax);
    __ movq(rdx, object_loc);

    // Assert that the key is a smi.
    Literal* key_literal = property->key()->AsLiteral();
    ASSERT_NOT_NULL(key_literal);
    ASSERT(key_literal->handle()->IsSmi());

    // Load the key.
    __ Move(rax, key_literal->handle());

    // Do a keyed property load.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized);

  // Create regexp literal using runtime function
  // Result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->constant_properties());
  __ Push(Smi::FromInt(expr->fast_elements() ? 1 : 0));
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          VisitForAccumulatorValue(value);
          __ Move(rcx, key->handle());
          __ movq(rdx, Operand(rsp, 0));
          if (property->emit_store()) {
            Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
            EmitCallIC(ic, RelocInfo::CODE_TARGET);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 3);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
                Smi::FromInt(1) :
                Smi::FromInt(0));
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kDefineAccessor, 4);
        break;
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->constant_elements());
  if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ CallStub(&stub);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(rax);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the subexpression value in the array's elements.
    __ movq(rbx, Operand(rsp, 0));  // Copy of array literal.
    __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
    int offset = FixedArray::kHeaderSize + (i * kPointerSize);
    __ movq(FieldOperand(rbx, offset), result_register());

    // Update the write barrier for the array store.
    __ RecordWrite(rbx, offset, result_register(), rcx);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ movq(rdx, Operand(rsp, 0));
        __ push(rax);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy()->var());
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    ConstantOperand constant = ShouldInlineSmiCase(op)
        ? GetConstantOperand(op, expr->target(), expr->value())
        : kNoConstants;
    ASSERT(constant == kRightConstant || constant == kNoConstants);
    if (constant == kNoConstants) {
      __ push(rax);  // Left operand goes on the stack.
      VisitForAccumulatorValue(expr->value());
    }

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr,
                            op,
                            mode,
                            expr->target(),
                            expr->value(),
                            constant);
    } else {
      EmitBinaryOp(op, mode);
    }
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


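// Load a named property.  The receiver is expected in the accumulator
// (rax); the load IC takes the property name in rcx and leaves the result
// in rax.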
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->handle());
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
}


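// Load a keyed property.  The keyed load IC expects the receiver in rdx
// and the key in rax, and leaves the result in rax.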
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right,
                                              ConstantOperand constant) {
  ASSERT(constant == kNoConstants);  // Only handled case.

  // Do combined smi check of the operands. Left operand is on the
  // stack (popped into rdx). Right operand is in rax but moved into
  // rcx to make the shifts easier.
  Label done, stub_call, smi_case;
  __ pop(rdx);
  __ movq(rcx, rax);
  Condition smi = masm()->CheckBothSmi(rdx, rax);
  __ j(smi, &smi_case);

  __ bind(&stub_call);
  GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
  if (stub.ArgsInRegistersSupported()) {
    stub.GenerateCall(masm_, rdx, rcx);
  } else {
    __ push(rdx);
    __ push(rcx);
    __ CallStub(&stub);
  }
  __ jmp(&done);

  __ bind(&smi_case);
  switch (op) {
    case Token::SAR:
      __ SmiShiftArithmeticRight(rax, rdx, rcx);
      break;
    case Token::SHL:
      __ SmiShiftLeft(rax, rdx, rcx);
      break;
    case Token::SHR:
      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
      break;
    case Token::ADD:
      __ SmiAdd(rax, rdx, rcx, &stub_call);
      break;
    case Token::SUB:
      __ SmiSub(rax, rdx, rcx, &stub_call);
      break;
    case Token::MUL:
      __ SmiMul(rax, rdx, rcx, &stub_call);
      break;
    case Token::BIT_OR:
      __ SmiOr(rax, rdx, rcx);
      break;
    case Token::BIT_AND:
      __ SmiAnd(rax, rdx, rcx);
      break;
    case Token::BIT_XOR:
      __ SmiXor(rax, rdx, rcx);
      break;
    default:
      UNREACHABLE();
      break;
  }

  __ bind(&done);
  context()->Plug(rax);
}


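// Apply the binary operation to the left operand on top of the stack and
// the right operand in rax, using the generic binary op stub.  The result
// ends up in rax.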
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
                                     OverwriteMode mode) {
  GenericBinaryOpStub stub(op, mode, NO_GENERIC_BINARY_FLAGS);
  if (stub.ArgsInRegistersSupported()) {
    __ pop(rdx);
    stub.GenerateCall(masm_, rdx, rax);
  } else {
    __ push(result_register());
    __ CallStub(&stub);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ movq(rdx, rax);
      __ pop(rax);  // Restore value.
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ movq(rcx, rax);
      __ pop(rdx);
      __ pop(rax);
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      break;
    }
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  // Left-hand sides that rewrite to explicit property accesses do not reach
  // here.
  ASSERT(var != NULL);
  ASSERT(var->is_global() || var->AsSlot() != NULL);

  if (var->is_global()) {
    ASSERT(!var->is_this());
1559    // Assignment to a global variable.  Use inline caching for the
    // assignment.  The right-hand-side value is passed in rax, the
    // variable name in rcx, and the global object in rdx.
1562    __ Move(rcx, var->name());
1563    __ movq(rdx, GlobalObjectOperand());
1564    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1565    EmitCallIC(ic, RelocInfo::CODE_TARGET);
1566
1567  } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
1568    // Perform the assignment for non-const variables and for initialization
1569    // of const variables.  Const assignments are simply skipped.
1570    Label done;
1571    Slot* slot = var->AsSlot();
1572    switch (slot->type()) {
1573      case Slot::PARAMETER:
1574      case Slot::LOCAL:
1575        if (op == Token::INIT_CONST) {
1576          // Detect const reinitialization by checking for the hole value.
1577          __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1578          __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1579          __ j(not_equal, &done);
1580        }
1581        // Perform the assignment.
1582        __ movq(Operand(rbp, SlotOffset(slot)), rax);
1583        break;
1584
1585      case Slot::CONTEXT: {
1586        MemOperand target = EmitSlotSearch(slot, rcx);
1587        if (op == Token::INIT_CONST) {
1588          // Detect const reinitialization by checking for the hole value.
1589          __ movq(rdx, target);
1590          __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1591          __ j(not_equal, &done);
1592        }
1593        // Perform the assignment and issue the write barrier.
1594        __ movq(target, rax);
1595        // The value of the assignment is in rax.  RecordWrite clobbers its
1596        // register arguments.
1597        __ movq(rdx, rax);
1598        int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
1599        __ RecordWrite(rcx, offset, rdx, rbx);
1600        break;
1601      }
1602
1603      case Slot::LOOKUP:
1604        // Call the runtime for the assignment.  The runtime will ignore
1605        // const reinitialization.
1606        __ push(rax);  // Value.
1607        __ push(rsi);  // Context.
1608        __ Push(var->name());
1609        if (op == Token::INIT_CONST) {
1610          // The runtime will ignore const redeclaration.
1611          __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1612        } else {
1613          __ CallRuntime(Runtime::kStoreContextSlot, 3);
1614        }
1615        break;
1616    }
1617    __ bind(&done);
1618  }
1619
1620  context()->Plug(rax);
1621}
1622
1623
1624void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1625  // Assignment to a property, using a named store IC.
1626  Property* prop = expr->target()->AsProperty();
1627  ASSERT(prop != NULL);
1628  ASSERT(prop->key()->AsLiteral() != NULL);
1629
1630  // If the assignment starts a block of assignments to the same object,
1631  // change to slow case to avoid the quadratic behavior of repeatedly
1632  // adding fast properties.
1633  if (expr->starts_initialization_block()) {
1634    __ push(result_register());
1635    __ push(Operand(rsp, kPointerSize));  // Receiver is now under value.
1636    __ CallRuntime(Runtime::kToSlowProperties, 1);
1637    __ pop(result_register());
1638  }
1639
1640  // Record source code position before IC call.
1641  SetSourcePosition(expr->position());
1642  __ Move(rcx, prop->key()->AsLiteral()->handle());
1643  if (expr->ends_initialization_block()) {
    __ movq(rdx, Operand(rsp, 0));  // Leave receiver on the stack for later.
1645  } else {
1646    __ pop(rdx);
1647  }
1648  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1649  EmitCallIC(ic, RelocInfo::CODE_TARGET);
1650
1651  // If the assignment ends an initialization block, revert to fast case.
1652  if (expr->ends_initialization_block()) {
1653    __ push(rax);  // Result of assignment, saved even if not needed.
1654    __ push(Operand(rsp, kPointerSize));  // Receiver is under value.
1655    __ CallRuntime(Runtime::kToFastProperties, 1);
1656    __ pop(rax);
1657    context()->DropAndPlug(1, rax);
1658  } else {
1659    context()->Plug(rax);
1660  }
1661}
1662
1663
1664void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1665  // Assignment to a property, using a keyed store IC.
1666
1667  // If the assignment starts a block of assignments to the same object,
1668  // change to slow case to avoid the quadratic behavior of repeatedly
1669  // adding fast properties.
1670  if (expr->starts_initialization_block()) {
1671    __ push(result_register());
1672    // Receiver is now under the key and value.
1673    __ push(Operand(rsp, 2 * kPointerSize));
1674    __ CallRuntime(Runtime::kToSlowProperties, 1);
1675    __ pop(result_register());
1676  }
1677
  __ pop(rcx);  // Key.
1679  if (expr->ends_initialization_block()) {
1680    __ movq(rdx, Operand(rsp, 0));  // Leave receiver on the stack for later.
1681  } else {
1682    __ pop(rdx);
1683  }
1684  // Record source code position before IC call.
1685  SetSourcePosition(expr->position());
1686  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1687  EmitCallIC(ic, RelocInfo::CODE_TARGET);
1688
1689  // If the assignment ends an initialization block, revert to fast case.
1690  if (expr->ends_initialization_block()) {
1691    __ pop(rdx);
1692    __ push(rax);  // Result of assignment, saved even if not needed.
1693    __ push(rdx);
1694    __ CallRuntime(Runtime::kToFastProperties, 1);
1695    __ pop(rax);
1696  }
1697
1698  context()->Plug(rax);
1699}
1700
1701
1702void FullCodeGenerator::VisitProperty(Property* expr) {
1703  Comment cmnt(masm_, "[ Property");
1704  Expression* key = expr->key();
1705
1706  if (key->IsPropertyName()) {
1707    VisitForAccumulatorValue(expr->obj());
1708    EmitNamedPropertyLoad(expr);
1709  } else {
1710    VisitForStackValue(expr->obj());
1711    VisitForAccumulatorValue(expr->key());
1712    __ pop(rdx);
1713    EmitKeyedPropertyLoad(expr);
1714  }
1715  context()->Plug(rax);
1716}
1717
1718
1719void FullCodeGenerator::EmitCallWithIC(Call* expr,
1720                                       Handle<Object> name,
1721                                       RelocInfo::Mode mode) {
1722  // Code common for calls using the IC.
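  // On entry the receiver has already been pushed by the caller.  The call
  // IC expects the function name in rcx and the receiver and arguments on
  // the stack.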
1723  ZoneList<Expression*>* args = expr->arguments();
1724  int arg_count = args->length();
1725  { PreserveStatementPositionScope scope(masm()->positions_recorder());
1726    for (int i = 0; i < arg_count; i++) {
1727      VisitForStackValue(args->at(i));
1728    }
1729    __ Move(rcx, name);
1730  }
1731  // Record source position for debugger.
1732  SetSourcePosition(expr->position(), FORCED_POSITION);
1733  // Call the IC initialization code.
1734  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1735  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
1736  EmitCallIC(ic, mode);
1737  // Restore context register.
1738  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1739  context()->Plug(rax);
1740}
1741
1742
1743void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
1744                                            Expression* key,
1745                                            RelocInfo::Mode mode) {
1746  // Load the key.
1747  VisitForAccumulatorValue(key);
1748
1749  // Swap the name of the function and the receiver on the stack to follow
1750  // the calling convention for call ICs.
1751  __ pop(rcx);
1752  __ push(rax);
1753  __ push(rcx);
1754
1755  // Load the arguments.
1756  ZoneList<Expression*>* args = expr->arguments();
1757  int arg_count = args->length();
1758  { PreserveStatementPositionScope scope(masm()->positions_recorder());
1759    for (int i = 0; i < arg_count; i++) {
1760      VisitForStackValue(args->at(i));
1761    }
1762  }
1763  // Record source position for debugger.
1764  SetSourcePosition(expr->position(), FORCED_POSITION);
1765  // Call the IC initialization code.
1766  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1767  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
1768  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
1769  EmitCallIC(ic, mode);
1770  // Restore context register.
1771  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1772  context()->DropAndPlug(1, rax);  // Drop the key still on the stack.
1773}
1774
1775
1776void FullCodeGenerator::EmitCallWithStub(Call* expr) {
1777  // Code common for calls using the call stub.
1778  ZoneList<Expression*>* args = expr->arguments();
1779  int arg_count = args->length();
1780  { PreserveStatementPositionScope scope(masm()->positions_recorder());
1781    for (int i = 0; i < arg_count; i++) {
1782      VisitForStackValue(args->at(i));
1783    }
1784  }
1785  // Record source position for debugger.
1786  SetSourcePosition(expr->position(), FORCED_POSITION);
1787  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1788  CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
1789  __ CallStub(&stub);
1790  // Restore context register.
1791  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1792  // Discard the function left on TOS.
1793  context()->DropAndPlug(1, rax);
1794}
1795
1796
1797void FullCodeGenerator::VisitCall(Call* expr) {
1798  Comment cmnt(masm_, "[ Call");
1799  Expression* fun = expr->expression();
1800  Variable* var = fun->AsVariableProxy()->AsVariable();
1801
1802  if (var != NULL && var->is_possibly_eval()) {
1803    // In a call to eval, we first call %ResolvePossiblyDirectEval to
1804    // resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
1806    // arguments.
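    // A call is only treated as a direct eval if 'eval' still resolves to
    // the global eval function at runtime; %ResolvePossiblyDirectEval
    // performs that check and returns the function and receiver to use.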
1807    ZoneList<Expression*>* args = expr->arguments();
1808    int arg_count = args->length();
1809    { PreserveStatementPositionScope pos_scope(masm()->positions_recorder());
1810      VisitForStackValue(fun);
1811      __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.
1812
1813      // Push the arguments.
1814      for (int i = 0; i < arg_count; i++) {
1815        VisitForStackValue(args->at(i));
1816      }
1817
1818      // Push copy of the function - found below the arguments.
1819      __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
1820
1821      // Push copy of the first argument or undefined if it doesn't exist.
1822      if (arg_count > 0) {
1823        __ push(Operand(rsp, arg_count * kPointerSize));
1824      } else {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
1826      }
1827
1828      // Push the receiver of the enclosing function and do runtime call.
1829      __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
1830      __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
1831
1832      // The runtime call returns a pair of values in rax (function) and
1833      // rdx (receiver). Touch up the stack with the right values.
1834      __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
1835      __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
1836    }
1837    // Record source position for debugger.
1838    SetSourcePosition(expr->position(), FORCED_POSITION);
1839    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1840    CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
1841    __ CallStub(&stub);
1842    // Restore context register.
1843    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1844    context()->DropAndPlug(1, rax);
1845  } else if (var != NULL && !var->is_this() && var->is_global()) {
1846    // Call to a global variable.
1847    // Push global object as receiver for the call IC lookup.
1848    __ push(GlobalObjectOperand());
1849    EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
1850  } else if (var != NULL && var->AsSlot() != NULL &&
1851             var->AsSlot()->type() == Slot::LOOKUP) {
1852    // Call to a lookup slot (dynamically introduced variable).
1853    Label slow, done;
1854
1855    { PreserveStatementPositionScope scope(masm()->positions_recorder());
1856      // Generate code for loading from variables potentially shadowed
1857      // by eval-introduced variables.
1858      EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
1859                                      NOT_INSIDE_TYPEOF,
1860                                      &slow,
1861                                      &done);
1862
1863      __ bind(&slow);
1864      // Call the runtime to find the function to call (returned in rax)
1865      // and the object holding it (returned in rdx).
1866      __ push(context_register());
1867      __ Push(var->name());
1868      __ CallRuntime(Runtime::kLoadContextSlot, 2);
1869      __ push(rax);  // Function.
1870      __ push(rdx);  // Receiver.
1871
1872      // If fast case code has been generated, emit code to push the
1873      // function and receiver and have the slow path jump around this
1874      // code.
1875      if (done.is_linked()) {
1876        NearLabel call;
1877        __ jmp(&call);
1878        __ bind(&done);
1879        // Push function.
1880        __ push(rax);
1881        // Push global receiver.
1882        __ movq(rbx, GlobalObjectOperand());
1883        __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
1884        __ bind(&call);
1885      }
1886    }
1887
1888    EmitCallWithStub(expr);
1889
1890  } else if (fun->AsProperty() != NULL) {
1891    // Call to an object property.
1892    Property* prop = fun->AsProperty();
1893    Literal* key = prop->key()->AsLiteral();
1894    if (key != NULL && key->handle()->IsSymbol()) {
1895      // Call to a named property, use call IC.
1896      { PreserveStatementPositionScope scope(masm()->positions_recorder());
1897        VisitForStackValue(prop->obj());
1898      }
1899      EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
1900    } else {
1901      // Call to a keyed property.
1902      // For a synthetic property use keyed load IC followed by function call,
1903      // for a regular property use KeyedCallIC.
1904      { PreserveStatementPositionScope scope(masm()->positions_recorder());
1905        VisitForStackValue(prop->obj());
1906      }
1907      if (prop->is_synthetic()) {
1908        { PreserveStatementPositionScope scope(masm()->positions_recorder());
1909          VisitForAccumulatorValue(prop->key());
1910          __ movq(rdx, Operand(rsp, 0));
1911        }
1912        // Record source code position for IC call.
1913        SetSourcePosition(prop->position(), FORCED_POSITION);
1914        Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
1915        EmitCallIC(ic, RelocInfo::CODE_TARGET);
1916        // Pop receiver.
1917        __ pop(rbx);
1918        // Push result (function).
1919        __ push(rax);
1920        // Push receiver object on stack.
1921        __ movq(rcx, GlobalObjectOperand());
1922        __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
1923        EmitCallWithStub(expr);
1924      } else {
1925        EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
1926      }
1927    }
1928  } else {
1929    // Call to some other expression.  If the expression is an anonymous
    // function literal not called in a loop, mark it as one that should
    // also use the full code generator.
1932    FunctionLiteral* lit = fun->AsFunctionLiteral();
1933    if (lit != NULL &&
1934        lit->name()->Equals(Heap::empty_string()) &&
1935        loop_depth() == 0) {
1936      lit->set_try_full_codegen(true);
1937    }
1938    { PreserveStatementPositionScope scope(masm()->positions_recorder());
1939      VisitForStackValue(fun);
1940    }
1941    // Load global receiver object.
1942    __ movq(rbx, GlobalObjectOperand());
1943    __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
1944    // Emit function call.
1945    EmitCallWithStub(expr);
1946  }
1947}
1948
1949
1950void FullCodeGenerator::VisitCallNew(CallNew* expr) {
1951  Comment cmnt(masm_, "[ CallNew");
1952  // According to ECMA-262, section 11.2.2, page 44, the function
1953  // expression in new calls must be evaluated before the
1954  // arguments.
1955
1956  // Push constructor on the stack.  If it's not a function it's used as
1957  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
1958  // ignored.
1959  VisitForStackValue(expr->expression());
1960
1961  // Push the arguments ("left-to-right") on the stack.
1962  ZoneList<Expression*>* args = expr->arguments();
1963  int arg_count = args->length();
1964  for (int i = 0; i < arg_count; i++) {
1965    VisitForStackValue(args->at(i));
1966  }
1967
1968  // Call the construct call builtin that handles allocation and
1969  // constructor invocation.
1970  SetSourcePosition(expr->position());
1971
1972  // Load function and argument count into rdi and rax.
1973  __ Set(rax, arg_count);
1974  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
1975
1976  Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
1977  __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
1978  context()->Plug(rax);
1979}
1980
1981
1982void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
1983  ASSERT(args->length() == 1);
1984
1985  VisitForAccumulatorValue(args->at(0));
1986
1987  Label materialize_true, materialize_false;
1988  Label* if_true = NULL;
1989  Label* if_false = NULL;
1990  Label* fall_through = NULL;
1991  context()->PrepareTest(&materialize_true, &materialize_false,
1992                         &if_true, &if_false, &fall_through);
1993
1994  __ JumpIfSmi(rax, if_true);
1995  __ jmp(if_false);
1996
1997  context()->Plug(if_true, if_false);
1998}
1999
2000
2001void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2002  ASSERT(args->length() == 1);
2003
2004  VisitForAccumulatorValue(args->at(0));
2005
2006  Label materialize_true, materialize_false;
2007  Label* if_true = NULL;
2008  Label* if_false = NULL;
2009  Label* fall_through = NULL;
2010  context()->PrepareTest(&materialize_true, &materialize_false,
2011                         &if_true, &if_false, &fall_through);
2012
2013  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2014  Split(non_negative_smi, if_true, if_false, fall_through);
2015
2016  context()->Plug(if_true, if_false);
2017}
2018
2019
2020void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2021  ASSERT(args->length() == 1);
2022
2023  VisitForAccumulatorValue(args->at(0));
2024
2025  Label materialize_true, materialize_false;
2026  Label* if_true = NULL;
2027  Label* if_false = NULL;
2028  Label* fall_through = NULL;
2029  context()->PrepareTest(&materialize_true, &materialize_false,
2030                         &if_true, &if_false, &fall_through);
2031
2032  __ JumpIfSmi(rax, if_false);
2033  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2034  __ j(equal, if_true);
2035  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2036  // Undetectable objects behave like undefined when tested with typeof.
2037  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2038           Immediate(1 << Map::kIsUndetectable));
2039  __ j(not_zero, if_false);
2040  __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2041  __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
2042  __ j(below, if_false);
2043  __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
2044  Split(below_equal, if_true, if_false, fall_through);
2045
2046  context()->Plug(if_true, if_false);
2047}
2048
2049
2050void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2051  ASSERT(args->length() == 1);
2052
2053  VisitForAccumulatorValue(args->at(0));
2054
2055  Label materialize_true, materialize_false;
2056  Label* if_true = NULL;
2057  Label* if_false = NULL;
2058  Label* fall_through = NULL;
2059  context()->PrepareTest(&materialize_true, &materialize_false,
2060                         &if_true, &if_false, &fall_through);
2061
2062  __ JumpIfSmi(rax, if_false);
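  // A "spec object" is a heap object with an instance type of at least
  // FIRST_JS_OBJECT_TYPE, i.e. a real JS object, including functions.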
2063  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2064  Split(above_equal, if_true, if_false, fall_through);
2065
2066  context()->Plug(if_true, if_false);
2067}
2068
2069
2070void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2071  ASSERT(args->length() == 1);
2072
2073  VisitForAccumulatorValue(args->at(0));
2074
2075  Label materialize_true, materialize_false;
2076  Label* if_true = NULL;
2077  Label* if_false = NULL;
2078  Label* fall_through = NULL;
2079  context()->PrepareTest(&materialize_true, &materialize_false,
2080                         &if_true, &if_false, &fall_through);
2081
2082  __ JumpIfSmi(rax, if_false);
2083  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2084  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2085           Immediate(1 << Map::kIsUndetectable));
2086  Split(not_zero, if_true, if_false, fall_through);
2087
2088  context()->Plug(if_true, if_false);
2089}
2090
2091
2092void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2093    ZoneList<Expression*>* args) {
2094  ASSERT(args->length() == 1);
2095
2096  VisitForAccumulatorValue(args->at(0));
2097
2098  Label materialize_true, materialize_false;
2099  Label* if_true = NULL;
2100  Label* if_false = NULL;
2101  Label* fall_through = NULL;
2102  context()->PrepareTest(&materialize_true, &materialize_false,
2103                         &if_true, &if_false, &fall_through);
2104
2105  // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
2106  // used in a few functions in runtime.js which should not normally be hit by
2107  // this compiler.
2108  __ jmp(if_false);
2109  context()->Plug(if_true, if_false);
2110}
2111
2112
2113void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2114  ASSERT(args->length() == 1);
2115
2116  VisitForAccumulatorValue(args->at(0));
2117
2118  Label materialize_true, materialize_false;
2119  Label* if_true = NULL;
2120  Label* if_false = NULL;
2121  Label* fall_through = NULL;
2122  context()->PrepareTest(&materialize_true, &materialize_false,
2123                         &if_true, &if_false, &fall_through);
2124
2125  __ JumpIfSmi(rax, if_false);
2126  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2127  Split(equal, if_true, if_false, fall_through);
2128
2129  context()->Plug(if_true, if_false);
2130}
2131
2132
2133void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2134  ASSERT(args->length() == 1);
2135
2136  VisitForAccumulatorValue(args->at(0));
2137
2138  Label materialize_true, materialize_false;
2139  Label* if_true = NULL;
2140  Label* if_false = NULL;
2141  Label* fall_through = NULL;
2142  context()->PrepareTest(&materialize_true, &materialize_false,
2143                         &if_true, &if_false, &fall_through);
2144
2145  __ JumpIfSmi(rax, if_false);
2146  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2147  Split(equal, if_true, if_false, fall_through);
2148
2149  context()->Plug(if_true, if_false);
2150}
2151
2152
2153void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2154  ASSERT(args->length() == 1);
2155
2156  VisitForAccumulatorValue(args->at(0));
2157
2158  Label materialize_true, materialize_false;
2159  Label* if_true = NULL;
2160  Label* if_false = NULL;
2161  Label* fall_through = NULL;
2162  context()->PrepareTest(&materialize_true, &materialize_false,
2163                         &if_true, &if_false, &fall_through);
2164
2165  __ JumpIfSmi(rax, if_false);
2166  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2167  Split(equal, if_true, if_false, fall_through);
2168
2169  context()->Plug(if_true, if_false);
2170}
2171
2172
2174void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2175  ASSERT(args->length() == 0);
2176
2177  Label materialize_true, materialize_false;
2178  Label* if_true = NULL;
2179  Label* if_false = NULL;
2180  Label* fall_through = NULL;
2181  context()->PrepareTest(&materialize_true, &materialize_false,
2182                         &if_true, &if_false, &fall_through);
2183
2184  // Get the frame pointer for the calling frame.
2185  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2186
2187  // Skip the arguments adaptor frame if it exists.
2188  Label check_frame_marker;
2189  __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
2190                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2191  __ j(not_equal, &check_frame_marker);
2192  __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2193
2194  // Check the marker in the calling frame.
2195  __ bind(&check_frame_marker);
2196  __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
2197                Smi::FromInt(StackFrame::CONSTRUCT));
2198  Split(equal, if_true, if_false, fall_through);
2199
2200  context()->Plug(if_true, if_false);
2201}
2202
2203
2204void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2205  ASSERT(args->length() == 2);
2206
2207  // Load the two objects into registers and perform the comparison.
2208  VisitForStackValue(args->at(0));
2209  VisitForAccumulatorValue(args->at(1));
2210
2211  Label materialize_true, materialize_false;
2212  Label* if_true = NULL;
2213  Label* if_false = NULL;
2214  Label* fall_through = NULL;
2215  context()->PrepareTest(&materialize_true, &materialize_false,
2216                         &if_true, &if_false, &fall_through);
2217
2218  __ pop(rbx);
2219  __ cmpq(rax, rbx);
2220  Split(equal, if_true, if_false, fall_through);
2221
2222  context()->Plug(if_true, if_false);
2223}
2224
2225
2226void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2227  ASSERT(args->length() == 1);
2228
2229  // ArgumentsAccessStub expects the key in rdx and the formal
2230  // parameter count in rax.
2231  VisitForAccumulatorValue(args->at(0));
2232  __ movq(rdx, rax);
2233  __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2234  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2235  __ CallStub(&stub);
2236  context()->Plug(rax);
2237}
2238
2239
2240void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2241  ASSERT(args->length() == 0);
2242
2243  NearLabel exit;
2244  // Get the number of formal parameters.
2245  __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2246
2247  // Check if the calling frame is an arguments adaptor frame.
2248  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2249  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
2250                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2251  __ j(not_equal, &exit);
2252
2253  // Arguments adaptor case: Read the arguments length from the
2254  // adaptor frame.
2255  __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2256
2257  __ bind(&exit);
2258  if (FLAG_debug_code) __ AbortIfNotSmi(rax);
2259  context()->Plug(rax);
2260}
2261
2262
2263void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2264  ASSERT(args->length() == 1);
2265  Label done, null, function, non_function_constructor;
2266
2267  VisitForAccumulatorValue(args->at(0));
2268
2269  // If the object is a smi, we return null.
2270  __ JumpIfSmi(rax, &null);
2271
2272  // Check that the object is a JS object but take special care of JS
2273  // functions to make sure they have 'Function' as their class.
2274  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);  // Map is now in rax.
2275  __ j(below, &null);
2276
2277  // As long as JS_FUNCTION_TYPE is the last instance type and it is
2278  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2279  // LAST_JS_OBJECT_TYPE.
2280  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2281  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2282  __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
2283  __ j(equal, &function);
2284
2285  // Check if the constructor in the map is a function.
2286  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2287  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2288  __ j(not_equal, &non_function_constructor);
2289
2290  // rax now contains the constructor function. Grab the
2291  // instance class name from there.
2292  __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2293  __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2294  __ jmp(&done);
2295
2296  // Functions have class 'Function'.
2297  __ bind(&function);
2298  __ Move(rax, Factory::function_class_symbol());
2299  __ jmp(&done);
2300
2301  // Objects with a non-function constructor have class 'Object'.
2302  __ bind(&non_function_constructor);
2303  __ Move(rax, Factory::Object_symbol());
2304  __ jmp(&done);
2305
2306  // Non-JS objects have class null.
2307  __ bind(&null);
2308  __ LoadRoot(rax, Heap::kNullValueRootIndex);
2309
2310  // All done.
2311  __ bind(&done);
2312
2313  context()->Plug(rax);
2314}
2315
2316
2317void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2318  // Conditionally generate a log call.
2319  // Args:
2320  //   0 (literal string): The type of logging (corresponds to the flags).
2321  //     This is used to determine whether or not to generate the log call.
2322  //   1 (string): Format string.  Access the string at argument index 2
2323  //     with '%2s' (see Logger::LogRuntime for all the formats).
2324  //   2 (array): Arguments to the format string.
2325  ASSERT_EQ(args->length(), 3);
2326#ifdef ENABLE_LOGGING_AND_PROFILING
2327  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2328    VisitForStackValue(args->at(1));
2329    VisitForStackValue(args->at(2));
2330    __ CallRuntime(Runtime::kLog, 2);
2331  }
2332#endif
2333  // Finally, we're expected to leave a value on the top of the stack.
2334  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2335  context()->Plug(rax);
2336}
2337
2338
2339void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2340  ASSERT(args->length() == 0);
2341
2342  Label slow_allocate_heapnumber;
2343  Label heapnumber_allocated;
2344
2345  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
2346  __ jmp(&heapnumber_allocated);
2347
2348  __ bind(&slow_allocate_heapnumber);
2349  // Allocate a heap number.
2350  __ CallRuntime(Runtime::kNumberAlloc, 0);
2351  __ movq(rbx, rax);
2352
2353  __ bind(&heapnumber_allocated);
2354
2355  // Return a random uint32 number in rax.
2356  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
2357  __ PrepareCallCFunction(0);
2358  __ CallCFunction(ExternalReference::random_uint32_function(), 0);
2359
2360  // Convert 32 random bits in rax to 0.(32 random bits) in a double
2361  // by computing:
  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
  __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
  __ movd(xmm1, rcx);
  __ movd(xmm0, rax);
  __ cvtss2sd(xmm1, xmm1);  // xmm1 is now 1.0 x 2^20 as a double.
  __ xorpd(xmm0, xmm1);  // Merge the random bits into the zero mantissa.
  __ subsd(xmm0, xmm1);  // Subtract 2^20, leaving 0.(32 random bits).
  __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
2370
2371  __ movq(rax, rbx);
2372  context()->Plug(rax);
2373}
2374
2375
2376void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2377  // Load the arguments on the stack and call the stub.
2378  SubStringStub stub;
2379  ASSERT(args->length() == 3);
2380  VisitForStackValue(args->at(0));
2381  VisitForStackValue(args->at(1));
2382  VisitForStackValue(args->at(2));
2383  __ CallStub(&stub);
2384  context()->Plug(rax);
2385}
2386
2387
2388void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2389  // Load the arguments on the stack and call the stub.
2390  RegExpExecStub stub;
2391  ASSERT(args->length() == 4);
2392  VisitForStackValue(args->at(0));
2393  VisitForStackValue(args->at(1));
2394  VisitForStackValue(args->at(2));
2395  VisitForStackValue(args->at(3));
2396  __ CallStub(&stub);
2397  context()->Plug(rax);
2398}
2399
2400
2401void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2402  ASSERT(args->length() == 1);
2403
2404  VisitForAccumulatorValue(args->at(0));  // Load the object.
2405
2406  Label done;
2407  // If the object is a smi return the object.
2408  __ JumpIfSmi(rax, &done);
2409  // If the object is not a value type, return the object.
2410  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
2411  __ j(not_equal, &done);
2412  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
2413
2414  __ bind(&done);
2415  context()->Plug(rax);
2416}
2417
2418
2419void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2420  // Load the arguments on the stack and call the runtime function.
2421  ASSERT(args->length() == 2);
2422  VisitForStackValue(args->at(0));
2423  VisitForStackValue(args->at(1));
2424  __ CallRuntime(Runtime::kMath_pow, 2);
2425  context()->Plug(rax);
2426}
2427
2428
2429void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2430  ASSERT(args->length() == 2);
2431
2432  VisitForStackValue(args->at(0));  // Load the object.
2433  VisitForAccumulatorValue(args->at(1));  // Load the value.
2434  __ pop(rbx);  // rax = value. rbx = object.
2435
2436  Label done;
2437  // If the object is a smi, return the value.
2438  __ JumpIfSmi(rbx, &done);
2439
2440  // If the object is not a value type, return the value.
2441  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
2442  __ j(not_equal, &done);
2443
2444  // Store the value.
2445  __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
2446  // Update the write barrier.  Save the value as it will be
2447  // overwritten by the write barrier code and is needed afterward.
2448  __ movq(rdx, rax);
2449  __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx);
2450
2451  __ bind(&done);
2452  context()->Plug(rax);
2453}
2454
2455
2456void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2457  ASSERT_EQ(args->length(), 1);
2458
2459  // Load the argument on the stack and call the stub.
2460  VisitForStackValue(args->at(0));
2461
2462  NumberToStringStub stub;
2463  __ CallStub(&stub);
2464  context()->Plug(rax);
2465}
2466
2467
2468void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
2469  ASSERT(args->length() == 1);
2470
2471  VisitForAccumulatorValue(args->at(0));
2472
2473  Label done;
2474  StringCharFromCodeGenerator generator(rax, rbx);
2475  generator.GenerateFast(masm_);
2476  __ jmp(&done);
2477
2478  NopRuntimeCallHelper call_helper;
2479  generator.GenerateSlow(masm_, call_helper);
2480
2481  __ bind(&done);
2482  context()->Plug(rbx);
2483}
2484
2485
2486void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
2487  ASSERT(args->length() == 2);
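  // Inlined %_StringCharCodeAt(string, index).  The generated fast path
  // handles smi indices into flat strings; all other inputs bail out to
  // the slow-case labels below.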
2488
2489  VisitForStackValue(args->at(0));
2490  VisitForAccumulatorValue(args->at(1));
2491
2492  Register object = rbx;
2493  Register index = rax;
2494  Register scratch = rcx;
2495  Register result = rdx;
2496
2497  __ pop(object);
2498
2499  Label need_conversion;
2500  Label index_out_of_range;
2501  Label done;
2502  StringCharCodeAtGenerator generator(object,
2503                                      index,
2504                                      scratch,
2505                                      result,
2506                                      &need_conversion,
2507                                      &need_conversion,
2508                                      &index_out_of_range,
2509                                      STRING_INDEX_IS_NUMBER);
2510  generator.GenerateFast(masm_);
2511  __ jmp(&done);
2512
2513  __ bind(&index_out_of_range);
2514  // When the index is out of range, the spec requires us to return
2515  // NaN.
2516  __ LoadRoot(result, Heap::kNanValueRootIndex);
2517  __ jmp(&done);
2518
2519  __ bind(&need_conversion);
2520  // Move the undefined value into the result register, which will
2521  // trigger conversion.
2522  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2523  __ jmp(&done);
2524
2525  NopRuntimeCallHelper call_helper;
2526  generator.GenerateSlow(masm_, call_helper);
2527
2528  __ bind(&done);
2529  context()->Plug(result);
2530}
2531
2532
2533void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
2534  ASSERT(args->length() == 2);
2535
2536  VisitForStackValue(args->at(0));
2537  VisitForAccumulatorValue(args->at(1));
2538
2539  Register object = rbx;
2540  Register index = rax;
2541  Register scratch1 = rcx;
2542  Register scratch2 = rdx;
2543  Register result = rax;
2544
2545  __ pop(object);
2546
2547  Label need_conversion;
2548  Label index_out_of_range;
2549  Label done;
2550  StringCharAtGenerator generator(object,
2551                                  index,
2552                                  scratch1,
2553                                  scratch2,
2554                                  result,
2555                                  &need_conversion,
2556                                  &need_conversion,
2557                                  &index_out_of_range,
2558                                  STRING_INDEX_IS_NUMBER);
2559  generator.GenerateFast(masm_);
2560  __ jmp(&done);
2561
2562  __ bind(&index_out_of_range);
2563  // When the index is out of range, the spec requires us to return
2564  // the empty string.
2565  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
2566  __ jmp(&done);
2567
2568  __ bind(&need_conversion);
2569  // Move smi zero into the result register, which will trigger
2570  // conversion.
2571  __ Move(result, Smi::FromInt(0));
2572  __ jmp(&done);
2573
2574  NopRuntimeCallHelper call_helper;
2575  generator.GenerateSlow(masm_, call_helper);
2576
2577  __ bind(&done);
2578  context()->Plug(result);
2579}
2580
2581
2582void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
2583  ASSERT_EQ(2, args->length());
2584
2585  VisitForStackValue(args->at(0));
2586  VisitForStackValue(args->at(1));
2587
2588  StringAddStub stub(NO_STRING_ADD_FLAGS);
2589  __ CallStub(&stub);
2590  context()->Plug(rax);
2591}
2592
2593
2594void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
2595  ASSERT_EQ(2, args->length());
2596
2597  VisitForStackValue(args->at(0));
2598  VisitForStackValue(args->at(1));
2599
2600  StringCompareStub stub;
2601  __ CallStub(&stub);
2602  context()->Plug(rax);
2603}
2604
2605
2606void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
2607  // Load the argument on the stack and call the stub.
2608  TranscendentalCacheStub stub(TranscendentalCache::SIN);
2609  ASSERT(args->length() == 1);
2610  VisitForStackValue(args->at(0));
2611  __ CallStub(&stub);
2612  context()->Plug(rax);
2613}
2614
2615
2616void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
2617  // Load the argument on the stack and call the stub.
2618  TranscendentalCacheStub stub(TranscendentalCache::COS);
2619  ASSERT(args->length() == 1);
2620  VisitForStackValue(args->at(0));
2621  __ CallStub(&stub);
2622  context()->Plug(rax);
2623}
2624
2625
2626void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
2627  // Load the argument on the stack and call the runtime function.
2628  ASSERT(args->length() == 1);
2629  VisitForStackValue(args->at(0));
2630  __ CallRuntime(Runtime::kMath_sqrt, 1);
2631  context()->Plug(rax);
2632}
2633
2634
2635void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
2636  ASSERT(args->length() >= 2);
2637
2638  int arg_count = args->length() - 2;  // For receiver and function.
2639  VisitForStackValue(args->at(0));  // Receiver.
2640  for (int i = 0; i < arg_count; i++) {
2641    VisitForStackValue(args->at(i + 1));
2642  }
2643  VisitForAccumulatorValue(args->at(arg_count + 1));  // Function.
2644
2645  // InvokeFunction requires function in rdi. Move it in there.
2646  if (!result_register().is(rdi)) __ movq(rdi, result_register());
2647  ParameterCount count(arg_count);
2648  __ InvokeFunction(rdi, count, CALL_FUNCTION);
2649  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2650  context()->Plug(rax);
2651}
2652
2653
2654void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
2655  ASSERT(args->length() == 3);
2656  VisitForStackValue(args->at(0));
2657  VisitForStackValue(args->at(1));
2658  VisitForStackValue(args->at(2));
2659  __ CallRuntime(Runtime::kRegExpConstructResult, 3);
2660  context()->Plug(rax);
2661}
2662
2663
2664void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
2665  ASSERT(args->length() == 3);
2666  VisitForStackValue(args->at(0));
2667  VisitForStackValue(args->at(1));
2668  VisitForStackValue(args->at(2));
2669  __ CallRuntime(Runtime::kSwapElements, 3);
2670  context()->Plug(rax);
2671}
2672
2673
2674void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
2675  ASSERT_EQ(2, args->length());
2676
2677  ASSERT_NE(NULL, args->at(0)->AsLiteral());
2678  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
2679
2680  Handle<FixedArray> jsfunction_result_caches(
2681      Top::global_context()->jsfunction_result_caches());
2682  if (jsfunction_result_caches->length() <= cache_id) {
2683    __ Abort("Attempt to use undefined cache.");
2684    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2685    context()->Plug(rax);
2686    return;
2687  }
2688
2689  VisitForAccumulatorValue(args->at(1));
2690
2691  Register key = rax;
2692  Register cache = rbx;
2693  Register tmp = rcx;
2694  __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
2695  __ movq(cache,
2696          FieldOperand(cache, GlobalObject::kGlobalContextOffset));
2697  __ movq(cache,
2698          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
2699  __ movq(cache,
2700          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
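  // The cache is a FixedArray of (key, value) pairs; its finger field
  // holds the offset of the most recently used pair.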
2701
  NearLabel done, not_found;
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  // Load the finger; tmp then holds the finger offset as a smi.
  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
2706  SmiIndex index =
2707      __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
2708  __ cmpq(key, FieldOperand(cache,
2709                            index.reg,
2710                            index.scale,
2711                            FixedArray::kHeaderSize));
2712  __ j(not_equal, &not_found);
2713  __ movq(rax, FieldOperand(cache,
2714                            index.reg,
2715                            index.scale,
2716                            FixedArray::kHeaderSize + kPointerSize));
2717  __ jmp(&done);
2718
2719  __ bind(&not_found);
2720  // Call runtime to perform the lookup.
2721  __ push(cache);
2722  __ push(key);
2723  __ CallRuntime(Runtime::kGetFromCache, 2);
2724
2725  __ bind(&done);
2726  context()->Plug(rax);
2727}
2728
2729
2730void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
2731  ASSERT_EQ(2, args->length());
2732
2733  Register right = rax;
2734  Register left = rbx;
2735  Register tmp = rcx;
2736
2737  VisitForStackValue(args->at(0));
2738  VisitForAccumulatorValue(args->at(1));
2739  __ pop(left);
2740
2741  NearLabel done, fail, ok;
2742  __ cmpq(left, right);
2743  __ j(equal, &ok);
2744  // Fail if either is a non-HeapObject.
2745  Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
2746  __ j(either_smi, &fail);
2748  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
2749  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
2750          Immediate(JS_REGEXP_TYPE));
2751  __ j(not_equal, &fail);
2752  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
2753  __ j(not_equal, &fail);
2754  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
2755  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
2756  __ j(equal, &ok);
2757  __ bind(&fail);
2758  __ Move(rax, Factory::false_value());
2759  __ jmp(&done);
2760  __ bind(&ok);
2761  __ Move(rax, Factory::true_value());
2762  __ bind(&done);
2763
2764  context()->Plug(rax);
2765}
2766
2767
2768void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
2769  ASSERT(args->length() == 1);
2770
2771  VisitForAccumulatorValue(args->at(0));
2772
2773  Label materialize_true, materialize_false;
2774  Label* if_true = NULL;
2775  Label* if_false = NULL;
2776  Label* fall_through = NULL;
2777  context()->PrepareTest(&materialize_true, &materialize_false,
2778                         &if_true, &if_false, &fall_through);
2779
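  // The string's hash field caches an array index iff the bits selected
  // by kContainsCachedArrayIndexMask are all zero.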
2780  __ testl(FieldOperand(rax, String::kHashFieldOffset),
2781           Immediate(String::kContainsCachedArrayIndexMask));
2782  __ j(zero, if_true);
2783  __ jmp(if_false);
2784
2785  context()->Plug(if_true, if_false);
2786}
2787
2788
2789void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
2790  ASSERT(args->length() == 1);
2791
2792  VisitForAccumulatorValue(args->at(0));
2793
2794  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
2795  ASSERT(String::kHashShift >= kSmiTagSize);
2796  __ IndexFromHash(rax, rax);
2797
2798  context()->Plug(rax);
2799}
2800
2801
2802void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
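  // Not inlined on x64.  Returning undefined signals the calling JS code
  // to fall back to the generic join implementation.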
2803  context()->Plug(Heap::kUndefinedValueRootIndex);
2804}
2805
2806
2807void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
2808  Handle<String> name = expr->name();
2809  if (name->length() > 0 && name->Get(0) == '_') {
2810    Comment cmnt(masm_, "[ InlineRuntimeCall");
2811    EmitInlineRuntimeCall(expr);
2812    return;
2813  }
2814
2815  Comment cmnt(masm_, "[ CallRuntime");
2816  ZoneList<Expression*>* args = expr->arguments();
2817
2818  if (expr->is_jsruntime()) {
2819    // Prepare for calling JS runtime function.
2820    __ movq(rax, GlobalObjectOperand());
2821    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
2822  }
2823
2824  // Push the arguments ("left-to-right").
2825  int arg_count = args->length();
2826  for (int i = 0; i < arg_count; i++) {
2827    VisitForStackValue(args->at(i));
2828  }
2829
2830  if (expr->is_jsruntime()) {
2831    // Call the JS runtime function using a call IC.
2832    __ Move(rcx, expr->name());
2833    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2834    Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
2835    EmitCallIC(ic, RelocInfo::CODE_TARGET);
2836    // Restore context register.
2837    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2838  } else {
2839    __ CallRuntime(expr->function(), arg_count);
2840  }
2841  context()->Plug(rax);
2842}
2843
2844
2845void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2846  switch (expr->op()) {
2847    case Token::DELETE: {
2848      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2849      Property* prop = expr->expression()->AsProperty();
2850      Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
2851      if (prop == NULL && var == NULL) {
2852        // Result of deleting non-property, non-variable reference is true.
2853        // The subexpression may have side effects.
2854        VisitForEffect(expr->expression());
2855        context()->Plug(true);
2856      } else if (var != NULL &&
2857                 !var->is_global() &&
2858                 var->AsSlot() != NULL &&
2859                 var->AsSlot()->type() != Slot::LOOKUP) {
2860        // Result of deleting non-global, non-dynamic variables is false.
2861        // The subexpression does not have side effects.
2862        context()->Plug(false);
2863      } else {
2864        // Property or variable reference.  Call the delete builtin with
2865        // object and property name as arguments.
2866        if (prop != NULL) {
2867          VisitForStackValue(prop->obj());
2868          VisitForStackValue(prop->key());
2869        } else if (var->is_global()) {
2870          __ push(GlobalObjectOperand());
2871          __ Push(var->name());
2872        } else {
2873          // Non-global variable.  Call the runtime to look up the context
2874          // where the variable was introduced.
2875          __ push(context_register());
2876          __ Push(var->name());
2877          __ CallRuntime(Runtime::kLookupContext, 2);
2878          __ push(rax);
2879          __ Push(var->name());
2880        }
2881        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
2882        context()->Plug(rax);
2883      }
2884      break;
2885    }
2886
2887    case Token::VOID: {
2888      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2889      VisitForEffect(expr->expression());
2890      context()->Plug(Heap::kUndefinedValueRootIndex);
2891      break;
2892    }
2893
2894    case Token::NOT: {
2895      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2896      Label materialize_true, materialize_false;
2897      Label* if_true = NULL;
2898      Label* if_false = NULL;
2899      Label* fall_through = NULL;
2900      // Notice that the labels are swapped.
2901      context()->PrepareTest(&materialize_true, &materialize_false,
2902                             &if_false, &if_true, &fall_through);
2903      VisitForControl(expr->expression(), if_true, if_false, fall_through);
2904      context()->Plug(if_false, if_true);  // Labels swapped.
2905      break;
2906    }
2907
2908    case Token::TYPEOF: {
2909      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
2910      { StackValueContext context(this);
2911        VisitForTypeofValue(expr->expression());
2912      }
2913      __ CallRuntime(Runtime::kTypeof, 1);
2914      context()->Plug(rax);
2915      break;
2916    }
2917
2918    case Token::ADD: {
      Comment cmnt(masm_, "[ UnaryOperation (ADD)");
      // Unary '+' is ToNumber, which is the identity on smis, so only
      // non-smi values need the TO_NUMBER builtin call.
      VisitForAccumulatorValue(expr->expression());
2921      NearLabel no_conversion;
2922      Condition is_smi = masm_->CheckSmi(result_register());
2923      __ j(is_smi, &no_conversion);
2924      __ push(result_register());
2925      __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
2926      __ bind(&no_conversion);
2927      context()->Plug(result_register());
2928      break;
2929    }
2930
2931    case Token::SUB: {
      Comment cmnt(masm_, "[ UnaryOperation (SUB)");
2933      bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
2934      UnaryOverwriteMode overwrite =
2935          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
2936      GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
2937      // GenericUnaryOpStub expects the argument to be in the
2938      // accumulator register rax.
2939      VisitForAccumulatorValue(expr->expression());
2940      __ CallStub(&stub);
2941      context()->Plug(rax);
2942      break;
2943    }
2944
2945    case Token::BIT_NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (BIT_NOT)");
2947      // The generic unary operation stub expects the argument to be
2948      // in the accumulator register rax.
2949      VisitForAccumulatorValue(expr->expression());
2950      Label done;
2951      bool inline_smi_case = ShouldInlineSmiCase(expr->op());
2952      if (inline_smi_case) {
2953        Label call_stub;
2954        __ JumpIfNotSmi(rax, &call_stub);
2955        __ SmiNot(rax, rax);
2956        __ jmp(&done);
2957        __ bind(&call_stub);
2958      }
2959      bool overwrite = expr->expression()->ResultOverwriteAllowed();
2960      UnaryOverwriteMode mode =
2961          overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
2962      UnaryOpFlags flags = inline_smi_case
2963          ? NO_UNARY_SMI_CODE_IN_STUB
2964          : NO_UNARY_FLAGS;
2965      GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
2966      __ CallStub(&stub);
2967      __ bind(&done);
2968      context()->Plug(rax);
2969      break;
2970    }
2971
2972    default:
2973      UNREACHABLE();
2974  }
2975}
2976
2977
2978void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
2979  Comment cmnt(masm_, "[ CountOperation");
2980  SetSourcePosition(expr->position());
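  // Handles both prefix and postfix count operations, e.g. ++x, x--,
  // ++obj.prop, and obj[key]--.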
2981
2982  // Invalid left-hand-sides are rewritten to have a 'throw
2983  // ReferenceError' as the left-hand side.
2984  if (!expr->expression()->IsValidLeftHandSide()) {
2985    VisitForEffect(expr->expression());
2986    return;
2987  }
2988
2989  // Expression can only be a property, a global or a (parameter or local)
2990  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
2991  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2992  LhsKind assign_type = VARIABLE;
2993  Property* prop = expr->expression()->AsProperty();
2994  // In case of a property we use the uninitialized expression context
2995  // of the key to detect a named property.
2996  if (prop != NULL) {
2997    assign_type =
2998        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
2999  }
3000
3001  // Evaluate expression and get value.
3002  if (assign_type == VARIABLE) {
3003    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3004    AccumulatorValueContext context(this);
3005    EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3006  } else {
3007    // Reserve space for result of postfix operation.
3008    if (expr->is_postfix() && !context()->IsEffect()) {
3009      __ Push(Smi::FromInt(0));
3010    }
3011    if (assign_type == NAMED_PROPERTY) {
3012      VisitForAccumulatorValue(prop->obj());
3013      __ push(rax);  // Copy of receiver, needed for later store.
3014      EmitNamedPropertyLoad(prop);
3015    } else {
3016      VisitForStackValue(prop->obj());
3017      VisitForAccumulatorValue(prop->key());
      __ movq(rdx, Operand(rsp, 0));  // Leave receiver on the stack.
3019      __ push(rax);  // Copy of key, needed for later store.
3020      EmitKeyedPropertyLoad(prop);
3021    }
3022  }
3023
3024  // Call ToNumber only if operand is not a smi.
3025  NearLabel no_conversion;
3026  Condition is_smi;
3027  is_smi = masm_->CheckSmi(rax);
3028  __ j(is_smi, &no_conversion);
3029  __ push(rax);
3030  __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3031  __ bind(&no_conversion);
3032
3033  // Save result for postfix expressions.
3034  if (expr->is_postfix()) {
3035    if (!context()->IsEffect()) {
3036      // Save the result on the stack. If we have a named or keyed property
3037      // we store the result under the receiver that is currently on top
3038      // of the stack.
3039      switch (assign_type) {
3040        case VARIABLE:
3041          __ push(rax);
3042          break;
3043        case NAMED_PROPERTY:
3044          __ movq(Operand(rsp, kPointerSize), rax);
3045          break;
3046        case KEYED_PROPERTY:
3047          __ movq(Operand(rsp, 2 * kPointerSize), rax);
3048          break;
3049      }
3050    }
3051  }
3052
3053  // Inline smi case if we are in a loop.
3054  Label stub_call, done;
3055  if (ShouldInlineSmiCase(expr->op())) {
3056    if (expr->op() == Token::INC) {
3057      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3058    } else {
3059      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3060    }
3061    __ j(overflow, &stub_call);
3062    // We could eliminate this smi check if we split the code at
3063    // the first smi check before calling ToNumber.
3064    is_smi = masm_->CheckSmi(rax);
3065    __ j(is_smi, &done);
3066    __ bind(&stub_call);
3067    // Call stub. Undo operation first.
3068    if (expr->op() == Token::INC) {
3069      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3070    } else {
3071      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3072    }
3073  }
  // Call stub for +1/-1.
  GenericBinaryOpStub stub(expr->binary_op(),
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS);
  stub.GenerateCall(masm_, rax, Smi::FromInt(1));
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
        }
        // For all contexts except kEffect the result is on top of the
        // stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      __ pop(rdx);
      Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(rcx);
      __ pop(rdx);
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}


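// Load the value of an expression for use by typeof.  Unlike a normal
// load, typeof of an unresolvable reference must not throw: e.g.
// typeof undeclared_global evaluates to "undefined" rather than raising
// a ReferenceError, so globals and dynamically scoped variables take
// the no-reference-error load paths below.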
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
    Comment cmnt(masm_, "Global variable");
    __ Move(rcx, proxy->name());
    __ movq(rax, GlobalObjectOperand());
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    context()->Plug(rax);
  } else if (proxy != NULL &&
             proxy->var()->AsSlot() != NULL &&
             proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    Slot* slot = proxy->var()->AsSlot();
    EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(rsi);
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    __ bind(&done);

    context()->Plug(rax);
  } else {
    // This expression cannot throw a reference error at the top level.
    Visit(expr);
  }
}


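// Try to compile a comparison of the form typeof <expression> ==
// <string literal>, e.g. typeof x == "number", as direct inline type
// checks on the value, without materializing the typeof result string.
// Returns false if the expression does not match the pattern and must
// be compiled by the generic compare path.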
bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
                                          Expression* left,
                                          Expression* right,
                                          Label* if_true,
                                          Label* if_false,
                                          Label* fall_through) {
  if (op != Token::EQ && op != Token::EQ_STRICT) return false;

  // Check for the pattern: typeof <expression> == <string literal>.
  Literal* right_literal = right->AsLiteral();
  if (right_literal == NULL) return false;
  Handle<Object> right_literal_value = right_literal->handle();
  if (!right_literal_value->IsString()) return false;
  UnaryOperation* left_unary = left->AsUnaryOperation();
  if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
  Handle<String> check = Handle<String>::cast(right_literal_value);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(left_unary->expression());
  }

  if (check->Equals(Heap::number_symbol())) {
    Condition is_smi = masm_->CheckSmi(rax);
    __ j(is_smi, if_true);
    __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(Heap::string_symbol())) {
    Condition is_smi = masm_->CheckSmi(rax);
    __ j(is_smi, if_false);
    // Check for undetectable objects => false.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, if_false);
    __ CmpInstanceType(rdx, FIRST_NONSTRING_TYPE);
    Split(below, if_true, if_false, fall_through);
  } else if (check->Equals(Heap::boolean_symbol())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(Heap::undefined_symbol())) {
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    Condition is_smi = masm_->CheckSmi(rax);
    __ j(is_smi, if_false);
    // Check for undetectable objects => true.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(Heap::function_symbol())) {
    Condition is_smi = masm_->CheckSmi(rax);
    __ j(is_smi, if_false);
    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
    __ j(equal, if_true);
    // Regular expressions => 'function' (they are callable).
    __ CmpInstanceType(rdx, JS_REGEXP_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(Heap::object_symbol())) {
    Condition is_smi = masm_->CheckSmi(rax);
    __ j(is_smi, if_false);
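    // typeof null is "object", so the null value goes straight to true.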
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_true);
    // Regular expressions => 'function', not 'object'.
    __ CmpObjectType(rax, JS_REGEXP_TYPE, rdx);
    __ j(equal, if_false);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, if_false);
    // Check for JS objects => true.
    __ CmpInstanceType(rdx, FIRST_JS_OBJECT_TYPE);
    __ j(below, if_false);
    __ CmpInstanceType(rdx, LAST_JS_OBJECT_TYPE);
    Split(below_equal, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }

  return true;
}


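// Compile a comparison.  The operands are compared for control flow and
// the boolean result is materialized only if the surrounding expression
// context needs it.  > and <= are compiled as < and >= with the operand
// registers swapped, so the generic stub only has to implement two
// conditions while still seeing the operands in the order ECMA-262
// prescribes for conversion.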
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  Token::Value op = expr->op();
  Expression* left = expr->left();
  Expression* right = expr->right();
  if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
    context()->Plug(if_true, if_false);
    return;
  }

  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub;
      __ CallStub(&stub);
      __ testq(rax, rax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      bool strict = false;
      switch (op) {
        case Token::EQ_STRICT:
          strict = true;
          // Fall through.
        case Token::EQ:
          cc = equal;
          __ pop(rdx);
          break;
        case Token::LT:
          cc = less;
          __ pop(rdx);
          break;
        case Token::GT:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cc = less;
          __ movq(rdx, result_register());
          __ pop(rax);
          break;
        case Token::LTE:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cc = greater_equal;
          __ movq(rdx, result_register());
          __ pop(rax);
          break;
        case Token::GTE:
          cc = greater_equal;
          __ pop(rdx);
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }

      bool inline_smi_code = ShouldInlineSmiCase(op);
      if (inline_smi_code) {
        Label slow_case;
        __ JumpIfNotBothSmi(rax, rdx, &slow_case);
        __ SmiCompare(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      CompareFlags flags = inline_smi_code
          ? NO_SMI_COMPARE_IN_STUB
          : NO_COMPARE_FLAGS;
      CompareStub stub(cc, strict, flags);
      __ CallStub(&stub);
      __ testq(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


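// Compile a comparison against the null value.  For a strict comparison
// (x === null) only null itself matches; the non-strict case (x == null)
// also matches undefined and undetectable objects (e.g. document.all).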
void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
  Comment cmnt(masm_, "[ CompareToNull");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(expr->expression());
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  if (expr->is_strict()) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    Condition is_smi = masm_->CheckSmi(rax);
    __ j(is_smi, if_false);
    // It can be an undetectable object.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}


void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
  ASSERT(mode == RelocInfo::CODE_TARGET ||
         mode == RelocInfo::CODE_TARGET_CONTEXT);
  __ call(ic, mode);

  // If we're calling a (keyed) load or store stub, we have to mark
  // the call as containing no inlined code so we will not attempt to
  // patch it.
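  // (The patching machinery inspects the instruction that follows the
  // call site; the nop emitted below tells it there is no inlined fast
  // case to patch.)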
  switch (ic->kind()) {
    case Code::LOAD_IC:
    case Code::KEYED_LOAD_IC:
    case Code::STORE_IC:
    case Code::KEYED_STORE_IC:
      __ nop();  // Signals no inlined code.
      break;
    default:
      // Do nothing.
      break;
  }
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(IsAligned(frame_offset, kPointerSize));
  __ movq(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movq(dst, ContextOperand(rsi, context_index));
}


// ----------------------------------------------------------------------------
// Non-local control flow support.
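//
// A finally block can be entered while the frame's return address sits
// on the stack.  That address is a raw pointer into a code object which
// the GC may move, so on entry it is "cooked" into a smi-encoded offset
// from the start of the code object, which the GC ignores, and on exit
// it is "uncooked" back into a real address.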


void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Cook the return address on top of the stack into a smi-encoded
  // Code* delta.
  __ movq(rdx, Operand(rsp, 0));
  __ Move(rcx, masm_->CodeObject());
  __ subq(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ movq(Operand(rsp, 0), rdx);
  // Store result register while executing finally block.
  __ push(result_register());
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Restore result register from stack.
  __ pop(result_register());
  // Uncook the return address.
  __ movq(rdx, Operand(rsp, 0));
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addq(rdx, rcx);
  __ movq(Operand(rsp, 0), rdx);
  // And return.
  __ ret(0);
}


#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64