// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/liveedit.h"
#include "src/macro-assembler.h"
#include "src/prettyprinter.h"
#include "src/scopeinfo.h"
#include "src/scopes.h"
#include "src/snapshot.h"

namespace v8 {
namespace internal {

void BreakableStatementChecker::Check(Statement* stmt) {
  Visit(stmt);
}


void BreakableStatementChecker::Check(Expression* expr) {
  Visit(expr);
}
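

// A minimal usage sketch (this mirrors how SetStatementPosition and
// SetExpressionPosition below use the checker):
//
//   BreakableStatementChecker checker(zone);
//   checker.Check(stmt);  // or checker.Check(expr)
//   if (checker.is_breakable()) { /* the debugger can already stop here */ }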


void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}


void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}


void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}


void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}


void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}


void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}


void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}


void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
}


void BreakableStatementChecker::VisitBlock(Block* stmt) {
}


void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}


void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable the if statement is breakable.
  Visit(stmt->condition());
}


void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}


void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}


void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements are breakable if the tag expression is.
  Visit(stmt->tag());
}


void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do while as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}


void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}


void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}


void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
  // For-of is breakable because of the next() call.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try finally as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
}


void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitClassLiteral(ClassLiteral* expr) {
  if (expr->extends() != NULL) {
    Visit(expr->extends());
  }
}


void BreakableStatementChecker::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}


void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}


void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}


void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}


void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}


void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}


void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment is
  // breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}


void BreakableStatementChecker::VisitYield(Yield* expr) {
  // Yield is breakable if the expression is.
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the expression is.
  Visit(expr->exception());
}


void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}


void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
  if (expr->op() != Token::AND &&
      expr->op() != Token::OR) {
    Visit(expr->right());
  }
}


void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}


void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}


void BreakableStatementChecker::VisitSuperReference(SuperReference* expr) {}


#define __ ACCESS_MASM(masm())

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());

  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
  if (info->will_serialize()) masm.enable_serializer();

  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    DCHECK(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->dont_optimize() &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_handler_table(*cgen.handler_table());
  code->set_compiled_optimizable(info->IsOptimizable());
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
  return true;
}


unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries) field,
  // followed by a sequence of entries.  Each entry holds the AST id of the
  // back edge, its code-relative pc offset, and the loop depth.
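  //
  // A sketch of the layout emitted below (each field written with dd()):
  //
  //   [ length | ast_id_0 | pc_0 | depth_0 | ast_id_1 | pc_1 | depth_1 | ... ]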
  masm()->Align(kPointerSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}


void FullCodeGenerator::EnsureSlotContainsAllocationSite(int slot) {
  Handle<FixedArray> vector = FeedbackVector();
  if (!vector->get(slot)->IsAllocationSite()) {
    Handle<AllocationSite> allocation_site =
        isolate()->factory()->NewAllocationSite();
    vector->set(slot, *allocation_site);
  }
}


void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data =
      DeoptimizationOutputData::New(isolate(), length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}


void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  DCHECK(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}


void FullCodeGenerator::Initialize() {
  InitializeAstVisitor(info_->zone());
  // The generation of debug code must match between the snapshot code and the
  // code that is generated later.  This is assumed by the debugger when it is
  // calculating PC offsets after generating a debug version of code.  Therefore
  // we disable the production of debug code in the full compiler if we are
  // either generating a snapshot or we booted from a snapshot.
  generate_debug_code_ = FLAG_debug_code &&
                         !masm_->serializer_enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
}


void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}


void FullCodeGenerator::CallLoadIC(ContextualMode contextual_mode,
                                   TypeFeedbackId id) {
  Handle<Code> ic = CodeFactory::LoadIC(isolate(), contextual_mode).code();
  CallIC(ic, id);
}


void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
  Handle<Code> ic = CodeFactory::StoreIC(isolate(), strict_mode()).code();
  CallIC(ic, id);
}


void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored.  We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  DCHECK(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}


void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
  DCHECK(Smi::IsValid(pc_and_state));
#ifdef DEBUG
  for (int i = 0; i < bailout_entries_.length(); ++i) {
    DCHECK(bailout_entries_[i].id != id);
  }
#endif
  BailoutEntry entry = { id, pc_and_state };
  bailout_entries_.Add(entry, zone());
}


void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a state.
  DCHECK(masm_->pc_offset() > 0);
  DCHECK(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}


bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
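  // For example, under this policy "a + b" inside a loop body takes the
  // inline smi path, while "a / b" and "a % b" are never inlined.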
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}


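// A quick summary of how the expression contexts below consume a computed
// value: an effect context drops it, an accumulator value context moves it
// into the result register, a stack value context pushes it onto the stack,
// and a test context branches on it.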
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}


void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}


void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}


void FullCodeGenerator::StackValueContext::PlugTOS() const {
}


void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}


void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}


void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}


void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
  DCHECK(scope_->is_global_scope());

  for (int i = 0; i < declarations->length(); i++) {
    ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    if (declaration != NULL) {
      ModuleLiteral* module = declaration->module()->AsModuleLiteral();
      if (module != NULL) {
        Comment cmnt(masm_, "[ Link nested modules");
        Scope* scope = module->body()->scope();
        Interface* interface = scope->interface();
        DCHECK(interface->IsModule() && interface->IsFrozen());

        interface->Allocate(scope->module_var()->index());

        // Set up module context.
        DCHECK(scope->interface()->Index() >= 0);
        __ Push(Smi::FromInt(scope->interface()->Index()));
        __ Push(scope->GetScopeInfo());
        __ CallRuntime(Runtime::kPushModuleContext, 2);
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());

        AllocateModules(scope->declarations());

        // Pop module context.
        LoadContextField(context_register(), Context::PREVIOUS_INDEX);
        // Update local stack frame context field.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
    }
  }
}


// Modules have their own local scope, represented by their own context.
// Module instance objects have an accessor for every export that forwards
// access to the respective slot from the module's context. (Exports that are
// modules themselves, however, are simple data properties.)
//
// All modules have a _hosting_ scope/context, which (currently) is the
// (innermost) enclosing global scope. To deal with recursion, nested modules
// are hosted by the same scope as global ones.
//
// For every (global or nested) module literal, the hosting context has an
// internal slot that points directly to the respective module context. This
// enables quick access to (statically resolved) module members by 2-dimensional
// access through the hosting context. For example,
//
//   module A {
//     let x;
//     module B { let y; }
//   }
//   module C { let z; }
//
// allocates contexts as follows:
//
// [header| .A | .B | .C | A | C ]  (global)
//           |    |    |
//           |    |    +-- [header| z ]  (module)
//           |    |
//           |    +------- [header| y ]  (module)
//           |
//           +------------ [header| x | B ]  (module)
//
// Here, .A, .B, .C are the internal slots pointing to the hosted module
// contexts, whereas A, B, C hold the actual instance objects (note that every
// module context also points to the respective instance object through its
// extension slot in the header).
//
// To deal with arbitrary recursion and aliases between modules,
// they are created and initialized in several stages. Each stage applies to
// all modules in the hosting global scope, including nested ones.
//
// 1. Allocate: for each module _literal_, allocate the module contexts and
//    respective instance object and wire them up. This happens in the
//    PushModuleContext runtime function, as generated by AllocateModules
//    (invoked by VisitDeclarations in the hosting scope).
//
// 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
//    assign the respective instance object to respective local variables. This
//    happens in VisitModuleDeclaration, and uses the instance objects created
//    in the previous stage.
//    For each module _literal_, this phase also constructs a module descriptor
//    for the next stage. This happens in VisitModuleLiteral.
//
// 3. Populate: invoke the DeclareModules runtime function to populate each
//    _instance_ object with accessors for its exports. This is generated by
//    DeclareModules (invoked by VisitDeclarations in the hosting scope again),
//    and uses the descriptors generated in the previous stage.
//
// 4. Initialize: execute the module bodies (and other code) in sequence. This
//    happens by the separate statements generated for module bodies. To reenter
//    the module scopes properly, the parser inserted ModuleStatements.

void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  Handle<FixedArray> saved_modules = modules_;
  int saved_module_index = module_index_;
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  if (scope_->num_modules() != 0) {
    // This is a scope hosting modules. Allocate a descriptor array to pass
    // to the runtime for initialization.
    Comment cmnt(masm_, "[ Allocate modules");
    DCHECK(scope_->is_global_scope());
    modules_ =
        isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    module_index_ = 0;

    // Generate code for allocating all modules, including nested ones.
    // The allocated contexts are stored in internal variables in this scope.
    AllocateModules(declarations);
  }

  AstVisitor::VisitDeclarations(declarations);

  if (scope_->num_modules() != 0) {
    // Initialize modules from descriptor array.
    DCHECK(module_index_ == modules_->length());
    DeclareModules(modules_);
    modules_ = saved_modules;
    module_index_ = saved_module_index;
  }

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
       isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}


void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = scope_->interface();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  DCHECK(!modules_.is_null());
  DCHECK(module_index_ < modules_->length());
  int index = module_index_++;

  // Set up module context.
  DCHECK(interface->Index() >= 0);
  __ Push(Smi::FromInt(interface->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  // Populate the module description.
  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
  // TODO(rossberg): dummy allocation for now.
  Scope* scope = module->body()->scope();
  Interface* interface = scope_->interface();

  DCHECK(interface->IsModule() && interface->IsFrozen());
  DCHECK(!modules_.is_null());
  DCHECK(module_index_ < modules_->length());
  interface->Allocate(scope->module_var()->index());
  int index = module_index_++;

  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);
}


int FullCodeGenerator::DeclareGlobalsFlags() {
  DCHECK(DeclareGlobalsStrictMode::is_valid(strict_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
      DeclareGlobalsNativeFlag::encode(is_native()) |
      DeclareGlobalsStrictMode::encode(strict_mode());
}


void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}


void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}


void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
  if (!info_->is_debug()) {
    CodeGenerator::RecordPositions(masm_, stmt->position());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(stmt);
    // Record the statement position right here if the statement is not
    // breakable. For breakable statements the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      DebugCodegen::GenerateSlot(masm_);
    }
  }
}


void FullCodeGenerator::VisitSuperReference(SuperReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError, 0);
}


void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
  if (!info_->is_debug()) {
    CodeGenerator::RecordPositions(masm_, expr->position());
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(expr);
    // Record a statement position right here if the expression is not
    // breakable. For breakable expressions the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    // NOTE: this will record a statement position for something which might
    // not be a statement. Since stepping in the debugger only stops at
    // statement positions, this is used for e.g. the condition expression of
    // a do-while loop.
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, expr->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      DebugCodegen::GenerateSlot(masm_);
    }
  }
}


void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}


// Lookup table for code generators for special runtime calls which are
// generated inline.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)          \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::kInlineFunctionGenerators[] = {
    INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
  };
#undef INLINE_FUNCTION_GENERATOR_ADDRESS


FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
    int lookup_index =
        static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
    DCHECK(lookup_index >= 0);
    DCHECK(static_cast<size_t>(lookup_index) <
           arraysize(kInlineFunctionGenerators));
    return kInlineFunctionGenerators[lookup_index];
}


void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  DCHECK(function != NULL);
  DCHECK(function->intrinsic_type == Runtime::INLINE);
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}


void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}


void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}


void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}


void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}


void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}


void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;
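
  // For example, when "a && b" is evaluated for its value (the accumulator
  // and stack cases below), the value of "a" is kept on the stack while it
  // is tested, so it can be restored as the result if "b" is skipped, and
  // is discarded when "b" must be evaluated instead.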

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    DCHECK(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}


void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}


void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a block with block scoped variables.
  if (stmt->scope() == NULL) {
    PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  } else {
    scope_ = stmt->scope();
    DCHECK(!scope_->is_module_scope());
    { Comment cmnt(masm_, "[ Extend block context");
      __ Push(scope_->GetScopeInfo());
      PushFunctionArgumentForContextAllocation();
      __ CallRuntime(Runtime::kPushBlockContext, 2);

      // Replace the context stored in the frame.
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
      PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
    }
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope_->declarations());
      PrepareForBailoutForId(stmt->DeclsId(), NO_REGISTERS);
    }
  }

  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
  Comment cmnt(masm_, "[ Module context");

  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope_;
  scope_ = stmt->body()->scope();
  VisitStatements(stmt->body()->statements());
  scope_ = saved_scope;
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}


void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}


void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}


void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}


void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}


void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}


void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}


void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label loop, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&loop);

  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  &body);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  __ bind(loop_statement.continue_label());

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for
  // statement code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  DCHECK(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}


void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break removes the
  //    try handler and calls the finally block code before continuing
  //    its outward control transfer.
  // 3. By exiting the try-block with a thrown exception.
  //    This can happen in nested function calls. It traverses the try-handler
  //    chain and consumes the try-handler entry before jumping to the
  //    handler code. The handler code then calls the finally-block before
  //    rethrowing the exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code.  This code is only executed when an exception
  // is thrown.  The exception is in the result register, and must be
  // preserved by the finally block.  Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}


void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.

  PrepareForBailoutForId(stmt->DebugBreakId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}


void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}


void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}


void FullCodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
  // TODO(arv): Implement
  Comment cmnt(masm_, "[ ClassLiteral");
  if (expr->extends() != NULL) {
    VisitForEffect(expr->extends());
  }
  context()->Plug(isolate()->factory()->undefined_value());
}


void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Handle<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(
          reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
  DCHECK(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(
          name, literals, FunctionKind::kNormalFunction, code,
          Handle<ScopeInfo>(fun->shared()->scope_info()),
          Handle<TypeFeedbackVector>(fun->shared()->feedback_vector()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->formal_parameter_count();
  shared->set_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}


void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}


bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}


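// A rough summary of the per-call-site state transitions implemented by the
// functions below:
//
//   Patch:            INTERRUPT                  -> ON_STACK_REPLACEMENT
//   Revert:           ON_STACK_REPLACEMENT or
//                     OSR_AFTER_STACK_CHECK      -> INTERRUPT
//   AddStackCheck:                               -> OSR_AFTER_STACK_CHECK
//   RemoveStackCheck: OSR_AFTER_STACK_CHECK      -> ON_STACK_REPLACEMENT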
void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment the loop nesting level by one and iterate over the back edge
  // table to find the matching loops, then patch their interrupt calls to
  // unconditional calls to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  if (loop_nesting_level > Code::kMaxLoopNestingMarker) return;

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}


void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}


void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}


void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;

  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
  }
}


#ifdef DEBUG
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG


#undef __


} }  // namespace v8::internal