// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "liveedit.h"
#include "macro-assembler.h"
#include "prettyprinter.h"
#include "scopes.h"
#include "scopeinfo.h"
#include "snapshot.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

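// The BreakableStatementChecker visitor determines whether the debugger can
// break directly in a given statement or expression: visiting a node sets
// is_breakable_ when the node (or a relevant subexpression) contains a break
// location such as a property access, a call, or a debugger statement.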
void BreakableStatementChecker::Check(Statement* stmt) {
  Visit(stmt);
}


void BreakableStatementChecker::Check(Expression* expr) {
  Visit(expr);
}


void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}

void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}

void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}

void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}

void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}


void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}


void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}


void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}


void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
}


void BreakableStatementChecker::VisitBlock(Block* stmt) {
}


void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}


void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable the if statement is breakable.
  Visit(stmt->condition());
}


void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}


void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}


void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements are breakable if the tag expression is.
  Visit(stmt->tag());
}


void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do while as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}


void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}


void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}


void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
  // For-of is breakable because of the next() call.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try finally as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
}


void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}


void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}


void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}


void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}


void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}


void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}


void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment is
  // breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}


void BreakableStatementChecker::VisitYield(Yield* expr) {
  // Yield is breakable if the expression is.
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the expression is.
  Visit(expr->exception());
}


void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}


void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
  if (expr->op() != Token::AND &&
      expr->op() != Token::OR) {
    Visit(expr->right());
  }
}


void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}


void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}


#define __ ACCESS_MASM(masm())

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
#ifdef ENABLE_GDB_JIT_INTERFACE
  masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif
  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    ASSERT(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->flags()->Contains(kDontOptimize) &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  cgen.PopulateTypeFeedbackCells(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_handler_table(*cgen.handler_table());
#ifdef ENABLE_DEBUGGER_SUPPORT
  code->set_has_debug_break_slots(
      info->isolate()->debugger()->IsDebuggerActive());
  code->set_compiled_optimizable(info->IsOptimizable());
#endif  // ENABLE_DEBUGGER_SUPPORT
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  code->set_back_edges_patched_for_osr(false);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);  // May be an empty handle.
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (FLAG_gdbjit && !code.is_null()) {
    GDBJITLineInfo* lineinfo =
        masm.positions_recorder()->DetachGDBJITLineInfo();

    GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
  }
#endif
  if (!code.is_null()) {
    void* line_info =
        masm.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
  }
  return !code.is_null();
}


unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries)
  // field, and then a sequence of entries.  Each entry is a triple of AST id,
  // code-relative pc offset, and loop depth.
  masm()->Align(kIntSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}


void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data = isolate()->factory()->
      NewDeoptimizationOutputData(length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}


void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  ASSERT(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}


void FullCodeGenerator::Initialize() {
  // The generation of debug code must match between the snapshot code and the
  // code that is generated later.  This is assumed by the debugger when it is
  // calculating PC offsets after generating a debug version of code.  Therefore
  // we disable the production of debug code in the full compiler if we are
  // either generating a snapshot or we booted from a snapshot.
  generate_debug_code_ = FLAG_debug_code &&
                         !Serializer::enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
  InitializeAstVisitor();
}


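// Copy the recorded (AST id, cell) entries into a tenured fixed array and
// attach it to the code object's type feedback info.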
void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
  if (type_feedback_cells_.is_empty()) return;
  int length = type_feedback_cells_.length();
  int array_size = TypeFeedbackCells::LengthOfFixedArray(length);
  Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
      isolate()->factory()->NewFixedArray(array_size, TENURED));
  for (int i = 0; i < length; i++) {
    cache->SetAstId(i, type_feedback_cells_[i].ast_id);
    cache->SetCell(i, *type_feedback_cells_[i].cell);
  }
  TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
      *cache);
}


void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}


void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored.  We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  ASSERT(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}


void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
  ASSERT(Smi::IsValid(pc_and_state));
  BailoutEntry entry = { id, pc_and_state };
  ASSERT(!prepared_bailout_ids_.Contains(id.ToInt()));
  prepared_bailout_ids_.Add(id.ToInt(), zone());
  bailout_entries_.Add(entry, zone());
}


void FullCodeGenerator::RecordTypeFeedbackCell(
    TypeFeedbackId id, Handle<Cell> cell) {
  TypeFeedbackCellEntry entry = { id, cell };
  type_feedback_cells_.Add(entry, zone());
}


void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a state.
  ASSERT(masm_->pc_offset() > 0);
  ASSERT(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}


bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}


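// The Plug() methods below move a value currently held in |reg| into the
// place the expression context expects it: nowhere (effect), the accumulator,
// the stack, or a test.  The PlugTOS() variants do the same for a value on
// top of the stack.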
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}


void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}


void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}


void FullCodeGenerator::StackValueContext::PlugTOS() const {
}


void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


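// The PrepareTest() methods tell the caller which labels a test of the
// condition should branch to when it evaluates to true or false, and which
// label control may simply fall through to, for each expression context.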
void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}


void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}


void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}


void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
  ASSERT(scope_->is_global_scope());

  for (int i = 0; i < declarations->length(); i++) {
    ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    if (declaration != NULL) {
      ModuleLiteral* module = declaration->module()->AsModuleLiteral();
      if (module != NULL) {
        Comment cmnt(masm_, "[ Link nested modules");
        Scope* scope = module->body()->scope();
        Interface* interface = scope->interface();
        ASSERT(interface->IsModule() && interface->IsFrozen());

        interface->Allocate(scope->module_var()->index());

        // Set up module context.
        ASSERT(scope->interface()->Index() >= 0);
        __ Push(Smi::FromInt(scope->interface()->Index()));
        __ Push(scope->GetScopeInfo());
        __ CallRuntime(Runtime::kPushModuleContext, 2);
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());

        AllocateModules(scope->declarations());

        // Pop module context.
        LoadContextField(context_register(), Context::PREVIOUS_INDEX);
        // Update local stack frame context field.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
    }
  }
}


// Modules have their own local scope, represented by their own context.
// Module instance objects have an accessor for every export that forwards
// access to the respective slot from the module's context. (Exports that are
// modules themselves, however, are simple data properties.)
//
// All modules have a _hosting_ scope/context, which (currently) is the
// (innermost) enclosing global scope. To deal with recursion, nested modules
// are hosted by the same scope as global ones.
//
// For every (global or nested) module literal, the hosting context has an
// internal slot that points directly to the respective module context. This
// enables quick access to (statically resolved) module members by 2-dimensional
// access through the hosting context. For example,
//
//   module A {
//     let x;
//     module B { let y; }
//   }
//   module C { let z; }
//
// allocates contexts as follows:
//
// [header| .A | .B | .C | A | C ]  (global)
//           |    |    |
//           |    |    +-- [header| z ]  (module)
//           |    |
//           |    +------- [header| y ]  (module)
//           |
//           +------------ [header| x | B ]  (module)
//
// Here, .A, .B, .C are the internal slots pointing to the hosted module
// contexts, whereas A, B, C hold the actual instance objects (note that every
// module context also points to the respective instance object through its
// extension slot in the header).
//
// To deal with arbitrary recursion and aliases between modules,
// they are created and initialized in several stages. Each stage applies to
// all modules in the hosting global scope, including nested ones.
//
// 1. Allocate: for each module _literal_, allocate the module contexts and
//    respective instance object and wire them up. This happens in the
//    PushModuleContext runtime function, as generated by AllocateModules
//    (invoked by VisitDeclarations in the hosting scope).
//
// 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
//    assign the respective instance object to respective local variables. This
//    happens in VisitModuleDeclaration, and uses the instance objects created
//    in the previous stage.
//    For each module _literal_, this phase also constructs a module descriptor
//    for the next stage. This happens in VisitModuleLiteral.
//
// 3. Populate: invoke the DeclareModules runtime function to populate each
//    _instance_ object with accessors for its exports. This is generated by
//    DeclareModules (invoked by VisitDeclarations in the hosting scope again),
//    and uses the descriptors generated in the previous stage.
//
// 4. Initialize: execute the module bodies (and other code) in sequence. This
//    happens by the separate statements generated for module bodies. To reenter
//    the module scopes properly, the parser inserted ModuleStatements.

void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  Handle<FixedArray> saved_modules = modules_;
  int saved_module_index = module_index_;
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  if (scope_->num_modules() != 0) {
    // This is a scope hosting modules. Allocate a descriptor array to pass
    // to the runtime for initialization.
    Comment cmnt(masm_, "[ Allocate modules");
    ASSERT(scope_->is_global_scope());
    modules_ =
        isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    module_index_ = 0;

    // Generate code for allocating all modules, including nested ones.
    // The allocated contexts are stored in internal variables in this scope.
    AllocateModules(declarations);
  }

  AstVisitor::VisitDeclarations(declarations);

  if (scope_->num_modules() != 0) {
    // Initialize modules from descriptor array.
    ASSERT(module_index_ == modules_->length());
    DeclareModules(modules_);
    modules_ = saved_modules;
    module_index_ = saved_module_index;
  }

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
       isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}


void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = scope_->interface();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  int index = module_index_++;

  // Set up module context.
  ASSERT(interface->Index() >= 0);
  __ Push(Smi::FromInt(interface->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  // Populate the module description.
  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
  // TODO(rossberg): dummy allocation for now.
  Scope* scope = module->body()->scope();
  Interface* interface = scope_->interface();

  ASSERT(interface->IsModule() && interface->IsFrozen());
  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  interface->Allocate(scope->module_var()->index());
  int index = module_index_++;

  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);
}


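// Encode is_eval(), is_native(), and the language mode into the flags word
// used by the platform-specific DeclareGlobals code.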
int FullCodeGenerator::DeclareGlobalsFlags() {
  ASSERT(DeclareGlobalsLanguageMode::is_valid(language_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
      DeclareGlobalsNativeFlag::encode(is_native()) |
      DeclareGlobalsLanguageMode::encode(language_mode());
}


void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}


void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}


void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker;
    checker.Check(stmt);
    // Record the statement position right here if the statement is not
    // breakable. For breakable statements the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->statement_pos(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
#endif
}


void FullCodeGenerator::SetExpressionPosition(Expression* expr, int pos) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, pos);
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker;
    checker.Check(expr);
    // Record a statement position right here if the expression is not
    // breakable. For breakable expressions the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    // Note that this will record a statement position for something which
    // might not be a statement.  Since stepping in the debugger only stops at
    // statement positions, this is used e.g. for the condition expression of
    // a do-while loop.
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, pos, !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, pos);
#endif
}


void FullCodeGenerator::SetStatementPosition(int pos) {
  CodeGenerator::RecordPositions(masm_, pos);
}


void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}


// Lookup table for code generators for special runtime calls which are
// generated inline.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)          \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::kInlineFunctionGenerators[] = {
    INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
    INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
  };
#undef INLINE_FUNCTION_GENERATOR_ADDRESS


FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
    int lookup_index =
        static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
    ASSERT(lookup_index >= 0);
    ASSERT(static_cast<size_t>(lookup_index) <
           ARRAY_SIZE(kInlineFunctionGenerators));
    return kInlineFunctionGenerators[lookup_index];
}


void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
  ASSERT(function->intrinsic_type == Runtime::INLINE);
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}


void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}


void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}


void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}


void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


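// Visit an expression in the same kind of context (effect, accumulator,
// stack, or test) as the current one.  Used when the value of a
// subexpression becomes the value of the whole expression (comma, logical
// operators, conditionals).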
void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}


void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}


void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    ASSERT(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}


void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}


void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a block with block scoped variables.
  if (stmt->scope() != NULL) {
    scope_ = stmt->scope();
    ASSERT(!scope_->is_module_scope());
    { Comment cmnt(masm_, "[ Extend block context");
      Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
      int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
      __ Push(scope_info);
      PushFunctionArgumentForContextAllocation();
      if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
        FastNewBlockContextStub stub(heap_slots);
        __ CallStub(&stub);
      } else {
        __ CallRuntime(Runtime::kPushBlockContext, 2);
      }

      // Replace the context stored in the frame.
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
    }
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope_->declarations());
    }
  }

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
}


void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
  Comment cmnt(masm_, "[ Module context");

  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope_;
  scope_ = stmt->body()->scope();
  VisitStatements(stmt->body()->statements());
  scope_ = saved_scope;
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}


void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}


void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}


void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}


void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}


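// Exit all nested statements on the way out of the function, dropping any
// values they have pushed, before the return sequence is emitted.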
void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}


void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}


void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond(), stmt->condition_position());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // Emit the test at the bottom of the loop.
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // Emit the statement position here as this is where the while
  // statement code starts.
  __ bind(loop_statement.continue_label());
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  loop_statement.break_label());

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for
  // statement code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  ASSERT(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}


void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break removes the
  //    try handler and calls the finally block code before continuing
  //    its outward control transfer.
  // 3. By exiting the try-block with a thrown exception.
  //    This can happen in nested function calls. It traverses the try-handler
  //    chain and consumes the try-handler entry before jumping to the
  //    handler code. The handler code then calls the finally-block before
  //    rethrowing the exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code.  This code is only executed when an exception
  // is thrown.  The exception is in the result register, and must be
  // preserved by the finally block.  Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}


void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.
#endif
}


void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression(),
                        expr->then_expression_position());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression(),
                        expr->else_expression_position());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}


void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script());
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}


void FullCodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  EmitNewClosure(expr->shared_function_info(), false);
}


void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}


bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}


#undef __


} }  // namespace v8::internal