// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "liveedit.h"
#include "macro-assembler.h"
#include "prettyprinter.h"
#include "scopes.h"
#include "scopeinfo.h"
#include "snapshot.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

void BreakableStatementChecker::Check(Statement* stmt) {
  Visit(stmt);
}


void BreakableStatementChecker::Check(Expression* expr) {
  Visit(expr);
}


void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}

void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}

void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}

void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}

void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}


void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}


void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}


void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}


void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
}


void BreakableStatementChecker::VisitBlock(Block* stmt) {
}


void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}


void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable the if statement is breakable.
  Visit(stmt->condition());
}


void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}


void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}


void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements are breakable if the tag expression is.
  Visit(stmt->tag());
}


void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do while as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}


void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}


void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for-in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}


void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
  // For-of is breakable because of the next() call.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try finally as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
}


void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}


void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}


void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}


void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}


void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}


void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}


void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment is
  // breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}


void BreakableStatementChecker::VisitYield(Yield* expr) {
  // Yield is breakable if the expression is.
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the expression is.
  Visit(expr->exception());
}


void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}


void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
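  // The right operand of a short-circuit operation may not be evaluated at
  // run time, so it is not visited here; only the left operand can make the
  // operation breakable.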
  if (expr->op() != Token::AND &&
      expr->op() != Token::OR) {
    Visit(expr->right());
  }
}


void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}


void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}


#define __ ACCESS_MASM(masm())

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
#ifdef ENABLE_GDB_JIT_INTERFACE
  masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif
  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    ASSERT(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->dont_optimize() &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  cgen.PopulateTypeFeedbackCells(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_handler_table(*cgen.handler_table());
#ifdef ENABLE_DEBUGGER_SUPPORT
  code->set_compiled_optimizable(info->IsOptimizable());
#endif  // ENABLE_DEBUGGER_SUPPORT
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  code->set_back_edges_patched_for_osr(false);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (FLAG_gdbjit) {
    GDBJITLineInfo* lineinfo =
        masm.positions_recorder()->DetachGDBJITLineInfo();
    GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
  }
#endif
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
  return true;
}


unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries)
  // field, and then a sequence of entries.  Each entry is a triple of AST id,
  // code-relative pc offset, and loop depth.
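  //
  // For example (an illustrative sketch of the emitted data, where each
  // dd() below emits one 32-bit word), a function with two back edges
  // produces:
  //
  //   [ length: 2 ]
  //   [ ast id 0 ][ pc offset 0 ][ loop depth 0 ]
  //   [ ast id 1 ][ pc offset 1 ][ loop depth 1 ]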
  masm()->Align(kIntSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}


void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data = isolate()->factory()->
      NewDeoptimizationOutputData(length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}


void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  ASSERT(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}


void FullCodeGenerator::Initialize() {
  // The generation of debug code must match between the snapshot code and the
  // code that is generated later.  This is assumed by the debugger when it is
  // calculating PC offsets after generating a debug version of code.  Therefore
  // we disable the production of debug code in the full compiler if we are
  // either generating a snapshot or we booted from a snapshot.
  generate_debug_code_ = FLAG_debug_code &&
                         !Serializer::enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
  InitializeAstVisitor(info_->isolate());
}


void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
  if (type_feedback_cells_.is_empty()) return;
  int length = type_feedback_cells_.length();
  int array_size = TypeFeedbackCells::LengthOfFixedArray(length);
  Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
      isolate()->factory()->NewFixedArray(array_size, TENURED));
  for (int i = 0; i < length; i++) {
    cache->SetAstId(i, type_feedback_cells_[i].ast_id);
    cache->SetCell(i, *type_feedback_cells_[i].cell);
  }
  TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
      *cache);
}


void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}


void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored.  We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  ASSERT(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}


void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
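  // The state and the current pc offset are packed into a single Smi-sized
  // value; the exact bit layout is given by the StateField and PcField
  // definitions in full-codegen.h.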
  ASSERT(Smi::IsValid(pc_and_state));
  BailoutEntry entry = { id, pc_and_state };
  ASSERT(!prepared_bailout_ids_.Contains(id.ToInt()));
  prepared_bailout_ids_.Add(id.ToInt(), zone());
  bailout_entries_.Add(entry, zone());
}


void FullCodeGenerator::RecordTypeFeedbackCell(
    TypeFeedbackId id, Handle<Cell> cell) {
  TypeFeedbackCellEntry entry = { id, cell };
  type_feedback_cells_.Add(entry, zone());
}


void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a state.
  ASSERT(masm_->pc_offset() > 0);
  ASSERT(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}


bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}


void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}


void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}


void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}


void FullCodeGenerator::StackValueContext::PlugTOS() const {
}


void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}


void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}


void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}


void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
  ASSERT(scope_->is_global_scope());

  for (int i = 0; i < declarations->length(); i++) {
    ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    if (declaration != NULL) {
      ModuleLiteral* module = declaration->module()->AsModuleLiteral();
      if (module != NULL) {
        Comment cmnt(masm_, "[ Link nested modules");
        Scope* scope = module->body()->scope();
        Interface* interface = scope->interface();
        ASSERT(interface->IsModule() && interface->IsFrozen());

        interface->Allocate(scope->module_var()->index());

        // Set up module context.
        ASSERT(scope->interface()->Index() >= 0);
        __ Push(Smi::FromInt(scope->interface()->Index()));
        __ Push(scope->GetScopeInfo());
        __ CallRuntime(Runtime::kPushModuleContext, 2);
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());

        AllocateModules(scope->declarations());

        // Pop module context.
        LoadContextField(context_register(), Context::PREVIOUS_INDEX);
        // Update local stack frame context field.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
    }
  }
}


// Modules have their own local scope, represented by their own context.
// Module instance objects have an accessor for every export that forwards
// access to the respective slot from the module's context. (Exports that are
// modules themselves, however, are simple data properties.)
//
// All modules have a _hosting_ scope/context, which (currently) is the
// (innermost) enclosing global scope. To deal with recursion, nested modules
// are hosted by the same scope as global ones.
//
// For every (global or nested) module literal, the hosting context has an
// internal slot that points directly to the respective module context. This
// enables quick access to (statically resolved) module members by 2-dimensional
// access through the hosting context. For example,
//
//   module A {
//     let x;
//     module B { let y; }
//   }
//   module C { let z; }
//
// allocates contexts as follows:
//
// [header| .A | .B | .C | A | C ]  (global)
//           |    |    |
//           |    |    +-- [header| z ]  (module)
//           |    |
//           |    +------- [header| y ]  (module)
//           |
//           +------------ [header| x | B ]  (module)
//
// Here, .A, .B, .C are the internal slots pointing to the hosted module
// contexts, whereas A, B, C hold the actual instance objects (note that every
// module context also points to the respective instance object through its
// extension slot in the header).
//
// To deal with arbitrary recursion and aliases between modules,
// they are created and initialized in several stages. Each stage applies to
// all modules in the hosting global scope, including nested ones.
//
// 1. Allocate: for each module _literal_, allocate the module contexts and
//    respective instance object and wire them up. This happens in the
//    PushModuleContext runtime function, as generated by AllocateModules
//    (invoked by VisitDeclarations in the hosting scope).
//
// 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
//    assign the respective instance object to respective local variables. This
//    happens in VisitModuleDeclaration, and uses the instance objects created
//    in the previous stage.
//    For each module _literal_, this phase also constructs a module descriptor
//    for the next stage. This happens in VisitModuleLiteral.
//
// 3. Populate: invoke the DeclareModules runtime function to populate each
//    _instance_ object with accessors for its exports. This is generated by
//    DeclareModules (invoked by VisitDeclarations in the hosting scope again),
//    and uses the descriptors generated in the previous stage.
//
// 4. Initialize: execute the module bodies (and other code) in sequence. This
//    happens by the separate statements generated for module bodies. To reenter
//    the module scopes properly, the parser inserted ModuleStatements.
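//
// As an illustrative sketch (following the example above), a statically
// resolved access to A.B.y from global code becomes a two-dimensional
// context access: load the internal slot .B from the hosting global
// context, then read y's slot from that module context.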

void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  Handle<FixedArray> saved_modules = modules_;
  int saved_module_index = module_index_;
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  if (scope_->num_modules() != 0) {
    // This is a scope hosting modules. Allocate a descriptor array to pass
    // to the runtime for initialization.
    Comment cmnt(masm_, "[ Allocate modules");
    ASSERT(scope_->is_global_scope());
    modules_ =
        isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    module_index_ = 0;

    // Generate code for allocating all modules, including nested ones.
    // The allocated contexts are stored in internal variables in this scope.
    AllocateModules(declarations);
  }

  AstVisitor::VisitDeclarations(declarations);

  if (scope_->num_modules() != 0) {
    // Initialize modules from descriptor array.
    ASSERT(module_index_ == modules_->length());
    DeclareModules(modules_);
    modules_ = saved_modules;
    module_index_ = saved_module_index;
  }

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
       isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}


void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = scope_->interface();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  int index = module_index_++;

  // Set up module context.
  ASSERT(interface->Index() >= 0);
  __ Push(Smi::FromInt(interface->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  // Populate the module description.
  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
  // TODO(rossberg): dummy allocation for now.
  Scope* scope = module->body()->scope();
  Interface* interface = scope_->interface();

  ASSERT(interface->IsModule() && interface->IsFrozen());
  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  interface->Allocate(scope->module_var()->index());
  int index = module_index_++;

  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);
}


int FullCodeGenerator::DeclareGlobalsFlags() {
  ASSERT(DeclareGlobalsLanguageMode::is_valid(language_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
      DeclareGlobalsNativeFlag::encode(is_native()) |
      DeclareGlobalsLanguageMode::encode(language_mode());
}


void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}


void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}


void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, stmt->position());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(isolate());
    checker.Check(stmt);
    // Record the statement position right here if the statement is not
    // breakable. For breakable statements the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, stmt->position());
#endif
}


void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, expr->position());
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(isolate());
    checker.Check(expr);
    // Record a statement position right here if the expression is not
    // breakable. For breakable expressions the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    // NOTE this will record a statement position for something which might
    // not be a statement. As stepping in the debugger will only stop at
    // statement positions this is used for e.g. the condition expression of
    // a do while loop.
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, expr->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, expr->position());
#endif
}


void FullCodeGenerator::SetStatementPosition(int pos) {
  CodeGenerator::RecordPositions(masm_, pos);
}


void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}


// Lookup table for code generators for special runtime calls which are
// generated inline.
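// For example (assuming IsSmi appears in INLINE_FUNCTION_LIST), the entry
// INLINE_FUNCTION_GENERATOR_ADDRESS(IsSmi, 1, 1) expands to
// &FullCodeGenerator::EmitIsSmi, and the table is indexed by a
// Runtime::FunctionId relative to Runtime::kFirstInlineFunction.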
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)          \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::kInlineFunctionGenerators[] = {
    INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
    INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
  };
#undef INLINE_FUNCTION_GENERATOR_ADDRESS


FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
    int lookup_index =
        static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
    ASSERT(lookup_index >= 0);
    ASSERT(static_cast<size_t>(lookup_index) <
           ARRAY_SIZE(kInlineFunctionGenerators));
    return kInlineFunctionGenerators[lookup_index];
}


void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
  ASSERT(function->intrinsic_type == Runtime::INLINE);
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}


void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}


void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}


void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}


void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}


void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}


void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
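    // For a && b, a truthy left value is discarded (b becomes the result)
    // and a falsy one is restored as the result; for a || b the roles are
    // reversed.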
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    ASSERT(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}


void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}


void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a block with block scoped variables.
  if (stmt->scope() != NULL) {
    scope_ = stmt->scope();
    ASSERT(!scope_->is_module_scope());
    { Comment cmnt(masm_, "[ Extend block context");
      Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
      int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
      __ Push(scope_info);
      PushFunctionArgumentForContextAllocation();
      if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
        FastNewBlockContextStub stub(heap_slots);
        __ CallStub(&stub);
      } else {
        __ CallRuntime(Runtime::kPushBlockContext, 2);
      }

      // Replace the context stored in the frame.
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
    }
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope_->declarations());
    }
  }

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
}


void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
  Comment cmnt(masm_, "[ Module context");

  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope_;
  scope_ = stmt->body()->scope();
  VisitStatements(stmt->body()->statements());
  scope_ = saved_scope;
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}


void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}


void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}


void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}


void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}


void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}


void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}


void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // Emit the test at the bottom of the loop.
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // Emit the statement position here as this is where the while
  // statement code starts.
  __ bind(loop_statement.continue_label());
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  loop_statement.break_label());

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for
  // statement code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  ASSERT(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}


void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break removes the
  //    try handler and calls the finally block code before continuing
  //    its outward control transfer.
  // 3. By exiting the try-block with a thrown exception.
  //    This can happen in nested function calls. It traverses the try-handler
  //    chain and consumes the try-handler entry before jumping to the
  //    handler code. The handler code then calls the finally-block before
  //    rethrowing the exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
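  //
  // As an illustrative JavaScript example of the three ways:
  //
  //   for (;;) {
  //     try {
  //       if (a) break;     // way 2: local control flow transfer
  //       if (b) throw x;   // way 3: thrown exception
  //     } finally {         // way 1: normal exit falls into the finally
  //       cleanup();
  //     }
  //   }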
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code.  This code is only executed when an exception
  // is thrown.  The exception is in the result register, and must be
  // preserved by the finally block.  Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}


void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.
#endif
}


void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}


void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with the true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}


void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script());
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}


void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Handle<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(
          reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
  ASSERT(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  bool is_generator = false;
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
          code, Handle<ScopeInfo>(fun->shared()->scope_info()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->formal_parameter_count();
  shared->set_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}


void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}


bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}


void BackEdgeTable::Patch(Isolate* isolate,
                          Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Iterate over the back edge table and patch every interrupt
  // call to an unconditional call to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_back_edges_patched_for_osr(true);
  ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
}


void BackEdgeTable::Revert(Isolate* isolate,
                           Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  ASSERT(unoptimized->back_edges_patched_for_osr());
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_back_edges_patched_for_osr(false);
  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  ASSERT(Verify(isolate, unoptimized, -1));
}


void BackEdgeTable::AddStackCheck(CompilationInfo* info) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = info->isolate();
  Code* code = info->shared_info()->code();
  Address pc = code->instruction_start() + info->osr_pc_offset();
  ASSERT_EQ(ON_STACK_REPLACEMENT, GetBackEdgeState(isolate, code, pc));
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch);
}


void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = info->isolate();
  Code* code = info->shared_info()->code();
  Address pc = code->instruction_start() + info->osr_pc_offset();
  if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(code, pc, ON_STACK_REPLACEMENT, patch);
  }
}


#ifdef DEBUG
bool BackEdgeTable::Verify(Isolate* isolate,
                           Code* unoptimized,
                           int loop_nesting_level) {
  DisallowHeapAllocation no_gc;
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG


#undef __


} }  // namespace v8::internal