// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/full-codegen/full-codegen.h"

#include "src/ast/ast-numbering.h"
#include "src/ast/ast.h"
#include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/debug/liveedit.h"
#include "src/frames-inl.h"
#include "src/globals.h"
#include "src/isolate-inl.h"
#include "src/macro-assembler.h"
#include "src/snapshot/snapshot.h"
#include "src/tracing/trace-event.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// CompilationJob wrapper around FullCodeGenerator::MakeCode. Full-codegen
// compilation always runs on the main thread (see
// can_execute_on_background_thread below), so Prepare/Finalize are no-ops
// and all the work happens in ExecuteJobImpl.
class FullCodegenCompilationJob final : public CompilationJob {
 public:
  explicit FullCodegenCompilationJob(CompilationInfo* info)
      : CompilationJob(info->isolate(), info, "Full-Codegen") {}

  bool can_execute_on_background_thread() const override { return false; }

  CompilationJob::Status PrepareJobImpl() final { return SUCCEEDED; }

  CompilationJob::Status ExecuteJobImpl() final {
    // Must run on the isolate's main thread; full-codegen touches the heap.
    DCHECK(ThreadId::Current().Equals(isolate()->thread_id()));
    return FullCodeGenerator::MakeCode(info(), stack_limit()) ? SUCCEEDED
                                                              : FAILED;
  }

  CompilationJob::Status FinalizeJobImpl() final { return SUCCEEDED; }

 private:
  DISALLOW_COPY_AND_ASSIGN(FullCodegenCompilationJob);
};

FullCodeGenerator::FullCodeGenerator(MacroAssembler* masm,
                                     CompilationInfo* info,
                                     uintptr_t stack_limit)
    : masm_(masm),
      info_(info),
      isolate_(info->isolate()),
      zone_(info->zone()),
      scope_(info->scope()),
      nesting_stack_(NULL),
      loop_depth_(0),
      operand_stack_depth_(0),
      globals_(NULL),
      context_(NULL),
      // Pre-size the bailout table to one entry per AST node when deopt
      // support is on; otherwise no bailout entries are ever recorded.
      bailout_entries_(info->HasDeoptimizationSupport()
                           ? info->literal()->ast_node_count()
                           : 0,
                       info->zone()),
      back_edges_(2, info->zone()),
      source_position_table_builder_(info->zone(),
                                     info->SourcePositionRecordingMode()),
      ic_total_count_(0) {
  DCHECK(!info->IsStub());
  Initialize(stack_limit);
}

// static
CompilationJob* FullCodeGenerator::NewCompilationJob(CompilationInfo* info) {
  return new FullCodegenCompilationJob(info);
}

// static
bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  return MakeCode(info, info->isolate()->stack_guard()->real_climit());
}

// static
// Generates unoptimized code for |info| and installs it via info->SetCode().
// Returns false (without a pending exception) if code generation bailed out
// due to stack overflow in the AST walk.
bool FullCodeGenerator::MakeCode(CompilationInfo* info, uintptr_t stack_limit) {
  Isolate* isolate = info->isolate();

  DCHECK(!info->shared_info()->must_use_ignition_turbo());
  DCHECK(!FLAG_minimal);
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::CompileFullCode);
  TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileFullCode");

  Handle<Script> script = info->script();
  if (!script->IsUndefined(isolate) &&
      !script->source()->IsUndefined(isolate)) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize,
                      CodeObjectRequired::kYes);
  if (info->will_serialize()) masm.enable_serializer();

  FullCodeGenerator cgen(&masm, info, stack_limit);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    DCHECK(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Handle<Code> code =
      CodeGenerator::MakeCodeEpilogue(&masm, nullptr, info, masm.CodeObject());
  // Attach the side tables the rest of the pipeline expects on full-codegen
  // code objects: deopt data, type feedback, back edges, source positions.
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_has_reloc_info_for_serialization(info->will_serialize());
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  Handle<ByteArray> source_positions =
      cgen.source_position_table_builder_.ToSourcePositionTable(
          isolate, Handle<AbstractCode>::cast(code));
  code->set_source_position_table(*source_positions);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);

#ifdef DEBUG
  // Check that no context-specific object has been embedded.
  code->VerifyEmbeddedObjects(Code::kNoContextSpecificPointers);
#endif  // DEBUG
  return true;
}

// Emits the back edge table at the current pc and returns its code-relative
// offset. The back edge table consists of a length (in number of entries)
// field, and then a sequence of entries. Each entry is a triple of AST id,
// code-relative pc offset, and loop depth.
unsigned FullCodeGenerator::EmitBackEdgeTable() {
  masm()->Align(kPointerSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}

// Copies the recorded bailout entries into a DeoptimizationOutputData array
// attached to |code|. No-op when the function was compiled without
// deoptimization support (in which case no entries were recorded).
void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data =
      DeoptimizationOutputData::New(isolate(), length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}

// Attaches a fresh TypeFeedbackInfo carrying the total IC count (incremented
// by CallIC below) to |code|.
void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  DCHECK(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}

bool FullCodeGenerator::MustCreateObjectLiteralWithRuntime(
    ObjectLiteral* expr) const {
  // Fast cloning embeds object pointers, which the serializer cannot handle.
  return masm()->serializer_enabled() || !expr->IsFastCloningSupported();
}

bool FullCodeGenerator::MustCreateArrayLiteralWithRuntime(
    ArrayLiteral* expr) const {
  return !expr->IsFastCloningSupported();
}

void FullCodeGenerator::Initialize(uintptr_t stack_limit) {
  InitializeAstVisitor(stack_limit);
  masm_->set_emit_debug_code(FLAG_debug_code);
  // Code size must be predictable so recorded pc offsets stay valid.
  masm_->set_predictable_code_size(true);
}

void FullCodeGenerator::PrepareForBailout(Expression* node,
                                          BailoutState state) {
  PrepareForBailoutForId(node->id(), state);
}

// Calls an IC stub and counts it towards ic_total_count_ (reported via
// PopulateTypeFeedbackInfo).
void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}

// Loads the named property |name| via the LoadIC; the receiver is expected
// in LoadDescriptor::ReceiverRegister() (set up by callers).
void FullCodeGenerator::CallLoadIC(FeedbackSlot slot, Handle<Object> name) {
  DCHECK(name->IsName());
  __ Move(LoadDescriptor::NameRegister(), name);

  EmitLoadSlot(LoadDescriptor::SlotRegister(), slot);

  Handle<Code> code = CodeFactory::LoadIC(isolate()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  RestoreContext();
}

// Stores a named property via StoreIC (or StoreOwnIC when
// |store_own_property| is set). Depending on the platform calling convention,
// the value/slot pair is either passed in registers or pushed on the stack.
void FullCodeGenerator::CallStoreIC(FeedbackSlot slot, Handle<Object> name,
                                    bool store_own_property) {
  DCHECK(name->IsName());
  __ Move(StoreDescriptor::NameRegister(), name);

  STATIC_ASSERT(!StoreDescriptor::kPassLastArgsOnStack ||
                StoreDescriptor::kStackArgumentsCount == 2);
  if (StoreDescriptor::kPassLastArgsOnStack) {
    __ Push(StoreDescriptor::ValueRegister());
    EmitPushSlot(slot);
  } else {
    EmitLoadSlot(StoreDescriptor::SlotRegister(), slot);
  }

  Handle<Code> code;
  if (store_own_property) {
    DCHECK_EQ(FeedbackSlotKind::kStoreOwnNamed,
              feedback_vector_spec()->GetKind(slot));
    code = CodeFactory::StoreOwnIC(isolate()).code();
  } else {
    // Ensure that language mode is in sync with the IC slot kind.
    DCHECK_EQ(
        GetLanguageModeFromSlotKind(feedback_vector_spec()->GetKind(slot)),
        language_mode());
    code = CodeFactory::StoreIC(isolate(), language_mode()).code();
  }
  __ Call(code, RelocInfo::CODE_TARGET);
  RestoreContext();
}

// Stores a keyed property via the KeyedStoreIC; same value/slot passing
// convention as CallStoreIC above.
void FullCodeGenerator::CallKeyedStoreIC(FeedbackSlot slot) {
  STATIC_ASSERT(!StoreDescriptor::kPassLastArgsOnStack ||
                StoreDescriptor::kStackArgumentsCount == 2);
  if (StoreDescriptor::kPassLastArgsOnStack) {
    __ Push(StoreDescriptor::ValueRegister());
    EmitPushSlot(slot);
  } else {
    EmitLoadSlot(StoreDescriptor::SlotRegister(), slot);
  }

  // Ensure that language mode is in sync with the IC slot kind.
  DCHECK_EQ(GetLanguageModeFromSlotKind(feedback_vector_spec()->GetKind(slot)),
            language_mode());
  Handle<Code> code =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  RestoreContext();
}

void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The bailout state is ignored.  We defensively set it to TOS_REGISTER,
  // which is the real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), BailoutState::TOS_REGISTER);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  DCHECK(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}

// Records a (bailout id -> current pc, state) mapping so optimized code can
// deopt back into this code at AST node |id|.
void FullCodeGenerator::PrepareForBailoutForId(BailoutId id,
                                               BailoutState state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      BailoutStateField::encode(state) | PcField::encode(masm_->pc_offset());
  DCHECK(Smi::IsValid(pc_and_state));
#ifdef DEBUG
  // Each bailout id must be recorded at most once.
  for (int i = 0; i < bailout_entries_.length(); ++i) {
    DCHECK(bailout_entries_[i].id != id);
  }
#endif
  BailoutEntry entry = { id, pc_and_state };
  bailout_entries_.Add(entry, zone());
}

// Records a loop back edge (for the table emitted by EmitBackEdgeTable).
void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a
  // state.
  DCHECK(masm_->pc_offset() > 0);
  DCHECK(loop_depth() > 0);
  // Loop depth is clamped to the maximum nesting marker AbstractCode supports.
  uint8_t depth = Min(loop_depth(), AbstractCode::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}

bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}

// --- Expression-context plugging ---------------------------------------
// The Plug/DropAndPlug/PlugTOS families route a produced value into the
// current expression context: discarded (Effect), left in the accumulator
// (AccumulatorValue), pushed on the operand stack (StackValue), or branched
// on (Test).

void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}

void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}

void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}

void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}

void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  codegen()->PushOperand(reg);
}

void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  codegen()->DropOperands(count);
}

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  DCHECK(count > 0);
  codegen()->DropOperands(count);
  __ Move(result_register(), reg);
}

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  codegen()->DropOperands(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::PlugTOS() const {
  codegen()->DropOperands(1);
}

void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  codegen()->PopOperand(result_register());
}

void FullCodeGenerator::StackValueContext::PlugTOS() const {
}

void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  codegen()->PopOperand(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}

void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}

void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}

void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // A test context already carries its own branch targets.
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}

void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}

// Visits all declarations, collecting global declarations into a FixedArray
// which is handed to the platform-specific DeclareGlobals. Nested calls get
// their own globals list (saved/restored around the visit).
void FullCodeGenerator::VisitDeclarations(Declaration::List* declarations) {
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  AstVisitor<FullCodeGenerator>::VisitDeclarations(declarations);

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}

// Loads an unallocated (global) variable through the LoadGlobalIC.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocated());
  __ Move(LoadDescriptor::NameRegister(), var->name());

  FeedbackSlot slot = proxy->VariableFeedbackSlot();
  // Ensure that typeof mode is in sync with the IC slot kind.
  DCHECK_EQ(GetTypeofModeFromSlotKind(feedback_vector_spec()->GetKind(slot)),
            typeof_mode);

  EmitLoadSlot(LoadGlobalDescriptor::SlotRegister(), slot);
  Handle<Code> code = CodeFactory::LoadGlobalIC(isolate(), typeof_mode).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  RestoreContext();
}

void FullCodeGenerator::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* declaration) {
  Visit(declaration->statement());
}

int FullCodeGenerator::DeclareGlobalsFlags() {
  return info_->GetDeclareGlobalsFlags();
}

// --- Operand stack helpers ---------------------------------------------
// Push/Pop/Drop wrappers that keep operand_stack_depth_ in sync with the
// machine stack, so unwinding (break/continue/return) can compute drops.

void FullCodeGenerator::PushOperand(Handle<Object> handle) {
  OperandStackDepthIncrement(1);
  __ Push(handle);
}

void FullCodeGenerator::PushOperand(Smi* smi) {
  OperandStackDepthIncrement(1);
  __ Push(smi);
}

void FullCodeGenerator::PushOperand(Register reg) {
  OperandStackDepthIncrement(1);
  __ Push(reg);
}

void FullCodeGenerator::PopOperand(Register reg) {
  OperandStackDepthDecrement(1);
  __ Pop(reg);
}

void FullCodeGenerator::DropOperands(int count) {
  OperandStackDepthDecrement(count);
  __ Drop(count);
}

// Calls a runtime function whose arguments are on the operand stack,
// accounting for the arguments it consumes.
void FullCodeGenerator::CallRuntimeWithOperands(Runtime::FunctionId id) {
  OperandStackDepthDecrement(Runtime::FunctionForId(id)->nargs);
  __ CallRuntime(id);
}

void FullCodeGenerator::OperandStackDepthIncrement(int count) {
  DCHECK_IMPLIES(!HasStackOverflow(), operand_stack_depth_ >= 0);
  DCHECK_GE(count, 0);
  operand_stack_depth_ += count;
}

void FullCodeGenerator::OperandStackDepthDecrement(int count) {
  DCHECK_IMPLIES(!HasStackOverflow(), operand_stack_depth_ >= count);
  DCHECK_GE(count, 0);
  operand_stack_depth_ -= count;
}

void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  RestoreContext();
  OperandStackDepthDecrement(3);
  context()->Plug(result_register());
}

// Lowers a %_Intrinsic call to a direct call of the corresponding code stub,
// marshalling the visited arguments into the stub's parameter registers.
void FullCodeGenerator::EmitIntrinsicAsStubCall(CallRuntime* expr,
                                                const Callable& callable) {
  ZoneList<Expression*>* args = expr->arguments();
  int param_count = callable.descriptor().GetRegisterParameterCount();
  DCHECK_EQ(args->length(), param_count);

  if (param_count > 0) {
    int last = param_count - 1;
    // Put all but last arguments on stack.
    for (int i = 0; i < last; i++) {
      VisitForStackValue(args->at(i));
    }
    // The last argument goes to the accumulator.
    VisitForAccumulatorValue(args->at(last));

    // Move the arguments to the registers, as required by the stub.
    __ Move(callable.descriptor().GetRegisterParameter(last),
            result_register());
    for (int i = last; i-- > 0;) {
      PopOperand(callable.descriptor().GetRegisterParameter(i));
    }
  }
  __ Call(callable.code(), RelocInfo::CODE_TARGET);

  // Reload the context register after the call as i.e. TurboFan code stubs
  // won't preserve the context register.
  LoadFromFrameField(StandardFrameConstants::kContextOffset,
                     context_register());
  context()->Plug(result_register());
}

void FullCodeGenerator::EmitToString(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToString(isolate()));
}

void FullCodeGenerator::EmitToLength(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToLength(isolate()));
}

void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToInteger(isolate()));
}

void FullCodeGenerator::EmitToNumber(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToNumber(isolate()));
}

void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToObject(isolate()));
}

// Calls the HasProperty stub with key and object popped from the operand
// stack (in that order).
void FullCodeGenerator::EmitHasProperty() {
  Callable callable = CodeFactory::HasProperty(isolate());
  PopOperand(callable.descriptor().GetRegisterParameter(1));
  PopOperand(callable.descriptor().GetRegisterParameter(0));
  __ Call(callable.code(), RelocInfo::CODE_TARGET);
  RestoreContext();
}

// Records |pos| at the current pc as a statement position (breakable).
void FullCodeGenerator::RecordStatementPosition(int pos) {
  DCHECK_NE(kNoSourcePosition, pos);
  source_position_table_builder_.AddPosition(masm_->pc_offset(),
                                             SourcePosition(pos), true);
}

// Records |pos| at the current pc as a plain (non-statement) position.
void FullCodeGenerator::RecordPosition(int pos) {
  DCHECK_NE(kNoSourcePosition, pos);
  source_position_table_builder_.AddPosition(masm_->pc_offset(),
                                             SourcePosition(pos), false);
}

void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  RecordPosition(fun->start_position());
}

void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  // For default constructors, start position equals end position, and there
  // is no source code besides the class literal.
  RecordStatementPosition(fun->return_position());
  if (info_->is_debug()) {
    // Always emit a debug break slot before a return.
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_RETURN);
  }
}

void FullCodeGenerator::SetStatementPosition(
    Statement* stmt, FullCodeGenerator::InsertBreak insert_break) {
  if (stmt->position() == kNoSourcePosition) return;
  RecordStatementPosition(stmt->position());
  if (insert_break == INSERT_BREAK && info_->is_debug() &&
      !stmt->IsDebuggerStatement()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}

void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
  if (expr->position() == kNoSourcePosition) return;
  RecordPosition(expr->position());
}

void FullCodeGenerator::SetExpressionAsStatementPosition(Expression* expr) {
  if (expr->position() == kNoSourcePosition) return;
  RecordStatementPosition(expr->position());
  if (info_->is_debug()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}

void FullCodeGenerator::SetCallPosition(Expression* expr,
                                        TailCallMode tail_call_mode) {
  if (expr->position() == kNoSourcePosition) return;
  RecordPosition(expr->position());
  if (info_->is_debug()) {
    RelocInfo::Mode mode = (tail_call_mode == TailCallMode::kAllow)
                               ? RelocInfo::DEBUG_BREAK_SLOT_AT_TAIL_CALL
                               : RelocInfo::DEBUG_BREAK_SLOT_AT_CALL;
    // Always emit a debug break slot before a call.
    DebugCodegen::GenerateSlot(masm_, mode);
  }
}

void FullCodeGenerator::VisitSuperPropertyReference(
    SuperPropertyReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError);
  // Even though this expression doesn't produce a value, we need to simulate
  // plugging of the value context to ensure stack depth tracking is in sync.
  if (context()->IsStackValue()) OperandStackDepthIncrement(1);
}

void FullCodeGenerator::VisitSuperCallReference(SuperCallReference* super) {
  // Handled by VisitCall
  UNREACHABLE();
}

void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  // In unoptimized code this intrinsic is a no-op producing zero.
  context()->Plug(handle(Smi::kZero, isolate()));
}

void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}

// Re-visits |expr| in a context equivalent to the current one. Used where an
// expression must be evaluated under the enclosing expression's context.
void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}

void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}

// Emits short-circuiting && / ||. The shape of the code depends on the
// surrounding context: in a test context we branch directly to the caller's
// labels; in value contexts the left value is kept on the stack in case it
// short-circuits, and discarded if the right side must be evaluated.
void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);

  } else {
    DCHECK(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}

void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();

  // Left operand on the stack, right in the accumulator.
  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetExpressionPosition(expr);
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, left, right);
  } else {
    EmitBinaryOp(expr, op);
  }
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    // Named property: receiver in the receiver register, name via LoadIC.
    DCHECK(!expr->IsSuperAccess());
    VisitForAccumulatorValue(expr->obj());
    __ Move(LoadDescriptor::ReceiverRegister(), result_register());
    EmitNamedPropertyLoad(expr);
  } else {
    // Keyed property: receiver on the stack, key in the accumulator.
    DCHECK(!expr->IsSuperAccess());
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ Move(LoadDescriptor::NameRegister(), result_register());
    PopOperand(LoadDescriptor::ReceiverRegister());
    EmitKeyedPropertyLoad(expr);
  }
  PrepareForBailoutForId(expr->LoadId(), BailoutState::TOS_REGISTER);
  context()->Plug(result_register());
}

// Evaluates the operand of a typeof. Unallocated (global) variables get a
// special INSIDE_TYPEOF load that must not throw a reference error.
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    EmitVariableLoad(proxy, INSIDE_TYPEOF);
    PrepareForBailout(proxy, BailoutState::TOS_REGISTER);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}

void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);

  {
    EnterBlockScopeIfNeeded block_scope_state(
        this, stmt->scope(), stmt->EntryId(), stmt->DeclsId(), stmt->ExitId());
    VisitStatements(stmt->statements());
    __ bind(nested_block.break_label());
  }
}

void FullCodeGenerator::VisitDoExpression(DoExpression* expr) {
  Comment cmnt(masm_, "[ Do Expression");
  SetExpressionPosition(expr);
  VisitBlock(expr->block());
  VisitInDuplicateContext(expr->result());
}

void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}

void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
}

void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), BailoutState::NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), BailoutState::NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), BailoutState::NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), BailoutState::NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), BailoutState::NO_REGISTERS);
}

// Unwinds nested statements up to the continue target, dropping operand
// stack slots and restoring the context as needed, then jumps to the loop's
// continue label.
void FullCodeGenerator::EmitContinue(Statement* target) {
  NestedStatement* current = nesting_stack_;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.
  ClearAccumulator();
  while (!current->IsContinueTarget(target)) {
    if (HasStackOverflow()) return;
    current = current->Exit(&context_length);
  }
  int stack_depth = current->GetStackDepthAtTarget();
  int stack_drop = operand_stack_depth_ - stack_depth;
  DCHECK_GE(stack_drop, 0);
  __ Drop(stack_drop);
  if (context_length > 0) {
    // Walk back up the context chain to the context at the target.
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}

void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  EmitContinue(stmt->target());
}

// Same unwinding as EmitContinue, but targets the enclosing breakable
// statement's break label.
void FullCodeGenerator::EmitBreak(Statement* target) {
  NestedStatement* current = nesting_stack_;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.
  ClearAccumulator();
  while (!current->IsBreakTarget(target)) {
    if (HasStackOverflow()) return;
    current = current->Exit(&context_length);
  }
  int stack_depth = current->GetStackDepthAtTarget();
  int stack_drop = operand_stack_depth_ - stack_depth;
  DCHECK_GE(stack_drop, 0);
  __ Drop(stack_drop);
  if (context_length > 0) {
    // Walk back up the context chain to the context at the target.
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}

void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  EmitBreak(stmt->target());
}

// Exits every nested statement on the nesting stack, then emits the return
// sequence.
void FullCodeGenerator::EmitUnwindAndReturn() {
  NestedStatement* current = nesting_stack_;
  int context_length = 0;
  while (current != NULL) {
    if (HasStackOverflow()) return;
    current = current->Exit(&context_length);
  }
  EmitReturnSequence();
}

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       FeedbackSlot slot, bool pretenure) {
  // If slot is invalid, then it's a native function literal and we
  // can pass the empty array or empty literal array, something like that...

  // If we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
      scope()->is_function_scope()) {
    // Fast path: FastNewClosure stub.
    Callable callable = CodeFactory::FastNewClosure(isolate());
    __ Move(callable.descriptor().GetRegisterParameter(0), info);
    __ EmitLoadFeedbackVector(callable.descriptor().GetRegisterParameter(1));
    __ Move(callable.descriptor().GetRegisterParameter(2), SmiFromSlot(slot));
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
  } else {
    // Slow path: runtime call (tenured variant when pretenuring).
    __ Push(info);
    __ EmitLoadFeedbackVector(result_register());
    __ Push(result_register());
    __ Push(SmiFromSlot(slot));
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(result_register());
}

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(!prop->IsSuperAccess());

  CallLoadIC(prop->PropertyFeedbackSlot(), key->value());
}

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);

  EmitLoadSlot(LoadDescriptor::SlotRegister(), prop->PropertyFeedbackSlot());

  Handle<Code> code = CodeFactory::KeyedLoadIC(isolate()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  RestoreContext();
}

// Materializes a feedback slot index as a Smi in |destination|.
void FullCodeGenerator::EmitLoadSlot(Register destination, FeedbackSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ Move(destination, SmiFromSlot(slot));
}

void FullCodeGenerator::EmitPushSlot(FeedbackSlot slot) {
  __ Push(SmiFromSlot(slot));
}

void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindAndReturn();
}

void
FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  // Dynamic scoping is not supported.
  UNREACHABLE();
}


// do-while loop: the body is emitted first; the condition follows at the
// continue label and branches back to the body via the back-edge check.
void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  // Do not insert break location as we do that below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), BailoutState::NO_REGISTERS);

  // Here is the actual 'while' keyword.
  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), BailoutState::NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


// while loop: the condition is emitted at the loop top; after the body the
// continue label falls through into the back-edge check and jumps back.
void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label loop, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&loop);

  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  &body);

  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  __ bind(loop_statement.continue_label());

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


// for loop: init runs once, then control jumps to the test (emitted at the
// bottom); the body, next-expression and back-edge check sit above it.
void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  // Do not insert break location as we do it below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label test, body;

  Iteration loop_statement(this, stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    SetStatementPosition(stmt->next());
    Visit(stmt->next());
  }

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    SetExpressionAsStatementPosition(stmt->cond());
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    // No condition: loop unconditionally back to the body.
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  // Iterator looping is not supported.
  UNREACHABLE();
}

// Loads the current JSFunction from the stack frame into the result
// register and plugs it into the expression context.
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  LoadFromFrameField(JavaScriptFrameConstants::kFunctionOffset,
                     result_register());
  context()->Plug(result_register());
}

void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  // Exception handling is not supported.
  UNREACHABLE();
}


void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  // Exception handling is not supported.
  UNREACHABLE();
}


void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  // Debugger statement is not supported.
  UNREACHABLE();
}


void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  // Case clauses are handled as part of their switch statement, never
  // visited directly.
  UNREACHABLE();
}


// Ternary expression: evaluate the condition for control, then each arm.
// In a test context the arms branch directly to the outer labels;
// otherwise each arm is evaluated in a duplicate of the current context.
void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  int original_stack_depth = operand_stack_depth_;
  PrepareForBailoutForId(expr->ThenId(), BailoutState::NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  // Both arms start from the same operand stack depth; restore it before
  // emitting the else arm.
  operand_stack_depth_ = original_stack_depth;
  PrepareForBailoutForId(expr->ElseId(), BailoutState::NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


// A literal simply plugs its constant value into the expression context.
void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}


// Builds the SharedFunctionInfo for a function literal and emits closure
// creation. A null SharedFunctionInfo signals a stack overflow during
// compilation, which is recorded and aborts code generation.
void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::GetSharedFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->LiteralFeedbackSlot(), expr->pretenure());
}


void FullCodeGenerator::VisitClassLiteral(ClassLiteral* lit) {
  // Unsupported
  UNREACHABLE();
}

// Clones the literal's RegExp via the FastCloneRegExp stub: parameters are
// the closure (from the frame), literal slot, pattern and flags.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  CallInterfaceDescriptor descriptor = callable.descriptor();
  LoadFromFrameField(JavaScriptFrameConstants::kFunctionOffset,
                     descriptor.GetRegisterParameter(0));
  __ Move(descriptor.GetRegisterParameter(1),
          SmiFromSlot(expr->literal_slot()));
  __ Move(descriptor.GetRegisterParameter(2), expr->pattern());
  __ Move(descriptor.GetRegisterParameter(3), Smi::FromInt(expr->flags()));
  __ Call(callable.code(), RelocInfo::CODE_TARGET);

  // Reload the context register after the call as i.e. TurboFan code stubs
  // won't preserve the context register.
  LoadFromFrameField(StandardFrameConstants::kContextOffset,
                     context_register());
  context()->Plug(result_register());
}

// Native function literals build their SharedFunctionInfo from the
// extension object and are never pretenured.
void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");
  Handle<SharedFunctionInfo> shared =
      Compiler::GetSharedFunctionInfoForNative(expr->extension(), expr->name());
  EmitNewClosure(shared, expr->LiteralFeedbackSlot(), false);
}


// Throw: push the exception value and call the runtime; control does not
// return to the emitted code after the call.
void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  SetExpressionPosition(expr);
  CallRuntimeWithOperands(Runtime::kThrow);
  // Never returns here.

  // Even though this expression doesn't produce a value, we need to simulate
  // plugging of the value context to ensure stack depth tracking is in sync.
  if (context()->IsStackValue()) OperandStackDepthIncrement(1);
}


// Dispatches a call expression to the appropriate emitter based on the
// call type (global, named/keyed property, or arbitrary expression).
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, (expr->tail_call_mode() == TailCallMode::kAllow)
                          ? "[ TailCall"
                          : "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType();

  // Eval is unsupported.
  CHECK(!expr->is_possibly_eval());

  switch (call_type) {
    case Call::GLOBAL_CALL:
      EmitCallWithLoadIC(expr);
      break;
    case Call::NAMED_PROPERTY_CALL: {
      // Receiver is pushed first, then the property is loaded by the IC.
      Property* property = callee->AsProperty();
      VisitForStackValue(property->obj());
      EmitCallWithLoadIC(expr);
      break;
    }
    case Call::KEYED_PROPERTY_CALL: {
      Property* property = callee->AsProperty();
      VisitForStackValue(property->obj());
      EmitKeyedCallWithLoadIC(expr, property->key());
      break;
    }
    case Call::OTHER_CALL:
      // Call to an arbitrary expression not handled specially above.
      VisitForStackValue(callee);
      // Push undefined as the receiver.
      OperandStackDepthIncrement(1);
      __ PushRoot(Heap::kUndefinedValueRootIndex);
      // Emit function call.
      EmitCall(expr);
      break;
    case Call::NAMED_SUPER_PROPERTY_CALL:
    case Call::KEYED_SUPER_PROPERTY_CALL:
    case Call::SUPER_CALL:
    case Call::WITH_CALL:
      UNREACHABLE();
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}

// Emits a runtime call: either a JS runtime function, an inline intrinsic
// with a dedicated emitter, or a plain call into the C++ runtime.
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);
    // Drop the function left on the stack by EmitLoadJSRuntimeFunction.
    context()->DropAndPlug(1, result_register());

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name) \
  case Runtime::kInline##Name: {       \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);           \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        OperandStackDepthDecrement(arg_count);
        context()->Plug(result_register());
      }
    }
  }
}

void FullCodeGenerator::VisitSpread(Spread* expr) { UNREACHABLE(); }


void FullCodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}

void FullCodeGenerator::VisitGetIterator(GetIterator* expr) { UNREACHABLE(); }

// A rewritable expression is transparent here; just visit what it wraps.
void FullCodeGenerator::VisitRewritableExpression(RewritableExpression* expr) {
  Visit(expr->expression());
}


// Recognizes literal comparison patterns (typeof x == "...", x == undefined,
// x == null) and emits specialized code for them. Returns false when |expr|
// is not one of these patterns so the caller emits a generic comparison.
bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    SetExpressionPosition(expr);
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    SetExpressionPosition(expr);
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    SetExpressionPosition(expr);
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}


// Patches back edges at the next loop nesting level to call the
// on-stack-replacement builtin instead of the interrupt check.
void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment loop nesting level by one and iterate over the back edge table
  // to find the matching loops to patch the interrupt
  // call to an unconditional call to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  // Levels beyond the maximum marker cannot be recorded; nothing to patch.
  if (loop_nesting_level > AbstractCode::kMaxLoopNestingMarker) return;

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}


// Reverts all previously patched back edges to plain interrupt checks.
void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    // All edges at or below the recorded nesting level were patched.
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}


#ifdef DEBUG
// Debug-only consistency check: every back edge at or below the recorded
// nesting level must be patched, and no deeper edge may be.
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), AbstractCode::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG


// Scoped helper: on entry, pushes a block context if |scope| needs one,
// records the entry bailout point and emits the scope's declarations. The
// destructor undoes the context change and records the exit bailout point.
FullCodeGenerator::EnterBlockScopeIfNeeded::EnterBlockScopeIfNeeded(
    FullCodeGenerator* codegen, Scope* scope, BailoutId entry_id,
    BailoutId declarations_id, BailoutId exit_id)
    : codegen_(codegen), exit_id_(exit_id) {
  saved_scope_ = codegen_->scope();

  if (scope == NULL) {
    codegen_->PrepareForBailoutForId(entry_id, BailoutState::NO_REGISTERS);
    needs_block_context_ = false;
  } else {
    needs_block_context_ = scope->NeedsContext();
    codegen_->scope_ = scope;
    {
      if (needs_block_context_) {
        Comment cmnt(masm(), "[ Extend block context");
        codegen_->PushOperand(scope->scope_info());
        codegen_->PushFunctionArgumentForContextAllocation();
        codegen_->CallRuntimeWithOperands(Runtime::kPushBlockContext);

        // Replace the context stored in the frame.
        codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                    codegen_->context_register());
      }
      CHECK_EQ(0, scope->num_stack_slots());
      codegen_->PrepareForBailoutForId(entry_id, BailoutState::NO_REGISTERS);
    }
    {
      Comment cmnt(masm(), "[ Declarations");
      codegen_->VisitDeclarations(scope->declarations());
      codegen_->PrepareForBailoutForId(declarations_id,
                                      BailoutState::NO_REGISTERS);
    }
  }
}


FullCodeGenerator::EnterBlockScopeIfNeeded::~EnterBlockScopeIfNeeded() {
  // Leave the block scope: pop the block context (if one was pushed) by
  // loading the previous context and writing it back into the frame.
  if (needs_block_context_) {
    codegen_->LoadContextField(codegen_->context_register(),
                               Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  }
  codegen_->PrepareForBailoutForId(exit_id_, BailoutState::NO_REGISTERS);
  codegen_->scope_ = saved_scope_;
}

// Convenience accessors delegating to the compilation info / current scope.
Handle<Script> FullCodeGenerator::script() { return info_->script(); }

LanguageMode FullCodeGenerator::language_mode() {
  return scope()->language_mode();
}

bool FullCodeGenerator::has_simple_parameters() {
  return info_->has_simple_parameters();
}

FunctionLiteral* FullCodeGenerator::literal() const { return info_->literal(); }

const FeedbackVectorSpec* FullCodeGenerator::feedback_vector_spec() const {
  return literal()->feedback_vector_spec();
}

#undef __


}  // namespace internal
}  // namespace v8