// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "hydrogen.h"

#include "codegen.h"
#include "full-codegen.h"
#include "hashmap.h"
#include "lithium-allocator.h"
#include "parser.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "stub-cache.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-codegen-mips.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2),
      dominator_(NULL),
      dominated_blocks_(4),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4),
      parent_loop_header_(NULL),
      is_inline_return_target_(false),
      is_deoptimizing_(false),
      dominates_loop_successors_(false) { }


void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this);
}


void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}


void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi);
  phi->SetBlock(this);
}


void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  ASSERT(phi->HasNoUses() || !phi->is_live());
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}


void HBasicBlock::AddInstruction(HInstruction* instr) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());
  if (first_ == NULL) {
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
  last_ = instr;
}


HDeoptimize* HBasicBlock::CreateDeoptimize(
    HDeoptimize::UseEnvironment has_uses) {
  ASSERT(HasEnvironment());
  if (has_uses == HDeoptimize::kNoUses) return new(zone()) HDeoptimize(0);

  HEnvironment* environment = last_environment();
  HDeoptimize* instr = new(zone()) HDeoptimize(environment->length());
  for (int i = 0; i < environment->length(); i++) {
    HValue* val = environment->values()->at(i);
    instr->AddEnvironmentValue(val);
  }

  return instr;
}


HSimulate* HBasicBlock::CreateSimulate(int ast_id) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id == AstNode::kNoNumber ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count);
  for (int i = push_count - 1; i >= 0; --i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (int i = 0; i < environment->assigned_variables()->length(); ++i) {
    int index = environment->assigned_variables()->at(i);
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}


void HBasicBlock::Finish(HControlInstruction* end) {
  ASSERT(!IsFinished());
  AddInstruction(end);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}


void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) {
  if (block->IsInlineReturnTarget()) {
    AddInstruction(new(zone()) HLeaveInlined);
    last_environment_ = last_environment()->DiscardInlined(drop_extra);
  }
  AddSimulate(AstNode::kNoNumber);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr);
}


void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  HBasicBlock* target,
                                  bool drop_extra) {
  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  AddInstruction(new(zone()) HLeaveInlined);
  last_environment_ = last_environment()->DiscardInlined(drop_extra);
  last_environment()->Push(return_value);
  AddSimulate(AstNode::kNoNumber);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr);
}


void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}


void HBasicBlock::SetJoinId(int ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    // We only need to verify the ID once.
    ASSERT(i != 0 ||
           predecessor->last_environment()->closure()->shared()
               ->VerifyBailoutId(ast_id));
    simulate->set_ast_id(ast_id);
  }
}


bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}


int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}


void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerate loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}


void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred);
}


void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there are two
  // succeeding blocks in this list, the predecessor is before the successor.
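  // A linear scan finds the insertion point, keeping dominated_blocks_
  // ordered by ascending block id.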
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block);
}


void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}


void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through the loop that don't go through the current
    // block contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}


int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}


#ifdef DEBUG
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif


void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block);
  AddBlock(block);
}


HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}


void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block);
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}


#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it were not part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16),
        reachable_(block_count, ZONE),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block);
      visited_count_++;
    }
  }

  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;
  ZoneList<HBasicBlock*> stack_;
  BitVector reachable_;
  HBasicBlock* dont_visit_;
};


void HGraph::Verify(bool do_full_verify) const {
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the
    // last node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      int id = block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check the special property of the first block: it has no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that the entry block's dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only the start block may have no dominator assigned to it.
        ASSERT(i == 0);
      } else {
        // Assert that the block is unreachable if the dominator must not be
        // visited.
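        // In other words, removing the dominator from the graph must also
        // make this block unreachable from the entry block.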
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}

#endif


HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               Object* value) {
  if (!pointer->is_set()) {
    HConstant* constant = new(zone()) HConstant(Handle<Object>(value),
                                                Representation::Tagged());
    constant->InsertAfter(GetConstantUndefined());
    pointer->set(constant);
  }
  return pointer->get();
}


HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, Smi::FromInt(1));
}


HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, Smi::FromInt(-1));
}


HConstant* HGraph::GetConstantTrue() {
  return GetConstant(&constant_true_, isolate()->heap()->true_value());
}


HConstant* HGraph::GetConstantFalse() {
  return GetConstant(&constant_false_, isolate()->heap()->false_value());
}


HConstant* HGraph::GetConstantHole() {
  return GetConstant(&constant_hole_, isolate()->heap()->the_hole_value());
}


HGraphBuilder::HGraphBuilder(CompilationInfo* info,
                             TypeFeedbackOracle* oracle)
    : function_state_(NULL),
      initial_function_state_(this, info, oracle, NORMAL_RETURN),
      ast_context_(NULL),
      break_scope_(NULL),
      graph_(NULL),
      current_block_(NULL),
      inlined_count_(0),
      zone_(info->isolate()->zone()),
      inline_bailout_(false) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
}


HBasicBlock* HGraphBuilder::CreateJoin(HBasicBlock* first,
                                       HBasicBlock* second,
                                       int join_id) {
  if (first == NULL) {
    return second;
  } else if (second == NULL) {
    return first;
  } else {
    HBasicBlock* join_block = graph_->CreateBasicBlock();
    first->Goto(join_block);
    second->Goto(join_block);
    join_block->SetJoinId(join_id);
    return join_block;
  }
}


HBasicBlock* HGraphBuilder::JoinContinue(IterationStatement* statement,
                                         HBasicBlock* exit_block,
                                         HBasicBlock* continue_block) {
  if (continue_block != NULL) {
    if (exit_block != NULL) exit_block->Goto(continue_block);
    continue_block->SetJoinId(statement->ContinueId());
    return continue_block;
  }
  return exit_block;
}


HBasicBlock* HGraphBuilder::CreateLoop(IterationStatement* statement,
                                       HBasicBlock* loop_entry,
                                       HBasicBlock* body_exit,
                                       HBasicBlock* loop_successor,
                                       HBasicBlock* break_block) {
  if (body_exit != NULL) body_exit->Goto(loop_entry);
  loop_entry->PostProcessLoopHeader(statement);
  if (break_block != NULL) {
    if (loop_successor != NULL) loop_successor->Goto(break_block);
    break_block->SetJoinId(statement->ExitId());
    return break_block;
  }
  return loop_successor;
}


void HBasicBlock::FinishExit(HControlInstruction* instruction) {
  Finish(instruction);
  ClearEnvironment();
}


HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8),
      values_(16),
      phi_list_(NULL) {
  start_environment_ =
      new(zone()) HEnvironment(NULL, info->scope(), info->closure());
  start_environment_->set_ast_id(AstNode::kFunctionEntryId);
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}


Handle<Code> HGraph::Compile(CompilationInfo* info) {
  int values = GetMaximumValueID();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    if (FLAG_trace_bailout) {
      PrintF("Not enough virtual registers for (values).\n");
    }
    return Handle<Code>::null();
  }
  LAllocator allocator(values, this);
  LChunkBuilder builder(info, this, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return Handle<Code>::null();

  if (!allocator.Allocate(chunk)) {
    if (FLAG_trace_bailout) {
      PrintF("Not enough virtual registers (regalloc).\n");
    }
    return Handle<Code>::null();
  }

  MacroAssembler assembler(info->isolate(), NULL, 0);
  LCodeGen generator(chunk, &assembler, info);

  chunk->MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    if (FLAG_trace_codegen) {
      PrintF("Crankshaft Compiler - ");
    }
    CodeGenerator::MakeCodePrologue(info);
    Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info);
    generator.FinishCode(code);
    CodeGenerator::PrintCode(code, info);
    return code;
  }
  return Handle<Code>::null();
}


HBasicBlock* HGraph::CreateBasicBlock() {
  HBasicBlock* result = new(zone()) HBasicBlock(this);
  blocks_.Add(result);
  return result;
}


void HGraph::Canonicalize() {
  if (!FLAG_use_canonicalizing) return;
  HPhase phase("H_Canonicalize", this);
  for (int i = 0; i < blocks()->length(); ++i) {
    HInstruction* instr = blocks()->at(i)->first();
    while (instr != NULL) {
      HValue* value = instr->Canonicalize();
      if (value != instr) instr->DeleteAndReplaceWith(value);
      instr = instr->next();
    }
  }
}


void HGraph::OrderBlocks() {
  HPhase phase("H_Block ordering");
  BitVector visited(blocks_.length(), zone());

  ZoneList<HBasicBlock*> reverse_result(8);
  HBasicBlock* start = blocks_[0];
  Postorder(start, &visited, &reverse_result, NULL);

  blocks_.Rewind(0);
  int index = 0;
  for (int i = reverse_result.length() - 1; i >= 0; --i) {
    HBasicBlock* b = reverse_result[i];
    blocks_.Add(b);
    b->set_block_id(index++);
  }
}


void HGraph::PostorderLoopBlocks(HLoopInformation* loop,
                                 BitVector* visited,
                                 ZoneList<HBasicBlock*>* order,
                                 HBasicBlock* loop_header) {
  for (int i = 0; i < loop->blocks()->length(); ++i) {
    HBasicBlock* b = loop->blocks()->at(i);
    for (HSuccessorIterator it(b->end()); !it.Done(); it.Advance()) {
      Postorder(it.Current(), visited, order, loop_header);
    }
    if (b->IsLoopHeader() && b != loop->loop_header()) {
      PostorderLoopBlocks(b->loop_information(), visited, order, loop_header);
    }
  }
}


void HGraph::Postorder(HBasicBlock* block,
                       BitVector* visited,
                       ZoneList<HBasicBlock*>* order,
                       HBasicBlock* loop_header) {
  if (block == NULL || visited->Contains(block->block_id())) return;
  if (block->parent_loop_header() != loop_header) return;
  visited->Add(block->block_id());
  if (block->IsLoopHeader()) {
    PostorderLoopBlocks(block->loop_information(), visited, order, loop_header);
    for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
      Postorder(it.Current(), visited, order, block);
    }
  } else {
    ASSERT(block->IsFinished());
    for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
      Postorder(it.Current(), visited, order, loop_header);
    }
  }
  ASSERT(block->end()->FirstSuccessor() == NULL ||
         order->Contains(block->end()->FirstSuccessor()) ||
         block->end()->FirstSuccessor()->IsLoopHeader());
  ASSERT(block->end()->SecondSuccessor() == NULL ||
         order->Contains(block->end()->SecondSuccessor()) ||
         block->end()->SecondSuccessor()->IsLoopHeader());
  order->Add(block);
}


void HGraph::AssignDominators() {
  HPhase phase("H_Assign dominators", this);
  for (int i = 0; i < blocks_.length(); ++i) {
    HBasicBlock* block = blocks_[i];
    if (block->IsLoopHeader()) {
      // Only the first predecessor of a loop header is from outside the loop.
      // All others are back edges, and thus cannot dominate the loop header.
      block->AssignCommonDominator(block->predecessors()->first());
      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
      }
    }
  }
}


// Mark all blocks that are dominated by an unconditional soft deoptimize to
// prevent code motion across those blocks.
void HGraph::PropagateDeoptimizingMark() {
  HPhase phase("H_Propagate deoptimizing mark", this);
  MarkAsDeoptimizingRecursively(entry_block());
}


void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
  for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
    HBasicBlock* dominated = block->dominated_blocks()->at(i);
    if (block->IsDeoptimizing()) dominated->MarkAsDeoptimizing();
    MarkAsDeoptimizingRecursively(dominated);
  }
}


void HGraph::EliminateRedundantPhis() {
  HPhase phase("H_Redundant phi elimination", this);

  // Worklist of phis that can potentially be eliminated. Initialized with
  // all phi nodes. When elimination of a phi node modifies another phi node
  // the modified phi node is added to the worklist.
  ZoneList<HPhi*> worklist(blocks_.length());
  for (int i = 0; i < blocks_.length(); ++i) {
    worklist.AddAll(*blocks_[i]->phis());
  }

  while (!worklist.is_empty()) {
    HPhi* phi = worklist.RemoveLast();
    HBasicBlock* block = phi->block();

    // Skip phi node if it was already replaced.
    if (block == NULL) continue;

    // Get replacement value if phi is redundant.
    HValue* replacement = phi->GetRedundantReplacement();

    if (replacement != NULL) {
      // Iterate through the uses and replace them all.
      for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
        HValue* value = it.value();
        value->SetOperandAt(it.index(), replacement);
        if (value->IsPhi()) worklist.Add(HPhi::cast(value));
      }
      block->RemovePhi(phi);
    }
  }
}


void HGraph::EliminateUnreachablePhis() {
  HPhase phase("H_Unreachable phi elimination", this);

  // Initialize worklist.
  ZoneList<HPhi*> phi_list(blocks_.length());
  ZoneList<HPhi*> worklist(blocks_.length());
  for (int i = 0; i < blocks_.length(); ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      phi_list.Add(phi);
      // We can't eliminate phis in the receiver position in the environment
      // because in case of throwing an error we need this value to
      // construct a stack trace.
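      // Seed the worklist with phis that have real (non-phi) uses or occupy
      // the receiver slot; liveness propagates to their operands below.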
      if (phi->HasRealUses() || phi->IsReceiver()) {
        phi->set_is_live(true);
        worklist.Add(phi);
      }
    }
  }

  // Iteratively mark live phis.
  while (!worklist.is_empty()) {
    HPhi* phi = worklist.RemoveLast();
    for (int i = 0; i < phi->OperandCount(); i++) {
      HValue* operand = phi->OperandAt(i);
      if (operand->IsPhi() && !HPhi::cast(operand)->is_live()) {
        HPhi::cast(operand)->set_is_live(true);
        worklist.Add(HPhi::cast(operand));
      }
    }
  }

  // Remove unreachable phis.
  for (int i = 0; i < phi_list.length(); i++) {
    HPhi* phi = phi_list[i];
    if (!phi->is_live()) {
      HBasicBlock* block = phi->block();
      block->RemovePhi(phi);
      block->RecordDeletedPhi(phi->merged_index());
    }
  }
}


bool HGraph::CheckArgumentsPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // We don't support phi uses of arguments for now.
      if (phi->CheckFlag(HValue::kIsArguments)) return false;
    }
  }
  return true;
}


bool HGraph::CheckConstPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // Check for the hole value (from an uninitialized const).
      for (int k = 0; k < phi->OperandCount(); k++) {
        if (phi->OperandAt(k) == GetConstantHole()) return false;
      }
    }
  }
  return true;
}


void HGraph::CollectPhis() {
  int block_count = blocks_.length();
  phi_list_ = new ZoneList<HPhi*>(block_count);
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      phi_list_->Add(phi);
    }
  }
}


void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
  BitVector in_worklist(GetMaximumValueID(), zone());
  for (int i = 0; i < worklist->length(); ++i) {
    ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
    in_worklist.Add(worklist->at(i)->id());
  }

  while (!worklist->is_empty()) {
    HValue* current = worklist->RemoveLast();
    in_worklist.Remove(current->id());
    if (current->UpdateInferredType()) {
      for (HUseIterator it(current->uses()); !it.Done(); it.Advance()) {
        HValue* use = it.value();
        if (!in_worklist.Contains(use->id())) {
          in_worklist.Add(use->id());
          worklist->Add(use);
        }
      }
    }
  }
}


class HRangeAnalysis BASE_EMBEDDED {
 public:
  explicit HRangeAnalysis(HGraph* graph)
      : graph_(graph), zone_(graph->isolate()->zone()), changed_ranges_(16) { }

  void Analyze();

 private:
  void TraceRange(const char* msg, ...);
  void Analyze(HBasicBlock* block);
  void InferControlFlowRange(HCompareIDAndBranch* test, HBasicBlock* dest);
  void UpdateControlFlowRange(Token::Value op, HValue* value, HValue* other);
  void InferRange(HValue* value);
  void RollBackTo(int index);
  void AddRange(HValue* value, Range* range);

  HGraph* graph_;
  Zone* zone_;
  ZoneList<HValue*> changed_ranges_;
};


void HRangeAnalysis::TraceRange(const char* msg, ...) {
  if (FLAG_trace_range) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


void HRangeAnalysis::Analyze() {
  HPhase phase("H_Range analysis", graph_);
  Analyze(graph_->entry_block());
}


void HRangeAnalysis::Analyze(HBasicBlock* block) {
  TraceRange("Analyzing block B%d\n", block->block_id());

  int last_changed_range = changed_ranges_.length() - 1;

  // Infer range based on control flow.
  if (block->predecessors()->length() == 1) {
    HBasicBlock* pred = block->predecessors()->first();
    if (pred->end()->IsCompareIDAndBranch()) {
      InferControlFlowRange(HCompareIDAndBranch::cast(pred->end()), block);
    }
  }

  // Process phi instructions.
  for (int i = 0; i < block->phis()->length(); ++i) {
    HPhi* phi = block->phis()->at(i);
    InferRange(phi);
  }

  // Go through all instructions of the current block.
  HInstruction* instr = block->first();
  while (instr != block->end()) {
    InferRange(instr);
    instr = instr->next();
  }

  // Continue analysis in all dominated blocks.
  for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
    Analyze(block->dominated_blocks()->at(i));
  }

  RollBackTo(last_changed_range);
}


void HRangeAnalysis::InferControlFlowRange(HCompareIDAndBranch* test,
                                           HBasicBlock* dest) {
  ASSERT((test->FirstSuccessor() == dest) == (test->SecondSuccessor() != dest));
  if (test->GetInputRepresentation().IsInteger32()) {
    Token::Value op = test->token();
    if (test->SecondSuccessor() == dest) {
      op = Token::NegateCompareOp(op);
    }
    Token::Value inverted_op = Token::InvertCompareOp(op);
    UpdateControlFlowRange(op, test->left(), test->right());
    UpdateControlFlowRange(inverted_op, test->right(), test->left());
  }
}


// We know that value [op] other. Use this information to update the range on
// value.
void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
                                            HValue* value,
                                            HValue* other) {
  Range temp_range;
  Range* range = other->range() != NULL ? other->range() : &temp_range;
  Range* new_range = NULL;

  TraceRange("Control flow range infer %d %s %d\n",
             value->id(),
             Token::Name(op),
             other->id());

  if (op == Token::EQ || op == Token::EQ_STRICT) {
    // The same range has to apply for value.
    new_range = range->Copy(zone_);
  } else if (op == Token::LT || op == Token::LTE) {
    new_range = range->CopyClearLower(zone_);
    if (op == Token::LT) {
      new_range->AddConstant(-1);
    }
  } else if (op == Token::GT || op == Token::GTE) {
    new_range = range->CopyClearUpper(zone_);
    if (op == Token::GT) {
      new_range->AddConstant(1);
    }
  }

  if (new_range != NULL && !new_range->IsMostGeneric()) {
    AddRange(value, new_range);
  }
}


void HRangeAnalysis::InferRange(HValue* value) {
  ASSERT(!value->HasRange());
  if (!value->representation().IsNone()) {
    value->ComputeInitialRange(zone_);
    Range* range = value->range();
    TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
               value->id(),
               value->Mnemonic(),
               range->lower(),
               range->upper());
  }
}


void HRangeAnalysis::RollBackTo(int index) {
  for (int i = index + 1; i < changed_ranges_.length(); ++i) {
    changed_ranges_[i]->RemoveLastAddedRange();
  }
  changed_ranges_.Rewind(index + 1);
}


void HRangeAnalysis::AddRange(HValue* value, Range* range) {
  Range* original_range = value->range();
  value->AddNewRange(range, zone_);
  changed_ranges_.Add(value);
  Range* new_range = value->range();
  TraceRange("Updated range of %d set to [%d,%d]\n",
             value->id(),
             new_range->lower(),
             new_range->upper());
  if (original_range != NULL) {
    TraceRange("Original range was [%d,%d]\n",
               original_range->lower(),
               original_range->upper());
  }
  TraceRange("New information was [%d,%d]\n",
             range->lower(),
             range->upper());
}


void TraceGVN(const char* msg, ...) {
  if (FLAG_trace_gvn) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


HValueMap::HValueMap(Zone* zone, const HValueMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_flags_(other->present_flags_),
      array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_) {
  memcpy(array_, other->array_, array_size_ * sizeof(HValueMapListElement));
  memcpy(lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
}


void HValueMap::Kill(GVNFlagSet flags) {
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
  present_flags_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HValue* value = array_[i].value;
    if (value != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HValue* value = lists_[current].value;
        if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
          // Drop it.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it.
          lists_[current].next = kept;
          kept = current;
          present_flags_.Add(value->gvn_flags());
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      value = array_[i].value;
      if (value->gvn_flags().ContainsAnyOf(depends_flags)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].value = NULL;
        } else {
          array_[i].value = lists_[head].value;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_flags_.Add(value->gvn_flags());  // Keep it.
      }
    }
  }
}


HValue* HValueMap::Lookup(HValue* value) const {
  uint32_t hash = static_cast<uint32_t>(value->Hashcode());
  uint32_t pos = Bound(hash);
  if (array_[pos].value != NULL) {
    if (array_[pos].value->Equals(value)) return array_[pos].value;
    int next = array_[pos].next;
    while (next != kNil) {
      if (lists_[next].value->Equals(value)) return lists_[next].value;
      next = lists_[next].next;
    }
  }
  return NULL;
}


void HValueMap::Resize(int new_size) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1);
  }

  HValueMapListElement* new_array =
      ZONE->NewArray<HValueMapListElement>(new_size);
  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_array = array_;
  int old_size = array_size_;

  int old_count = count_;
  count_ = 0;
  // Do not modify present_flags_. It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].value != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          Insert(lists_[current].value);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored value.
        Insert(old_array[i].value);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);
}


void HValueMap::ResizeLists(int new_size) {
  ASSERT(new_size > lists_size_);

  HValueMapListElement* new_lists =
      ZONE->NewArray<HValueMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    memcpy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
  }
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}


void HValueMap::Insert(HValue* value) {
  ASSERT(value != NULL);
  // Resizing when half of the hashtable is filled up.
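  // Doubling keeps the load factor below 1/2, so collision chains stay short.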
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) {
    array_[pos].value = value;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1);
    }
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].value = value;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
    array_[pos].next = new_element_pos;
  }
}


class HStackCheckEliminator BASE_EMBEDDED {
 public:
  explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }

  void Process();

 private:
  HGraph* graph_;
};


void HStackCheckEliminator::Process() {
  // For each loop block walk the dominator tree from the backwards branch to
  // the loop header. If a call instruction is encountered the backwards branch
  // is dominated by a call and the stack check in the backwards branch can be
  // removed.
  for (int i = 0; i < graph_->blocks()->length(); i++) {
    HBasicBlock* block = graph_->blocks()->at(i);
    if (block->IsLoopHeader()) {
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      HBasicBlock* dominator = back_edge;
      while (true) {
        HInstruction* instr = dominator->first();
        while (instr != NULL) {
          if (instr->IsCall()) {
            block->loop_information()->stack_check()->Eliminate();
            break;
          }
          instr = instr->next();
        }

        // Done when the loop header is processed.
        if (dominator == block) break;

        // Move up the dominator tree.
        dominator = dominator->dominator();
      }
    }
  }
}


// Simple sparse set with O(1) add, contains, and clear.
class SparseSet {
 public:
  SparseSet(Zone* zone, int capacity)
      : capacity_(capacity),
        length_(0),
        dense_(zone->NewArray<int>(capacity)),
        sparse_(zone->NewArray<int>(capacity)) {
#ifndef NVALGRIND
    // Initialize the sparse array to make valgrind happy.
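    // Contains() may read sparse_[n] before it is ever written; the value is
    // cross-checked against dense_, so garbage is harmless, but the read
    // would otherwise be flagged as uninitialized.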
    memset(sparse_, 0, sizeof(sparse_[0]) * capacity);
#endif
  }

  bool Contains(int n) const {
    ASSERT(0 <= n && n < capacity_);
    int d = sparse_[n];
    return 0 <= d && d < length_ && dense_[d] == n;
  }

  bool Add(int n) {
    if (Contains(n)) return false;
    dense_[length_] = n;
    sparse_[n] = length_;
    ++length_;
    return true;
  }

  void Clear() { length_ = 0; }

 private:
  int capacity_;
  int length_;
  int* dense_;
  int* sparse_;

  DISALLOW_COPY_AND_ASSIGN(SparseSet);
};


class HGlobalValueNumberer BASE_EMBEDDED {
 public:
  explicit HGlobalValueNumberer(HGraph* graph, CompilationInfo* info)
      : graph_(graph),
        info_(info),
        removed_side_effects_(false),
        block_side_effects_(graph->blocks()->length()),
        loop_side_effects_(graph->blocks()->length()),
        visited_on_paths_(graph->zone(), graph->blocks()->length()) {
    ASSERT(info->isolate()->heap()->allow_allocation(false));
    block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
    loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
  }
  ~HGlobalValueNumberer() {
    ASSERT(!info_->isolate()->heap()->allow_allocation(true));
  }

  // Returns true if values with side effects are removed.
  bool Analyze();

 private:
  GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
      HBasicBlock* dominator,
      HBasicBlock* dominated);
  void AnalyzeBlock(HBasicBlock* block, HValueMap* map);
  void ComputeBlockSideEffects();
  void LoopInvariantCodeMotion();
  void ProcessLoopBlock(HBasicBlock* block,
                        HBasicBlock* before_loop,
                        GVNFlagSet loop_kills,
                        GVNFlagSet* accumulated_first_time_depends,
                        GVNFlagSet* accumulated_first_time_changes);
  bool AllowCodeMotion();
  bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);

  HGraph* graph() { return graph_; }
  CompilationInfo* info() { return info_; }
  Zone* zone() { return graph_->zone(); }

  HGraph* graph_;
  CompilationInfo* info_;
  bool removed_side_effects_;

  // A map of block IDs to their side effects.
  ZoneList<GVNFlagSet> block_side_effects_;

  // A map of loop header block IDs to their loop's side effects.
  ZoneList<GVNFlagSet> loop_side_effects_;

  // Used when collecting side effects on paths from dominator to
  // dominated.
  SparseSet visited_on_paths_;
};


bool HGlobalValueNumberer::Analyze() {
  removed_side_effects_ = false;
  ComputeBlockSideEffects();
  if (FLAG_loop_invariant_code_motion) {
    LoopInvariantCodeMotion();
  }
  HValueMap* map = new(zone()) HValueMap();
  AnalyzeBlock(graph_->entry_block(), map);
  return removed_side_effects_;
}


void HGlobalValueNumberer::ComputeBlockSideEffects() {
  // The Analyze phase of GVN can be called multiple times. Clear loop side
  // effects before computing them to erase the contents from previous Analyze
  // passes.
  for (int i = 0; i < loop_side_effects_.length(); ++i) {
    loop_side_effects_[i].RemoveAll();
  }
  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph_->blocks()->at(i);
    HInstruction* instr = block->first();
    int id = block->block_id();
    GVNFlagSet side_effects;
    while (instr != NULL) {
      side_effects.Add(instr->ChangesFlags());
      if (instr->IsSoftDeoptimize()) {
        block_side_effects_[id].RemoveAll();
        side_effects.RemoveAll();
        break;
      }
      instr = instr->next();
    }
    block_side_effects_[id].Add(side_effects);

    // Loop headers are part of their loop.
    if (block->IsLoopHeader()) {
      loop_side_effects_[id].Add(side_effects);
    }

    // Propagate loop side effects upwards.
    if (block->HasParentLoopHeader()) {
      int header_id = block->parent_loop_header()->block_id();
      loop_side_effects_[header_id].Add(block->IsLoopHeader()
                                            ? loop_side_effects_[id]
                                            : side_effects);
    }
  }
}


void HGlobalValueNumberer::LoopInvariantCodeMotion() {
  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph_->blocks()->at(i);
    if (block->IsLoopHeader()) {
      GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
      TraceGVN("Try loop invariant motion for block B%d effects=0x%x\n",
               block->block_id(),
               side_effects.ToIntegral());

      GVNFlagSet accumulated_first_time_depends;
      GVNFlagSet accumulated_first_time_changes;
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
                         &accumulated_first_time_depends,
                         &accumulated_first_time_changes);
      }
    }
  }
}


void HGlobalValueNumberer::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    GVNFlagSet loop_kills,
    GVNFlagSet* first_time_depends,
    GVNFlagSet* first_time_changes) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
  TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
           block->block_id(),
           depends_flags.ToIntegral());
  HInstruction* instr = block->first();
  while (instr != NULL) {
    HInstruction* next = instr->next();
    bool hoisted = false;
    if (instr->CheckFlag(HValue::kUseGVN)) {
      TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
               "loop kills 0x%X\n",
               instr->id(),
               instr->Mnemonic(),
               instr->gvn_flags().ToIntegral(),
               depends_flags.ToIntegral());
      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
      if (instr->IsTransitionElementsKind()) {
        // It's possible to hoist transitions out of a loop as long as the
        // hoisting wouldn't move the transition past a DependsOn of one of
        // its changes or any instruction that might change an object's map
        // or elements contents.
        GVNFlagSet changes = instr->ChangesFlags();
        GVNFlagSet hoist_depends_blockers =
            HValue::ConvertChangesToDependsFlags(changes);
        // In addition to not hoisting transitions above other instructions
        // that change dependencies that the transition changes, it must not
        // be hoisted above map changes and stores to an elements backing
        // store that the transition might change.
        GVNFlagSet hoist_change_blockers = changes;
        hoist_change_blockers.Add(kChangesMaps);
        HTransitionElementsKind* trans = HTransitionElementsKind::cast(instr);
        if (trans->original_map()->has_fast_double_elements()) {
          hoist_change_blockers.Add(kChangesDoubleArrayElements);
        }
        if (trans->transitioned_map()->has_fast_double_elements()) {
          hoist_change_blockers.Add(kChangesArrayElements);
        }
        TraceGVN("Checking dependencies on HTransitionElementsKind %d (%s) "
                 "hoist depends blockers 0x%X, hoist change blockers 0x%X, "
                 "accumulated depends 0x%X, accumulated changes 0x%X\n",
                 instr->id(),
                 instr->Mnemonic(),
                 hoist_depends_blockers.ToIntegral(),
                 hoist_change_blockers.ToIntegral(),
                 first_time_depends->ToIntegral(),
                 first_time_changes->ToIntegral());
        // It's possible to hoist transitions from the current loop only if
        // they dominate all of the successor blocks in the same loop and
        // there are no intervening instructions with Changes/DependsOn flags
        // between them and the beginning of the loop header.
        bool in_nested_loop = block != loop_header &&
            ((block->parent_loop_header() != loop_header) ||
             block->IsLoopHeader());
        can_hoist = !in_nested_loop &&
            block->IsLoopSuccessorDominator() &&
            !first_time_depends->ContainsAnyOf(hoist_depends_blockers) &&
            !first_time_changes->ContainsAnyOf(hoist_change_blockers);
      }

      if (can_hoist) {
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TraceGVN("Hoisting loop invariant instruction %d\n", instr->id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          hoisted = true;
        }
      }
    }
    if (!hoisted) {
      // If an instruction is not hoisted, we have to account for its side
      // effects when hoisting later HTransitionElementsKind instructions.
      first_time_depends->Add(instr->DependsOnFlags());
      first_time_changes->Add(instr->ChangesFlags());
    }
    instr = next;
  }
}


bool HGlobalValueNumberer::AllowCodeMotion() {
  return info()->shared_info()->opt_count() + 1 < Compiler::kDefaultMaxOptCount;
}


bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
                                      HBasicBlock* loop_header) {
  // If we've disabled code motion or we're in a block that unconditionally
  // deoptimizes, don't move any instructions.
  return AllowCodeMotion() && !instr->block()->IsDeoptimizing();
}


GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  GVNFlagSet side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        visited_on_paths_.Add(block->block_id())) {
      side_effects.Add(block_side_effects_[block->block_id()]);
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}


void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block, HValueMap* map) {
  TraceGVN("Analyzing block B%d%s\n",
           block->block_id(),
           block->IsLoopHeader() ? " (loop header)" : "");

  // If this is a loop header kill everything killed by the loop.
  if (block->IsLoopHeader()) {
    map->Kill(loop_side_effects_[block->block_id()]);
  }

  // Go through all instructions of the current block.
  HInstruction* instr = block->first();
  while (instr != NULL) {
    HInstruction* next = instr->next();
    GVNFlagSet flags = instr->ChangesFlags();
    if (!flags.IsEmpty()) {
      // Clear all instructions in the map that are affected by side effects.
      map->Kill(flags);
      TraceGVN("Instruction %d kills\n", instr->id());
    }
    if (instr->CheckFlag(HValue::kUseGVN)) {
      ASSERT(!instr->HasObservableSideEffects());
      HValue* other = map->Lookup(instr);
      if (other != NULL) {
        ASSERT(instr->Equals(other) && other->Equals(instr));
        TraceGVN("Replacing value %d (%s) with value %d (%s)\n",
                 instr->id(),
                 instr->Mnemonic(),
                 other->id(),
                 other->Mnemonic());
        if (instr->HasSideEffects()) removed_side_effects_ = true;
        instr->DeleteAndReplaceWith(other);
      } else {
        map->Add(instr);
      }
    }
    instr = next;
  }

  // Recursively continue analysis for all immediately dominated blocks.
  int length = block->dominated_blocks()->length();
  for (int i = 0; i < length; ++i) {
    HBasicBlock* dominated = block->dominated_blocks()->at(i);
    // No need to copy the map for the last child in the dominator tree.
    HValueMap* successor_map = (i == length - 1) ? map : map->Copy(zone());

    // Kill everything killed on any path between this block and the
    // dominated block.
    // We don't have to traverse these paths if the value map is
    // already empty.
    // If the range of block ids (block_id, dominated_id) is empty
    // there are no such paths.
    if (!successor_map->IsEmpty() &&
        block->block_id() + 1 < dominated->block_id()) {
      visited_on_paths_.Clear();
      successor_map->Kill(CollectSideEffectsOnPathsToDominatedBlock(block,
                                                                    dominated));
    }
    AnalyzeBlock(dominated, successor_map);
  }
}


class HInferRepresentation BASE_EMBEDDED {
 public:
  explicit HInferRepresentation(HGraph* graph)
      : graph_(graph),
        worklist_(8),
        in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }

  void Analyze();

 private:
  Representation TryChange(HValue* current);
  void AddToWorklist(HValue* current);
  void InferBasedOnInputs(HValue* current);
  void AddDependantsToWorklist(HValue* current);
  void InferBasedOnUses(HValue* current);

  Zone* zone() { return graph_->zone(); }

  HGraph* graph_;
  ZoneList<HValue*> worklist_;
  BitVector in_worklist_;
};


void HInferRepresentation::AddToWorklist(HValue* current) {
  if (current->representation().IsSpecialization()) return;
  if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
  if (in_worklist_.Contains(current->id())) return;
  worklist_.Add(current);
  in_worklist_.Add(current->id());
}


// This method tries to specialize the representation type of the value
// given as a parameter. The value is asked to infer its representation type
// based on its inputs. If the inferred type is more specialized, then this
// becomes the new representation type of the node.
void HInferRepresentation::InferBasedOnInputs(HValue* current) {
  Representation r = current->representation();
  if (r.IsSpecialization()) return;
  ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
  Representation inferred = current->InferredRepresentation();
  if (inferred.IsSpecialization()) {
    if (FLAG_trace_representation) {
      PrintF("Changing #%d representation %s -> %s based on inputs\n",
             current->id(),
             r.Mnemonic(),
             inferred.Mnemonic());
    }
    current->ChangeRepresentation(inferred);
    AddDependantsToWorklist(current);
  }
}


void HInferRepresentation::AddDependantsToWorklist(HValue* value) {
  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
    AddToWorklist(it.value());
  }
  for (int i = 0; i < value->OperandCount(); ++i) {
    AddToWorklist(value->OperandAt(i));
  }
}


// This method calculates whether specializing the representation of the value
// given as the parameter has a benefit in terms of fewer necessary type
// conversions. If there is a benefit, then the representation of the value is
// specialized.
void HInferRepresentation::InferBasedOnUses(HValue* value) {
  Representation r = value->representation();
  if (r.IsSpecialization() || value->HasNoUses()) return;
  ASSERT(value->CheckFlag(HValue::kFlexibleRepresentation));
  Representation new_rep = TryChange(value);
  if (!new_rep.IsNone()) {
    if (!value->representation().Equals(new_rep)) {
      if (FLAG_trace_representation) {
        PrintF("Changing #%d representation %s -> %s based on uses\n",
               value->id(),
               r.Mnemonic(),
               new_rep.Mnemonic());
      }
      value->ChangeRepresentation(new_rep);
      AddDependantsToWorklist(value);
    }
  }
}


Representation HInferRepresentation::TryChange(HValue* value) {
  // Array of use counts for each representation.
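  // Each use is weighted by its loop nesting (LoopWeight()), so uses inside
  // loops count more heavily.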
1817 int use_count[Representation::kNumRepresentations] = { 0 }; 1818 1819 for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) { 1820 HValue* use = it.value(); 1821 Representation rep = use->RequiredInputRepresentation(it.index()); 1822 if (rep.IsNone()) continue; 1823 if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]); 1824 use_count[rep.kind()] += use->LoopWeight(); 1825 } 1826 int tagged_count = use_count[Representation::kTagged]; 1827 int double_count = use_count[Representation::kDouble]; 1828 int int32_count = use_count[Representation::kInteger32]; 1829 int non_tagged_count = double_count + int32_count; 1830 1831 // If a non-loop phi has tagged uses, don't convert it to untagged. 1832 if (value->IsPhi() && !value->block()->IsLoopHeader() && tagged_count > 0) { 1833 return Representation::None(); 1834 } 1835 1836 // Prefer unboxing over boxing, the latter is more expensive. 1837 if (tagged_count > non_tagged_count) return Representation::None(); 1838 1839 // Prefer Integer32 over Double, if possible. 1840 if (int32_count > 0 && value->IsConvertibleToInteger()) { 1841 return Representation::Integer32(); 1842 } 1843 1844 if (double_count > 0) return Representation::Double(); 1845 1846 return Representation::None(); 1847} 1848 1849 1850void HInferRepresentation::Analyze() { 1851 HPhase phase("H_Infer representations", graph_); 1852 1853 // (1) Initialize bit vectors and count real uses. Each phi gets a 1854 // bit-vector of length <number of phis>. 1855 const ZoneList<HPhi*>* phi_list = graph_->phi_list(); 1856 int phi_count = phi_list->length(); 1857 ZoneList<BitVector*> connected_phis(phi_count); 1858 for (int i = 0; i < phi_count; ++i) { 1859 phi_list->at(i)->InitRealUses(i); 1860 BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone()); 1861 connected_set->Add(i); 1862 connected_phis.Add(connected_set); 1863 } 1864 1865 // (2) Do a fixed point iteration to find the set of connected phis. A 1866 // phi is connected to another phi if its value is used either directly or 1867 // indirectly through a transitive closure of the def-use relation. 1868 bool change = true; 1869 while (change) { 1870 change = false; 1871 // We normally have far more "forward edges" than "backward edges", 1872 // so we terminate faster when we walk backwards. 1873 for (int i = phi_count - 1; i >= 0; --i) { 1874 HPhi* phi = phi_list->at(i); 1875 for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) { 1876 HValue* use = it.value(); 1877 if (use->IsPhi()) { 1878 int id = HPhi::cast(use)->phi_id(); 1879 if (connected_phis[i]->UnionIsChanged(*connected_phis[id])) 1880 change = true; 1881 } 1882 } 1883 } 1884 } 1885 1886 // (3) Use the phi reachability information from step 2 to 1887 // (a) sum up the non-phi use counts of all connected phis. 1888 // (b) push information about values which can't be converted to integer 1889 // without deoptimization through the phi use-def chains, avoiding 1890 // unnecessary deoptimizations later. 1891 for (int i = 0; i < phi_count; ++i) { 1892 HPhi* phi = phi_list->at(i); 1893 bool cti = phi->AllOperandsConvertibleToInteger(); 1894 for (BitVector::Iterator it(connected_phis.at(i)); 1895 !it.Done(); 1896 it.Advance()) { 1897 int index = it.Current(); 1898 HPhi* it_use = phi_list->at(it.Current()); 1899 if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice! 
1900 if (!cti) it_use->set_is_convertible_to_integer(false); 1901 } 1902 } 1903 1904 // Initialize work list 1905 for (int i = 0; i < graph_->blocks()->length(); ++i) { 1906 HBasicBlock* block = graph_->blocks()->at(i); 1907 const ZoneList<HPhi*>* phis = block->phis(); 1908 for (int j = 0; j < phis->length(); ++j) { 1909 AddToWorklist(phis->at(j)); 1910 } 1911 1912 HInstruction* current = block->first(); 1913 while (current != NULL) { 1914 AddToWorklist(current); 1915 current = current->next(); 1916 } 1917 } 1918 1919 // Do a fixed point iteration, trying to improve representations 1920 while (!worklist_.is_empty()) { 1921 HValue* current = worklist_.RemoveLast(); 1922 in_worklist_.Remove(current->id()); 1923 InferBasedOnInputs(current); 1924 InferBasedOnUses(current); 1925 } 1926} 1927 1928 1929void HGraph::InitializeInferredTypes() { 1930 HPhase phase("H_Inferring types", this); 1931 InitializeInferredTypes(0, this->blocks_.length() - 1); 1932} 1933 1934 1935void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) { 1936 for (int i = from_inclusive; i <= to_inclusive; ++i) { 1937 HBasicBlock* block = blocks_[i]; 1938 1939 const ZoneList<HPhi*>* phis = block->phis(); 1940 for (int j = 0; j < phis->length(); j++) { 1941 phis->at(j)->UpdateInferredType(); 1942 } 1943 1944 HInstruction* current = block->first(); 1945 while (current != NULL) { 1946 current->UpdateInferredType(); 1947 current = current->next(); 1948 } 1949 1950 if (block->IsLoopHeader()) { 1951 HBasicBlock* last_back_edge = 1952 block->loop_information()->GetLastBackEdge(); 1953 InitializeInferredTypes(i + 1, last_back_edge->block_id()); 1954 // Skip all blocks already processed by the recursive call. 1955 i = last_back_edge->block_id(); 1956 // Update phis of the loop header now after the whole loop body is 1957 // guaranteed to be processed. 1958 ZoneList<HValue*> worklist(block->phis()->length()); 1959 for (int j = 0; j < block->phis()->length(); ++j) { 1960 worklist.Add(block->phis()->at(j)); 1961 } 1962 InferTypes(&worklist); 1963 } 1964 } 1965} 1966 1967 1968void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) { 1969 HValue* current = value; 1970 while (current != NULL) { 1971 if (visited->Contains(current->id())) return; 1972 1973 // For phis, we must propagate the check to all of its inputs. 1974 if (current->IsPhi()) { 1975 visited->Add(current->id()); 1976 HPhi* phi = HPhi::cast(current); 1977 for (int i = 0; i < phi->OperandCount(); ++i) { 1978 PropagateMinusZeroChecks(phi->OperandAt(i), visited); 1979 } 1980 break; 1981 } 1982 1983 // For multiplication and division, we must propagate to the left and 1984 // the right side. 1985 if (current->IsMul()) { 1986 HMul* mul = HMul::cast(current); 1987 mul->EnsureAndPropagateNotMinusZero(visited); 1988 PropagateMinusZeroChecks(mul->left(), visited); 1989 PropagateMinusZeroChecks(mul->right(), visited); 1990 } else if (current->IsDiv()) { 1991 HDiv* div = HDiv::cast(current); 1992 div->EnsureAndPropagateNotMinusZero(visited); 1993 PropagateMinusZeroChecks(div->left(), visited); 1994 PropagateMinusZeroChecks(div->right(), visited); 1995 } 1996 1997 current = current->EnsureAndPropagateNotMinusZero(visited); 1998 } 1999} 2000 2001 2002void HGraph::InsertRepresentationChangeForUse(HValue* value, 2003 HValue* use_value, 2004 int use_index, 2005 Representation to) { 2006 // Insert the representation change right before its use. For phi-uses we 2007 // insert at the end of the corresponding predecessor. 
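  // A phi's use_index-th operand flows in along the use_index-th incoming
  // edge, so the change has to be emitted at the end of that predecessor
  // block, just before its control-flow instruction.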
2008 HInstruction* next = NULL; 2009 if (use_value->IsPhi()) { 2010 next = use_value->block()->predecessors()->at(use_index)->end(); 2011 } else { 2012 next = HInstruction::cast(use_value); 2013 } 2014 2015 // For constants we try to make the representation change at compile 2016 // time. When a representation change is not possible without loss of 2017 // information we treat constants like normal instructions and insert the 2018 // change instructions for them. 2019 HInstruction* new_value = NULL; 2020 bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32); 2021 bool deoptimize_on_undefined = 2022 use_value->CheckFlag(HValue::kDeoptimizeOnUndefined); 2023 if (value->IsConstant()) { 2024 HConstant* constant = HConstant::cast(value); 2025 // Try to create a new copy of the constant with the new representation. 2026 new_value = is_truncating 2027 ? constant->CopyToTruncatedInt32() 2028 : constant->CopyToRepresentation(to); 2029 } 2030 2031 if (new_value == NULL) { 2032 new_value = new(zone()) HChange(value, to, 2033 is_truncating, deoptimize_on_undefined); 2034 } 2035 2036 new_value->InsertBefore(next); 2037 use_value->SetOperandAt(use_index, new_value); 2038} 2039 2040 2041void HGraph::InsertRepresentationChangesForValue(HValue* value) { 2042 Representation r = value->representation(); 2043 if (r.IsNone()) return; 2044 if (value->HasNoUses()) return; 2045 2046 for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) { 2047 HValue* use_value = it.value(); 2048 int use_index = it.index(); 2049 Representation req = use_value->RequiredInputRepresentation(use_index); 2050 if (req.IsNone() || req.Equals(r)) continue; 2051 InsertRepresentationChangeForUse(value, use_value, use_index, req); 2052 } 2053 if (value->HasNoUses()) { 2054 ASSERT(value->IsConstant()); 2055 value->DeleteAndReplaceWith(NULL); 2056 } 2057 2058 // The only purpose of a HForceRepresentation is to represent the value 2059 // after the (possible) HChange instruction. We make it disappear. 2060 if (value->IsForceRepresentation()) { 2061 value->DeleteAndReplaceWith(HForceRepresentation::cast(value)->value()); 2062 } 2063} 2064 2065 2066void HGraph::InsertRepresentationChanges() { 2067 HPhase phase("H_Representation changes", this); 2068 2069 // Compute truncation flag for phis: Initially assume that all 2070 // int32-phis allow truncation and iteratively remove the ones that 2071 // are used in an operation that does not allow a truncating 2072 // conversion. 2073 // TODO(fschneider): Replace this with a worklist-based iteration. 2074 for (int i = 0; i < phi_list()->length(); i++) { 2075 HPhi* phi = phi_list()->at(i); 2076 if (phi->representation().IsInteger32()) { 2077 phi->SetFlag(HValue::kTruncatingToInt32); 2078 } 2079 } 2080 bool change = true; 2081 while (change) { 2082 change = false; 2083 for (int i = 0; i < phi_list()->length(); i++) { 2084 HPhi* phi = phi_list()->at(i); 2085 if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue; 2086 if (!phi->CheckUsesForFlag(HValue::kTruncatingToInt32)) { 2087 phi->ClearFlag(HValue::kTruncatingToInt32); 2088 change = true; 2089 } 2090 } 2091 } 2092 2093 for (int i = 0; i < blocks_.length(); ++i) { 2094 // Process phi instructions first. 2095 const ZoneList<HPhi*>* phis = blocks_[i]->phis(); 2096 for (int j = 0; j < phis->length(); j++) { 2097 InsertRepresentationChangesForValue(phis->at(j)); 2098 } 2099 2100 // Process normal instructions. 
2101 HInstruction* current = blocks_[i]->first(); 2102 while (current != NULL) { 2103 InsertRepresentationChangesForValue(current); 2104 current = current->next(); 2105 } 2106 } 2107} 2108 2109 2110void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) { 2111 if (phi->CheckFlag(HValue::kDeoptimizeOnUndefined)) return; 2112 phi->SetFlag(HValue::kDeoptimizeOnUndefined); 2113 for (int i = 0; i < phi->OperandCount(); ++i) { 2114 HValue* input = phi->OperandAt(i); 2115 if (input->IsPhi()) { 2116 RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input)); 2117 } 2118 } 2119} 2120 2121 2122void HGraph::MarkDeoptimizeOnUndefined() { 2123 HPhase phase("H_MarkDeoptimizeOnUndefined", this); 2124 // Compute DeoptimizeOnUndefined flag for phis. 2125 // Any phi that can reach a use with DeoptimizeOnUndefined set must 2126 // have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with 2127 // double input representation, has this flag set. 2128 // The flag is used by HChange tagged->double, which must deoptimize 2129 // if one of its uses has this flag set. 2130 for (int i = 0; i < phi_list()->length(); i++) { 2131 HPhi* phi = phi_list()->at(i); 2132 if (phi->representation().IsDouble()) { 2133 for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) { 2134 if (it.value()->CheckFlag(HValue::kDeoptimizeOnUndefined)) { 2135 RecursivelyMarkPhiDeoptimizeOnUndefined(phi); 2136 break; 2137 } 2138 } 2139 } 2140 } 2141} 2142 2143 2144void HGraph::ComputeMinusZeroChecks() { 2145 BitVector visited(GetMaximumValueID(), zone()); 2146 for (int i = 0; i < blocks_.length(); ++i) { 2147 for (HInstruction* current = blocks_[i]->first(); 2148 current != NULL; 2149 current = current->next()) { 2150 if (current->IsChange()) { 2151 HChange* change = HChange::cast(current); 2152 // Propagate flags for negative zero checks upwards from conversions 2153 // int32-to-tagged and int32-to-double. 2154 Representation from = change->value()->representation(); 2155 ASSERT(from.Equals(change->from())); 2156 if (from.IsInteger32()) { 2157 ASSERT(change->to().IsTagged() || change->to().IsDouble()); 2158 ASSERT(visited.IsEmpty()); 2159 PropagateMinusZeroChecks(change->value(), &visited); 2160 visited.Clear(); 2161 } 2162 } 2163 } 2164 } 2165} 2166 2167 2168// Implementation of utility class to encapsulate the translation state for 2169// a (possibly inlined) function. 2170FunctionState::FunctionState(HGraphBuilder* owner, 2171 CompilationInfo* info, 2172 TypeFeedbackOracle* oracle, 2173 ReturnHandlingFlag return_handling) 2174 : owner_(owner), 2175 compilation_info_(info), 2176 oracle_(oracle), 2177 call_context_(NULL), 2178 return_handling_(return_handling), 2179 function_return_(NULL), 2180 test_context_(NULL), 2181 outer_(owner->function_state()) { 2182 if (outer_ != NULL) { 2183 // State for an inline function. 2184 if (owner->ast_context()->IsTest()) { 2185 HBasicBlock* if_true = owner->graph()->CreateBasicBlock(); 2186 HBasicBlock* if_false = owner->graph()->CreateBasicBlock(); 2187 if_true->MarkAsInlineReturnTarget(); 2188 if_false->MarkAsInlineReturnTarget(); 2189 Expression* cond = TestContext::cast(owner->ast_context())->condition(); 2190 // The AstContext constructor pushed on the context stack. This newed 2191 // instance is the reason that AstContext can't be BASE_EMBEDDED. 
2192 test_context_ = new TestContext(owner, cond, if_true, if_false); 2193 } else { 2194 function_return_ = owner->graph()->CreateBasicBlock(); 2195 function_return()->MarkAsInlineReturnTarget(); 2196 } 2197 // Set this after possibly allocating a new TestContext above. 2198 call_context_ = owner->ast_context(); 2199 } 2200 2201 // Push on the state stack. 2202 owner->set_function_state(this); 2203} 2204 2205 2206FunctionState::~FunctionState() { 2207 delete test_context_; 2208 owner_->set_function_state(outer_); 2209} 2210 2211 2212// Implementation of utility classes to represent an expression's context in 2213// the AST. 2214AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind) 2215 : owner_(owner), 2216 kind_(kind), 2217 outer_(owner->ast_context()), 2218 for_typeof_(false) { 2219 owner->set_ast_context(this); // Push. 2220#ifdef DEBUG 2221 ASSERT(owner->environment()->frame_type() == JS_FUNCTION); 2222 original_length_ = owner->environment()->length(); 2223#endif 2224} 2225 2226 2227AstContext::~AstContext() { 2228 owner_->set_ast_context(outer_); // Pop. 2229} 2230 2231 2232EffectContext::~EffectContext() { 2233 ASSERT(owner()->HasStackOverflow() || 2234 owner()->current_block() == NULL || 2235 (owner()->environment()->length() == original_length_ && 2236 owner()->environment()->frame_type() == JS_FUNCTION)); 2237} 2238 2239 2240ValueContext::~ValueContext() { 2241 ASSERT(owner()->HasStackOverflow() || 2242 owner()->current_block() == NULL || 2243 (owner()->environment()->length() == original_length_ + 1 && 2244 owner()->environment()->frame_type() == JS_FUNCTION)); 2245} 2246 2247 2248void EffectContext::ReturnValue(HValue* value) { 2249 // The value is simply ignored. 2250} 2251 2252 2253void ValueContext::ReturnValue(HValue* value) { 2254 // The value is tracked in the bailout environment, and communicated 2255 // through the environment as the result of the expression. 
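  // Values flagged kIsArguments stand for the not-yet-materialized arguments
  // object, so they are only accepted when the context explicitly allows
  // them; otherwise we bail out below.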
2256 if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) { 2257 owner()->Bailout("bad value context for arguments value"); 2258 } 2259 owner()->Push(value); 2260} 2261 2262 2263void TestContext::ReturnValue(HValue* value) { 2264 BuildBranch(value); 2265} 2266 2267 2268void EffectContext::ReturnInstruction(HInstruction* instr, int ast_id) { 2269 ASSERT(!instr->IsControlInstruction()); 2270 owner()->AddInstruction(instr); 2271 if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id); 2272} 2273 2274 2275void EffectContext::ReturnControl(HControlInstruction* instr, int ast_id) { 2276 ASSERT(!instr->HasObservableSideEffects()); 2277 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock(); 2278 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock(); 2279 instr->SetSuccessorAt(0, empty_true); 2280 instr->SetSuccessorAt(1, empty_false); 2281 owner()->current_block()->Finish(instr); 2282 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id); 2283 owner()->set_current_block(join); 2284} 2285 2286 2287void ValueContext::ReturnInstruction(HInstruction* instr, int ast_id) { 2288 ASSERT(!instr->IsControlInstruction()); 2289 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) { 2290 return owner()->Bailout("bad value context for arguments object value"); 2291 } 2292 owner()->AddInstruction(instr); 2293 owner()->Push(instr); 2294 if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id); 2295} 2296 2297 2298void ValueContext::ReturnControl(HControlInstruction* instr, int ast_id) { 2299 ASSERT(!instr->HasObservableSideEffects()); 2300 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) { 2301 return owner()->Bailout("bad value context for arguments object value"); 2302 } 2303 HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock(); 2304 HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock(); 2305 instr->SetSuccessorAt(0, materialize_true); 2306 instr->SetSuccessorAt(1, materialize_false); 2307 owner()->current_block()->Finish(instr); 2308 owner()->set_current_block(materialize_true); 2309 owner()->Push(owner()->graph()->GetConstantTrue()); 2310 owner()->set_current_block(materialize_false); 2311 owner()->Push(owner()->graph()->GetConstantFalse()); 2312 HBasicBlock* join = 2313 owner()->CreateJoin(materialize_true, materialize_false, ast_id); 2314 owner()->set_current_block(join); 2315} 2316 2317 2318void TestContext::ReturnInstruction(HInstruction* instr, int ast_id) { 2319 ASSERT(!instr->IsControlInstruction()); 2320 HGraphBuilder* builder = owner(); 2321 builder->AddInstruction(instr); 2322 // We expect a simulate after every expression with side effects, though 2323 // this one isn't actually needed (and wouldn't work if it were targeted). 
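  // Temporarily push the value so that the simulate records a consistent
  // expression stack, then pop it again; BuildBranch below consumes the value
  // directly.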
2324 if (instr->HasObservableSideEffects()) { 2325 builder->Push(instr); 2326 builder->AddSimulate(ast_id); 2327 builder->Pop(); 2328 } 2329 BuildBranch(instr); 2330} 2331 2332 2333void TestContext::ReturnControl(HControlInstruction* instr, int ast_id) { 2334 ASSERT(!instr->HasObservableSideEffects()); 2335 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock(); 2336 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock(); 2337 instr->SetSuccessorAt(0, empty_true); 2338 instr->SetSuccessorAt(1, empty_false); 2339 owner()->current_block()->Finish(instr); 2340 empty_true->Goto(if_true(), owner()->function_state()->drop_extra()); 2341 empty_false->Goto(if_false(), owner()->function_state()->drop_extra()); 2342 owner()->set_current_block(NULL); 2343} 2344 2345 2346void TestContext::BuildBranch(HValue* value) { 2347 // We expect the graph to be in edge-split form: there is no edge that 2348 // connects a branch node to a join node. We conservatively ensure that 2349 // property by always adding an empty block on the outgoing edges of this 2350 // branch. 2351 HGraphBuilder* builder = owner(); 2352 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) { 2353 builder->Bailout("arguments object value in a test context"); 2354 } 2355 HBasicBlock* empty_true = builder->graph()->CreateBasicBlock(); 2356 HBasicBlock* empty_false = builder->graph()->CreateBasicBlock(); 2357 unsigned test_id = condition()->test_id(); 2358 ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id)); 2359 HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected); 2360 builder->current_block()->Finish(test); 2361 2362 empty_true->Goto(if_true(), owner()->function_state()->drop_extra()); 2363 empty_false->Goto(if_false(), owner()->function_state()->drop_extra()); 2364 builder->set_current_block(NULL); 2365} 2366 2367 2368// HGraphBuilder infrastructure for bailing out and checking bailouts. 
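// CHECK_BAILOUT returns from the calling visitor if the wrapped call triggered
// a bailout (recorded as a stack overflow). CHECK_ALIVE additionally returns
// if the current block has been cleared, i.e. the code being translated has
// become unreachable.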
2369#define CHECK_BAILOUT(call) \ 2370 do { \ 2371 call; \ 2372 if (HasStackOverflow()) return; \ 2373 } while (false) 2374 2375 2376#define CHECK_ALIVE(call) \ 2377 do { \ 2378 call; \ 2379 if (HasStackOverflow() || current_block() == NULL) return; \ 2380 } while (false) 2381 2382 2383void HGraphBuilder::Bailout(const char* reason) { 2384 if (FLAG_trace_bailout) { 2385 SmartArrayPointer<char> name( 2386 info()->shared_info()->DebugName()->ToCString()); 2387 PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *name, reason); 2388 } 2389 SetStackOverflow(); 2390} 2391 2392 2393void HGraphBuilder::VisitForEffect(Expression* expr) { 2394 EffectContext for_effect(this); 2395 Visit(expr); 2396} 2397 2398 2399void HGraphBuilder::VisitForValue(Expression* expr, ArgumentsAllowedFlag flag) { 2400 ValueContext for_value(this, flag); 2401 Visit(expr); 2402} 2403 2404 2405void HGraphBuilder::VisitForTypeOf(Expression* expr) { 2406 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED); 2407 for_value.set_for_typeof(true); 2408 Visit(expr); 2409} 2410 2411 2412 2413void HGraphBuilder::VisitForControl(Expression* expr, 2414 HBasicBlock* true_block, 2415 HBasicBlock* false_block) { 2416 TestContext for_test(this, expr, true_block, false_block); 2417 Visit(expr); 2418} 2419 2420 2421HValue* HGraphBuilder::VisitArgument(Expression* expr) { 2422 VisitForValue(expr); 2423 if (HasStackOverflow() || current_block() == NULL) return NULL; 2424 HValue* value = Pop(); 2425 Push(AddInstruction(new(zone()) HPushArgument(value))); 2426 return value; 2427} 2428 2429 2430void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) { 2431 for (int i = 0; i < arguments->length(); i++) { 2432 CHECK_ALIVE(VisitArgument(arguments->at(i))); 2433 } 2434} 2435 2436 2437void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) { 2438 for (int i = 0; i < exprs->length(); ++i) { 2439 CHECK_ALIVE(VisitForValue(exprs->at(i))); 2440 } 2441} 2442 2443 2444HGraph* HGraphBuilder::CreateGraph() { 2445 graph_ = new(zone()) HGraph(info()); 2446 if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info()); 2447 2448 { 2449 HPhase phase("H_Block building"); 2450 current_block_ = graph()->entry_block(); 2451 2452 Scope* scope = info()->scope(); 2453 if (scope->HasIllegalRedeclaration()) { 2454 Bailout("function with illegal redeclaration"); 2455 return NULL; 2456 } 2457 if (scope->calls_eval()) { 2458 Bailout("function calls eval"); 2459 return NULL; 2460 } 2461 SetUpScope(scope); 2462 2463 // Add an edge to the body entry. This is warty: the graph's start 2464 // environment will be used by the Lithium translation as the initial 2465 // environment on graph entry, but it has now been mutated by the 2466 // Hydrogen translation of the instructions in the start block. This 2467 // environment uses values which have not been defined yet. These 2468 // Hydrogen instructions will then be replayed by the Lithium 2469 // translation, so they cannot have an environment effect. The edge to 2470 // the body's entry block (along with some special logic for the start 2471 // block in HInstruction::InsertAfter) seals the start block from 2472 // getting unwanted instructions inserted. 2473 // 2474 // TODO(kmillikin): Fix this. Stop mutating the initial environment. 2475 // Make the Hydrogen instructions in the initial block into Hydrogen 2476 // values (but not instructions), present in the initial environment and 2477 // not replayed by the Lithium translation. 
2478 HEnvironment* initial_env = environment()->CopyWithoutHistory(); 2479 HBasicBlock* body_entry = CreateBasicBlock(initial_env); 2480 current_block()->Goto(body_entry); 2481 body_entry->SetJoinId(AstNode::kFunctionEntryId); 2482 set_current_block(body_entry); 2483 2484 // Handle implicit declaration of the function name in named function 2485 // expressions before other declarations. 2486 if (scope->is_function_scope() && scope->function() != NULL) { 2487 HandleDeclaration(scope->function(), CONST, NULL, NULL); 2488 } 2489 VisitDeclarations(scope->declarations()); 2490 AddSimulate(AstNode::kDeclarationsId); 2491 2492 HValue* context = environment()->LookupContext(); 2493 AddInstruction( 2494 new(zone()) HStackCheck(context, HStackCheck::kFunctionEntry)); 2495 2496 VisitStatements(info()->function()->body()); 2497 if (HasStackOverflow()) return NULL; 2498 2499 if (current_block() != NULL) { 2500 HReturn* instr = new(zone()) HReturn(graph()->GetConstantUndefined()); 2501 current_block()->FinishExit(instr); 2502 set_current_block(NULL); 2503 } 2504 } 2505 2506 graph()->OrderBlocks(); 2507 graph()->AssignDominators(); 2508 2509#ifdef DEBUG 2510 // Do a full verify after building the graph and computing dominators. 2511 graph()->Verify(true); 2512#endif 2513 2514 graph()->PropagateDeoptimizingMark(); 2515 if (!graph()->CheckConstPhiUses()) { 2516 Bailout("Unsupported phi use of const variable"); 2517 return NULL; 2518 } 2519 graph()->EliminateRedundantPhis(); 2520 if (!graph()->CheckArgumentsPhiUses()) { 2521 Bailout("Unsupported phi use of arguments"); 2522 return NULL; 2523 } 2524 if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis(); 2525 graph()->CollectPhis(); 2526 2527 if (graph()->has_osr_loop_entry()) { 2528 const ZoneList<HPhi*>* phis = graph()->osr_loop_entry()->phis(); 2529 for (int j = 0; j < phis->length(); j++) { 2530 HPhi* phi = phis->at(j); 2531 graph()->osr_values()->at(phi->merged_index())->set_incoming_value(phi); 2532 } 2533 } 2534 2535 HInferRepresentation rep(graph()); 2536 rep.Analyze(); 2537 2538 graph()->MarkDeoptimizeOnUndefined(); 2539 graph()->InsertRepresentationChanges(); 2540 2541 graph()->InitializeInferredTypes(); 2542 graph()->Canonicalize(); 2543 2544 // Perform common subexpression elimination and loop-invariant code motion. 2545 if (FLAG_use_gvn) { 2546 HPhase phase("H_Global value numbering", graph()); 2547 HGlobalValueNumberer gvn(graph(), info()); 2548 bool removed_side_effects = gvn.Analyze(); 2549 // Trigger a second analysis pass to further eliminate duplicate values that 2550 // could only be discovered by removing side-effect-generating instructions 2551 // during the first pass. 2552 if (FLAG_smi_only_arrays && removed_side_effects) { 2553 removed_side_effects = gvn.Analyze(); 2554 ASSERT(!removed_side_effects); 2555 } 2556 } 2557 2558 if (FLAG_use_range) { 2559 HRangeAnalysis rangeAnalysis(graph()); 2560 rangeAnalysis.Analyze(); 2561 } 2562 graph()->ComputeMinusZeroChecks(); 2563 2564 // Eliminate redundant stack checks on backwards branches. 2565 HStackCheckEliminator sce(graph()); 2566 sce.Process(); 2567 2568 // Replace the results of check instructions with the original value, if the 2569 // result is used. This is safe now, since we don't do code motion after this 2570 // point. It enables better register allocation since the value produced by 2571 // check instructions is really a copy of the original value. 
2572 graph()->ReplaceCheckedValues(); 2573 2574 return graph(); 2575} 2576 2577 2578void HGraph::ReplaceCheckedValues() { 2579 HPhase phase("H_Replace checked values", this); 2580 for (int i = 0; i < blocks()->length(); ++i) { 2581 HInstruction* instr = blocks()->at(i)->first(); 2582 while (instr != NULL) { 2583 if (instr->IsBoundsCheck()) { 2584 // Replace all uses of the checked value with the original input. 2585 ASSERT(instr->UseCount() > 0); 2586 instr->ReplaceAllUsesWith(HBoundsCheck::cast(instr)->index()); 2587 } 2588 instr = instr->next(); 2589 } 2590 } 2591} 2592 2593 2594HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) { 2595 ASSERT(current_block() != NULL); 2596 current_block()->AddInstruction(instr); 2597 return instr; 2598} 2599 2600 2601void HGraphBuilder::AddSimulate(int ast_id) { 2602 ASSERT(current_block() != NULL); 2603 current_block()->AddSimulate(ast_id); 2604} 2605 2606 2607void HGraphBuilder::AddPhi(HPhi* instr) { 2608 ASSERT(current_block() != NULL); 2609 current_block()->AddPhi(instr); 2610} 2611 2612 2613void HGraphBuilder::PushAndAdd(HInstruction* instr) { 2614 Push(instr); 2615 AddInstruction(instr); 2616} 2617 2618 2619template <class Instruction> 2620HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) { 2621 int count = call->argument_count(); 2622 ZoneList<HValue*> arguments(count); 2623 for (int i = 0; i < count; ++i) { 2624 arguments.Add(Pop()); 2625 } 2626 2627 while (!arguments.is_empty()) { 2628 AddInstruction(new(zone()) HPushArgument(arguments.RemoveLast())); 2629 } 2630 return call; 2631} 2632 2633 2634void HGraphBuilder::SetUpScope(Scope* scope) { 2635 HConstant* undefined_constant = new(zone()) HConstant( 2636 isolate()->factory()->undefined_value(), Representation::Tagged()); 2637 AddInstruction(undefined_constant); 2638 graph_->set_undefined_constant(undefined_constant); 2639 2640 HArgumentsObject* object = new(zone()) HArgumentsObject; 2641 AddInstruction(object); 2642 graph()->SetArgumentsObject(object); 2643 2644 // Set the initial values of parameters including "this". "This" has 2645 // parameter index 0. 2646 ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count()); 2647 2648 for (int i = 0; i < environment()->parameter_count(); ++i) { 2649 HInstruction* parameter = AddInstruction(new(zone()) HParameter(i)); 2650 environment()->Bind(i, parameter); 2651 } 2652 2653 // First special is HContext. 2654 HInstruction* context = AddInstruction(new(zone()) HContext); 2655 environment()->BindContext(context); 2656 2657 // Initialize specials and locals to undefined. 2658 for (int i = environment()->parameter_count() + 1; 2659 i < environment()->length(); 2660 ++i) { 2661 environment()->Bind(i, undefined_constant); 2662 } 2663 2664 // Handle the arguments and arguments shadow variables specially (they do 2665 // not have declarations). 
2666 if (scope->arguments() != NULL) { 2667 if (!scope->arguments()->IsStackAllocated()) { 2668 return Bailout("context-allocated arguments"); 2669 } 2670 2671 environment()->Bind(scope->arguments(), 2672 graph()->GetArgumentsObject()); 2673 } 2674} 2675 2676 2677void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) { 2678 for (int i = 0; i < statements->length(); i++) { 2679 CHECK_ALIVE(Visit(statements->at(i))); 2680 } 2681} 2682 2683 2684HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) { 2685 HBasicBlock* b = graph()->CreateBasicBlock(); 2686 b->SetInitialEnvironment(env); 2687 return b; 2688} 2689 2690 2691HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() { 2692 HBasicBlock* header = graph()->CreateBasicBlock(); 2693 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header); 2694 header->SetInitialEnvironment(entry_env); 2695 header->AttachLoopInformation(); 2696 return header; 2697} 2698 2699 2700void HGraphBuilder::VisitBlock(Block* stmt) { 2701 ASSERT(!HasStackOverflow()); 2702 ASSERT(current_block() != NULL); 2703 ASSERT(current_block()->HasPredecessor()); 2704 if (stmt->block_scope() != NULL) { 2705 return Bailout("ScopedBlock"); 2706 } 2707 BreakAndContinueInfo break_info(stmt); 2708 { BreakAndContinueScope push(&break_info, this); 2709 CHECK_BAILOUT(VisitStatements(stmt->statements())); 2710 } 2711 HBasicBlock* break_block = break_info.break_block(); 2712 if (break_block != NULL) { 2713 if (current_block() != NULL) current_block()->Goto(break_block); 2714 break_block->SetJoinId(stmt->ExitId()); 2715 set_current_block(break_block); 2716 } 2717} 2718 2719 2720void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) { 2721 ASSERT(!HasStackOverflow()); 2722 ASSERT(current_block() != NULL); 2723 ASSERT(current_block()->HasPredecessor()); 2724 VisitForEffect(stmt->expression()); 2725} 2726 2727 2728void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) { 2729 ASSERT(!HasStackOverflow()); 2730 ASSERT(current_block() != NULL); 2731 ASSERT(current_block()->HasPredecessor()); 2732} 2733 2734 2735void HGraphBuilder::VisitIfStatement(IfStatement* stmt) { 2736 ASSERT(!HasStackOverflow()); 2737 ASSERT(current_block() != NULL); 2738 ASSERT(current_block()->HasPredecessor()); 2739 if (stmt->condition()->ToBooleanIsTrue()) { 2740 AddSimulate(stmt->ThenId()); 2741 Visit(stmt->then_statement()); 2742 } else if (stmt->condition()->ToBooleanIsFalse()) { 2743 AddSimulate(stmt->ElseId()); 2744 Visit(stmt->else_statement()); 2745 } else { 2746 HBasicBlock* cond_true = graph()->CreateBasicBlock(); 2747 HBasicBlock* cond_false = graph()->CreateBasicBlock(); 2748 CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false)); 2749 2750 if (cond_true->HasPredecessor()) { 2751 cond_true->SetJoinId(stmt->ThenId()); 2752 set_current_block(cond_true); 2753 CHECK_BAILOUT(Visit(stmt->then_statement())); 2754 cond_true = current_block(); 2755 } else { 2756 cond_true = NULL; 2757 } 2758 2759 if (cond_false->HasPredecessor()) { 2760 cond_false->SetJoinId(stmt->ElseId()); 2761 set_current_block(cond_false); 2762 CHECK_BAILOUT(Visit(stmt->else_statement())); 2763 cond_false = current_block(); 2764 } else { 2765 cond_false = NULL; 2766 } 2767 2768 HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId()); 2769 set_current_block(join); 2770 } 2771} 2772 2773 2774HBasicBlock* HGraphBuilder::BreakAndContinueScope::Get( 2775 BreakableStatement* stmt, 2776 BreakType type, 2777 int* drop_extra) { 2778 *drop_extra = 0; 2779 
BreakAndContinueScope* current = this; 2780 while (current != NULL && current->info()->target() != stmt) { 2781 *drop_extra += current->info()->drop_extra(); 2782 current = current->next(); 2783 } 2784 ASSERT(current != NULL); // Always found (unless stack is malformed). 2785 2786 if (type == BREAK) { 2787 *drop_extra += current->info()->drop_extra(); 2788 } 2789 2790 HBasicBlock* block = NULL; 2791 switch (type) { 2792 case BREAK: 2793 block = current->info()->break_block(); 2794 if (block == NULL) { 2795 block = current->owner()->graph()->CreateBasicBlock(); 2796 current->info()->set_break_block(block); 2797 } 2798 break; 2799 2800 case CONTINUE: 2801 block = current->info()->continue_block(); 2802 if (block == NULL) { 2803 block = current->owner()->graph()->CreateBasicBlock(); 2804 current->info()->set_continue_block(block); 2805 } 2806 break; 2807 } 2808 2809 return block; 2810} 2811 2812 2813void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) { 2814 ASSERT(!HasStackOverflow()); 2815 ASSERT(current_block() != NULL); 2816 ASSERT(current_block()->HasPredecessor()); 2817 int drop_extra = 0; 2818 HBasicBlock* continue_block = break_scope()->Get(stmt->target(), 2819 CONTINUE, 2820 &drop_extra); 2821 Drop(drop_extra); 2822 current_block()->Goto(continue_block); 2823 set_current_block(NULL); 2824} 2825 2826 2827void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) { 2828 ASSERT(!HasStackOverflow()); 2829 ASSERT(current_block() != NULL); 2830 ASSERT(current_block()->HasPredecessor()); 2831 int drop_extra = 0; 2832 HBasicBlock* break_block = break_scope()->Get(stmt->target(), 2833 BREAK, 2834 &drop_extra); 2835 Drop(drop_extra); 2836 current_block()->Goto(break_block); 2837 set_current_block(NULL); 2838} 2839 2840 2841void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) { 2842 ASSERT(!HasStackOverflow()); 2843 ASSERT(current_block() != NULL); 2844 ASSERT(current_block()->HasPredecessor()); 2845 AstContext* context = call_context(); 2846 if (context == NULL) { 2847 // Not an inlined return, so an actual one. 2848 CHECK_ALIVE(VisitForValue(stmt->expression())); 2849 HValue* result = environment()->Pop(); 2850 current_block()->FinishExit(new(zone()) HReturn(result)); 2851 } else if (function_state()->is_construct()) { 2852 // Return from an inlined construct call. In a test context the return 2853 // value will always evaluate to true, in a value context the return value 2854 // needs to be a JSObject. 
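    // If the returned value is not a spec object, the 'new' expression
    // evaluates to the freshly allocated receiver, which is found in
    // environment slot 0.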
2855 if (context->IsTest()) { 2856 TestContext* test = TestContext::cast(context); 2857 CHECK_ALIVE(VisitForEffect(stmt->expression())); 2858 current_block()->Goto(test->if_true(), function_state()->drop_extra()); 2859 } else if (context->IsEffect()) { 2860 CHECK_ALIVE(VisitForEffect(stmt->expression())); 2861 current_block()->Goto(function_return(), function_state()->drop_extra()); 2862 } else { 2863 ASSERT(context->IsValue()); 2864 CHECK_ALIVE(VisitForValue(stmt->expression())); 2865 HValue* return_value = Pop(); 2866 HValue* receiver = environment()->Lookup(0); 2867 HHasInstanceTypeAndBranch* typecheck = 2868 new(zone()) HHasInstanceTypeAndBranch(return_value, 2869 FIRST_SPEC_OBJECT_TYPE, 2870 LAST_SPEC_OBJECT_TYPE); 2871 HBasicBlock* if_spec_object = graph()->CreateBasicBlock(); 2872 HBasicBlock* not_spec_object = graph()->CreateBasicBlock(); 2873 typecheck->SetSuccessorAt(0, if_spec_object); 2874 typecheck->SetSuccessorAt(1, not_spec_object); 2875 current_block()->Finish(typecheck); 2876 if_spec_object->AddLeaveInlined(return_value, 2877 function_return(), 2878 function_state()->drop_extra()); 2879 not_spec_object->AddLeaveInlined(receiver, 2880 function_return(), 2881 function_state()->drop_extra()); 2882 } 2883 } else { 2884 // Return from an inlined function, visit the subexpression in the 2885 // expression context of the call. 2886 if (context->IsTest()) { 2887 TestContext* test = TestContext::cast(context); 2888 VisitForControl(stmt->expression(), 2889 test->if_true(), 2890 test->if_false()); 2891 } else if (context->IsEffect()) { 2892 CHECK_ALIVE(VisitForEffect(stmt->expression())); 2893 current_block()->Goto(function_return(), function_state()->drop_extra()); 2894 } else { 2895 ASSERT(context->IsValue()); 2896 CHECK_ALIVE(VisitForValue(stmt->expression())); 2897 HValue* return_value = Pop(); 2898 current_block()->AddLeaveInlined(return_value, 2899 function_return(), 2900 function_state()->drop_extra()); 2901 } 2902 } 2903 set_current_block(NULL); 2904} 2905 2906 2907void HGraphBuilder::VisitWithStatement(WithStatement* stmt) { 2908 ASSERT(!HasStackOverflow()); 2909 ASSERT(current_block() != NULL); 2910 ASSERT(current_block()->HasPredecessor()); 2911 return Bailout("WithStatement"); 2912} 2913 2914 2915void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) { 2916 ASSERT(!HasStackOverflow()); 2917 ASSERT(current_block() != NULL); 2918 ASSERT(current_block()->HasPredecessor()); 2919 // We only optimize switch statements with smi-literal smi comparisons, 2920 // with a bounded number of clauses. 2921 const int kCaseClauseLimit = 128; 2922 ZoneList<CaseClause*>* clauses = stmt->cases(); 2923 int clause_count = clauses->length(); 2924 if (clause_count > kCaseClauseLimit) { 2925 return Bailout("SwitchStatement: too many clauses"); 2926 } 2927 2928 HValue* context = environment()->LookupContext(); 2929 2930 CHECK_ALIVE(VisitForValue(stmt->tag())); 2931 AddSimulate(stmt->EntryId()); 2932 HValue* tag_value = Pop(); 2933 HBasicBlock* first_test_block = current_block(); 2934 2935 SwitchType switch_type = UNKNOWN_SWITCH; 2936 2937 // 1. 
Extract clause type 2938 for (int i = 0; i < clause_count; ++i) { 2939 CaseClause* clause = clauses->at(i); 2940 if (clause->is_default()) continue; 2941 2942 if (switch_type == UNKNOWN_SWITCH) { 2943 if (clause->label()->IsSmiLiteral()) { 2944 switch_type = SMI_SWITCH; 2945 } else if (clause->label()->IsStringLiteral()) { 2946 switch_type = STRING_SWITCH; 2947 } else { 2948 return Bailout("SwitchStatement: non-literal switch label"); 2949 } 2950 } else if ((switch_type == STRING_SWITCH && 2951 !clause->label()->IsStringLiteral()) || 2952 (switch_type == SMI_SWITCH && 2953 !clause->label()->IsSmiLiteral())) { 2954 return Bailout("SwitchStatement: mixed label types are not supported"); 2955 } 2956 } 2957 2958 HUnaryControlInstruction* string_check = NULL; 2959 HBasicBlock* not_string_block = NULL; 2960 2961 // Test switch's tag value if all clauses are string literals 2962 if (switch_type == STRING_SWITCH) { 2963 string_check = new(zone()) HIsStringAndBranch(tag_value); 2964 first_test_block = graph()->CreateBasicBlock(); 2965 not_string_block = graph()->CreateBasicBlock(); 2966 2967 string_check->SetSuccessorAt(0, first_test_block); 2968 string_check->SetSuccessorAt(1, not_string_block); 2969 current_block()->Finish(string_check); 2970 2971 set_current_block(first_test_block); 2972 } 2973 2974 // 2. Build all the tests, with dangling true branches 2975 int default_id = AstNode::kNoNumber; 2976 for (int i = 0; i < clause_count; ++i) { 2977 CaseClause* clause = clauses->at(i); 2978 if (clause->is_default()) { 2979 default_id = clause->EntryId(); 2980 continue; 2981 } 2982 if (switch_type == SMI_SWITCH) { 2983 clause->RecordTypeFeedback(oracle()); 2984 } 2985 2986 // Generate a compare and branch. 2987 CHECK_ALIVE(VisitForValue(clause->label())); 2988 HValue* label_value = Pop(); 2989 2990 HBasicBlock* next_test_block = graph()->CreateBasicBlock(); 2991 HBasicBlock* body_block = graph()->CreateBasicBlock(); 2992 2993 HControlInstruction* compare; 2994 2995 if (switch_type == SMI_SWITCH) { 2996 if (!clause->IsSmiCompare()) { 2997 // Finish with deoptimize and add uses of environment values to 2998 // account for invisible uses. 2999 current_block()->FinishExitWithDeoptimization(HDeoptimize::kUseAll); 3000 set_current_block(NULL); 3001 break; 3002 } 3003 3004 HCompareIDAndBranch* compare_ = 3005 new(zone()) HCompareIDAndBranch(tag_value, 3006 label_value, 3007 Token::EQ_STRICT); 3008 compare_->SetInputRepresentation(Representation::Integer32()); 3009 compare = compare_; 3010 } else { 3011 compare = new(zone()) HStringCompareAndBranch(context, tag_value, 3012 label_value, 3013 Token::EQ_STRICT); 3014 } 3015 3016 compare->SetSuccessorAt(0, body_block); 3017 compare->SetSuccessorAt(1, next_test_block); 3018 current_block()->Finish(compare); 3019 3020 set_current_block(next_test_block); 3021 } 3022 3023 // Save the current block to use for the default or to join with the 3024 // exit. This block is NULL if we deoptimized. 3025 HBasicBlock* last_block = current_block(); 3026 3027 if (not_string_block != NULL) { 3028 int join_id = (default_id != AstNode::kNoNumber) 3029 ? default_id 3030 : stmt->ExitId(); 3031 last_block = CreateJoin(last_block, not_string_block, join_id); 3032 } 3033 3034 // 3. Loop over the clauses and the linked list of tests in lockstep, 3035 // translating the clause bodies.
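  // Each clause body can be entered from its own test (the first successor of
  // the corresponding compare) and/or by falling through from the previous
  // body; cases (a)-(d) below join these paths as needed.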
3036 HBasicBlock* curr_test_block = first_test_block; 3037 HBasicBlock* fall_through_block = NULL; 3038 3039 BreakAndContinueInfo break_info(stmt); 3040 { BreakAndContinueScope push(&break_info, this); 3041 for (int i = 0; i < clause_count; ++i) { 3042 CaseClause* clause = clauses->at(i); 3043 3044 // Identify the block where normal (non-fall-through) control flow 3045 // goes to. 3046 HBasicBlock* normal_block = NULL; 3047 if (clause->is_default()) { 3048 if (last_block != NULL) { 3049 normal_block = last_block; 3050 last_block = NULL; // Cleared to indicate we've handled it. 3051 } 3052 } else if (!curr_test_block->end()->IsDeoptimize()) { 3053 normal_block = curr_test_block->end()->FirstSuccessor(); 3054 curr_test_block = curr_test_block->end()->SecondSuccessor(); 3055 } 3056 3057 // Identify a block to emit the body into. 3058 if (normal_block == NULL) { 3059 if (fall_through_block == NULL) { 3060 // (a) Unreachable. 3061 if (clause->is_default()) { 3062 continue; // Might still be reachable clause bodies. 3063 } else { 3064 break; 3065 } 3066 } else { 3067 // (b) Reachable only as fall through. 3068 set_current_block(fall_through_block); 3069 } 3070 } else if (fall_through_block == NULL) { 3071 // (c) Reachable only normally. 3072 set_current_block(normal_block); 3073 } else { 3074 // (d) Reachable both ways. 3075 HBasicBlock* join = CreateJoin(fall_through_block, 3076 normal_block, 3077 clause->EntryId()); 3078 set_current_block(join); 3079 } 3080 3081 CHECK_BAILOUT(VisitStatements(clause->statements())); 3082 fall_through_block = current_block(); 3083 } 3084 } 3085 3086 // Create an up-to-3-way join. Use the break block if it exists since 3087 // it's already a join block. 3088 HBasicBlock* break_block = break_info.break_block(); 3089 if (break_block == NULL) { 3090 set_current_block(CreateJoin(fall_through_block, 3091 last_block, 3092 stmt->ExitId())); 3093 } else { 3094 if (fall_through_block != NULL) fall_through_block->Goto(break_block); 3095 if (last_block != NULL) last_block->Goto(break_block); 3096 break_block->SetJoinId(stmt->ExitId()); 3097 set_current_block(break_block); 3098 } 3099} 3100 3101 3102bool HGraphBuilder::HasOsrEntryAt(IterationStatement* statement) { 3103 return statement->OsrEntryId() == info()->osr_ast_id(); 3104} 3105 3106 3107bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) { 3108 if (!HasOsrEntryAt(statement)) return false; 3109 3110 HBasicBlock* non_osr_entry = graph()->CreateBasicBlock(); 3111 HBasicBlock* osr_entry = graph()->CreateBasicBlock(); 3112 HValue* true_value = graph()->GetConstantTrue(); 3113 HBranch* test = new(zone()) HBranch(true_value, non_osr_entry, osr_entry); 3114 current_block()->Finish(test); 3115 3116 HBasicBlock* loop_predecessor = graph()->CreateBasicBlock(); 3117 non_osr_entry->Goto(loop_predecessor); 3118 3119 set_current_block(osr_entry); 3120 int osr_entry_id = statement->OsrEntryId(); 3121 int first_expression_index = environment()->first_expression_index(); 3122 int length = environment()->length(); 3123 ZoneList<HUnknownOSRValue*>* osr_values = 3124 new(zone()) ZoneList<HUnknownOSRValue*>(length); 3125 3126 for (int i = 0; i < first_expression_index; ++i) { 3127 HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue; 3128 AddInstruction(osr_value); 3129 environment()->Bind(i, osr_value); 3130 osr_values->Add(osr_value); 3131 } 3132 3133 if (first_expression_index != length) { 3134 environment()->Drop(length - first_expression_index); 3135 for (int i = first_expression_index; i < length; ++i) { 3136 
HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue; 3137 AddInstruction(osr_value); 3138 environment()->Push(osr_value); 3139 osr_values->Add(osr_value); 3140 } 3141 } 3142 3143 graph()->set_osr_values(osr_values); 3144 3145 AddSimulate(osr_entry_id); 3146 AddInstruction(new(zone()) HOsrEntry(osr_entry_id)); 3147 HContext* context = new(zone()) HContext; 3148 AddInstruction(context); 3149 environment()->BindContext(context); 3150 current_block()->Goto(loop_predecessor); 3151 loop_predecessor->SetJoinId(statement->EntryId()); 3152 set_current_block(loop_predecessor); 3153 return true; 3154} 3155 3156 3157void HGraphBuilder::VisitLoopBody(IterationStatement* stmt, 3158 HBasicBlock* loop_entry, 3159 BreakAndContinueInfo* break_info) { 3160 BreakAndContinueScope push(break_info, this); 3161 AddSimulate(stmt->StackCheckId()); 3162 HValue* context = environment()->LookupContext(); 3163 HStackCheck* stack_check = 3164 new(zone()) HStackCheck(context, HStackCheck::kBackwardsBranch); 3165 AddInstruction(stack_check); 3166 ASSERT(loop_entry->IsLoopHeader()); 3167 loop_entry->loop_information()->set_stack_check(stack_check); 3168 CHECK_BAILOUT(Visit(stmt->body())); 3169} 3170 3171 3172void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) { 3173 ASSERT(!HasStackOverflow()); 3174 ASSERT(current_block() != NULL); 3175 ASSERT(current_block()->HasPredecessor()); 3176 ASSERT(current_block() != NULL); 3177 bool osr_entry = PreProcessOsrEntry(stmt); 3178 HBasicBlock* loop_entry = CreateLoopHeaderBlock(); 3179 current_block()->Goto(loop_entry); 3180 set_current_block(loop_entry); 3181 if (osr_entry) graph()->set_osr_loop_entry(loop_entry); 3182 3183 BreakAndContinueInfo break_info(stmt); 3184 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 3185 HBasicBlock* body_exit = 3186 JoinContinue(stmt, current_block(), break_info.continue_block()); 3187 HBasicBlock* loop_successor = NULL; 3188 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) { 3189 set_current_block(body_exit); 3190 // The block for a true condition, the actual predecessor block of the 3191 // back edge. 3192 body_exit = graph()->CreateBasicBlock(); 3193 loop_successor = graph()->CreateBasicBlock(); 3194 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor)); 3195 if (body_exit->HasPredecessor()) { 3196 body_exit->SetJoinId(stmt->BackEdgeId()); 3197 } else { 3198 body_exit = NULL; 3199 } 3200 if (loop_successor->HasPredecessor()) { 3201 loop_successor->SetJoinId(stmt->ExitId()); 3202 } else { 3203 loop_successor = NULL; 3204 } 3205 } 3206 HBasicBlock* loop_exit = CreateLoop(stmt, 3207 loop_entry, 3208 body_exit, 3209 loop_successor, 3210 break_info.break_block()); 3211 set_current_block(loop_exit); 3212} 3213 3214 3215void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) { 3216 ASSERT(!HasStackOverflow()); 3217 ASSERT(current_block() != NULL); 3218 ASSERT(current_block()->HasPredecessor()); 3219 ASSERT(current_block() != NULL); 3220 bool osr_entry = PreProcessOsrEntry(stmt); 3221 HBasicBlock* loop_entry = CreateLoopHeaderBlock(); 3222 current_block()->Goto(loop_entry); 3223 set_current_block(loop_entry); 3224 if (osr_entry) graph()->set_osr_loop_entry(loop_entry); 3225 3226 3227 // If the condition is constant true, do not generate a branch. 
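  // In that case loop_successor stays NULL, the loop header falls through
  // directly into the body, and the loop can only be left via break (or not
  // at all).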
3228 HBasicBlock* loop_successor = NULL; 3229 if (!stmt->cond()->ToBooleanIsTrue()) { 3230 HBasicBlock* body_entry = graph()->CreateBasicBlock(); 3231 loop_successor = graph()->CreateBasicBlock(); 3232 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor)); 3233 if (body_entry->HasPredecessor()) { 3234 body_entry->SetJoinId(stmt->BodyId()); 3235 set_current_block(body_entry); 3236 } 3237 if (loop_successor->HasPredecessor()) { 3238 loop_successor->SetJoinId(stmt->ExitId()); 3239 } else { 3240 loop_successor = NULL; 3241 } 3242 } 3243 3244 BreakAndContinueInfo break_info(stmt); 3245 if (current_block() != NULL) { 3246 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 3247 } 3248 HBasicBlock* body_exit = 3249 JoinContinue(stmt, current_block(), break_info.continue_block()); 3250 HBasicBlock* loop_exit = CreateLoop(stmt, 3251 loop_entry, 3252 body_exit, 3253 loop_successor, 3254 break_info.break_block()); 3255 set_current_block(loop_exit); 3256} 3257 3258 3259void HGraphBuilder::VisitForStatement(ForStatement* stmt) { 3260 ASSERT(!HasStackOverflow()); 3261 ASSERT(current_block() != NULL); 3262 ASSERT(current_block()->HasPredecessor()); 3263 if (stmt->init() != NULL) { 3264 CHECK_ALIVE(Visit(stmt->init())); 3265 } 3266 ASSERT(current_block() != NULL); 3267 bool osr_entry = PreProcessOsrEntry(stmt); 3268 HBasicBlock* loop_entry = CreateLoopHeaderBlock(); 3269 current_block()->Goto(loop_entry); 3270 set_current_block(loop_entry); 3271 if (osr_entry) graph()->set_osr_loop_entry(loop_entry); 3272 3273 HBasicBlock* loop_successor = NULL; 3274 if (stmt->cond() != NULL) { 3275 HBasicBlock* body_entry = graph()->CreateBasicBlock(); 3276 loop_successor = graph()->CreateBasicBlock(); 3277 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor)); 3278 if (body_entry->HasPredecessor()) { 3279 body_entry->SetJoinId(stmt->BodyId()); 3280 set_current_block(body_entry); 3281 } 3282 if (loop_successor->HasPredecessor()) { 3283 loop_successor->SetJoinId(stmt->ExitId()); 3284 } else { 3285 loop_successor = NULL; 3286 } 3287 } 3288 3289 BreakAndContinueInfo break_info(stmt); 3290 if (current_block() != NULL) { 3291 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 3292 } 3293 HBasicBlock* body_exit = 3294 JoinContinue(stmt, current_block(), break_info.continue_block()); 3295 3296 if (stmt->next() != NULL && body_exit != NULL) { 3297 set_current_block(body_exit); 3298 CHECK_BAILOUT(Visit(stmt->next())); 3299 body_exit = current_block(); 3300 } 3301 3302 HBasicBlock* loop_exit = CreateLoop(stmt, 3303 loop_entry, 3304 body_exit, 3305 loop_successor, 3306 break_info.break_block()); 3307 set_current_block(loop_exit); 3308} 3309 3310 3311void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) { 3312 ASSERT(!HasStackOverflow()); 3313 ASSERT(current_block() != NULL); 3314 ASSERT(current_block()->HasPredecessor()); 3315 3316 if (!FLAG_optimize_for_in) { 3317 return Bailout("ForInStatement optimization is disabled"); 3318 } 3319 3320 if (!oracle()->IsForInFastCase(stmt)) { 3321 return Bailout("ForInStatement is not fast case"); 3322 } 3323 3324 if (!stmt->each()->IsVariableProxy() || 3325 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) { 3326 return Bailout("ForInStatement with non-local each variable"); 3327 } 3328 3329 Variable* each_var = stmt->each()->AsVariableProxy()->var(); 3330 3331 CHECK_ALIVE(VisitForValue(stmt->enumerable())); 3332 HValue* enumerable = Top(); // Leave enumerable at the top. 
3333 3334 HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap( 3335 environment()->LookupContext(), enumerable)); 3336 AddSimulate(stmt->PrepareId()); 3337 3338 HInstruction* array = AddInstruction( 3339 new(zone()) HForInCacheArray( 3340 enumerable, 3341 map, 3342 DescriptorArray::kEnumCacheBridgeCacheIndex)); 3343 3344 HInstruction* array_length = AddInstruction( 3345 new(zone()) HFixedArrayBaseLength(array)); 3346 3347 HInstruction* start_index = AddInstruction(new(zone()) HConstant( 3348 Handle<Object>(Smi::FromInt(0)), Representation::Integer32())); 3349 3350 Push(map); 3351 Push(array); 3352 Push(array_length); 3353 Push(start_index); 3354 3355 HInstruction* index_cache = AddInstruction( 3356 new(zone()) HForInCacheArray( 3357 enumerable, 3358 map, 3359 DescriptorArray::kEnumCacheBridgeIndicesCacheIndex)); 3360 HForInCacheArray::cast(array)->set_index_cache( 3361 HForInCacheArray::cast(index_cache)); 3362 3363 bool osr_entry = PreProcessOsrEntry(stmt); 3364 HBasicBlock* loop_entry = CreateLoopHeaderBlock(); 3365 current_block()->Goto(loop_entry); 3366 set_current_block(loop_entry); 3367 if (osr_entry) graph()->set_osr_loop_entry(loop_entry); 3368 3369 HValue* index = environment()->ExpressionStackAt(0); 3370 HValue* limit = environment()->ExpressionStackAt(1); 3371 3372 // Check that we still have more keys. 3373 HCompareIDAndBranch* compare_index = 3374 new(zone()) HCompareIDAndBranch(index, limit, Token::LT); 3375 compare_index->SetInputRepresentation(Representation::Integer32()); 3376 3377 HBasicBlock* loop_body = graph()->CreateBasicBlock(); 3378 HBasicBlock* loop_successor = graph()->CreateBasicBlock(); 3379 3380 compare_index->SetSuccessorAt(0, loop_body); 3381 compare_index->SetSuccessorAt(1, loop_successor); 3382 current_block()->Finish(compare_index); 3383 3384 set_current_block(loop_successor); 3385 Drop(5); 3386 3387 set_current_block(loop_body); 3388 3389 HValue* key = AddInstruction( 3390 new(zone()) HLoadKeyedFastElement( 3391 environment()->ExpressionStackAt(2), // Enum cache. 3392 environment()->ExpressionStackAt(0), // Iteration index. 3393 HLoadKeyedFastElement::OMIT_HOLE_CHECK)); 3394 3395 // Check if the expected map still matches that of the enumerable. 3396 // If not just deoptimize. 
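  // Expression stack layout from the pushes above: 0 = iteration index,
  // 1 = array length, 2 = enum cache array, 3 = map, 4 = enumerable.
  // HCheckMapValue deoptimizes if the enumerable's map has changed.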
3397 AddInstruction(new(zone()) HCheckMapValue( 3398 environment()->ExpressionStackAt(4), 3399 environment()->ExpressionStackAt(3))); 3400 3401 Bind(each_var, key); 3402 3403 BreakAndContinueInfo break_info(stmt, 5); 3404 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 3405 3406 HBasicBlock* body_exit = 3407 JoinContinue(stmt, current_block(), break_info.continue_block()); 3408 3409 if (body_exit != NULL) { 3410 set_current_block(body_exit); 3411 3412 HValue* current_index = Pop(); 3413 HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(), 3414 current_index, 3415 graph()->GetConstant1()); 3416 new_index->AssumeRepresentation(Representation::Integer32()); 3417 PushAndAdd(new_index); 3418 body_exit = current_block(); 3419 } 3420 3421 HBasicBlock* loop_exit = CreateLoop(stmt, 3422 loop_entry, 3423 body_exit, 3424 loop_successor, 3425 break_info.break_block()); 3426 3427 set_current_block(loop_exit); 3428} 3429 3430 3431void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) { 3432 ASSERT(!HasStackOverflow()); 3433 ASSERT(current_block() != NULL); 3434 ASSERT(current_block()->HasPredecessor()); 3435 return Bailout("TryCatchStatement"); 3436} 3437 3438 3439void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) { 3440 ASSERT(!HasStackOverflow()); 3441 ASSERT(current_block() != NULL); 3442 ASSERT(current_block()->HasPredecessor()); 3443 return Bailout("TryFinallyStatement"); 3444} 3445 3446 3447void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) { 3448 ASSERT(!HasStackOverflow()); 3449 ASSERT(current_block() != NULL); 3450 ASSERT(current_block()->HasPredecessor()); 3451 return Bailout("DebuggerStatement"); 3452} 3453 3454 3455static Handle<SharedFunctionInfo> SearchSharedFunctionInfo( 3456 Code* unoptimized_code, FunctionLiteral* expr) { 3457 int start_position = expr->start_position(); 3458 RelocIterator it(unoptimized_code); 3459 for (;!it.done(); it.next()) { 3460 RelocInfo* rinfo = it.rinfo(); 3461 if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue; 3462 Object* obj = rinfo->target_object(); 3463 if (obj->IsSharedFunctionInfo()) { 3464 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj); 3465 if (shared->start_position() == start_position) { 3466 return Handle<SharedFunctionInfo>(shared); 3467 } 3468 } 3469 } 3470 3471 return Handle<SharedFunctionInfo>(); 3472} 3473 3474 3475void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) { 3476 ASSERT(!HasStackOverflow()); 3477 ASSERT(current_block() != NULL); 3478 ASSERT(current_block()->HasPredecessor()); 3479 Handle<SharedFunctionInfo> shared_info = 3480 SearchSharedFunctionInfo(info()->shared_info()->code(), 3481 expr); 3482 if (shared_info.is_null()) { 3483 shared_info = Compiler::BuildFunctionInfo(expr, info()->script()); 3484 } 3485 // We also have a stack overflow if the recursive compilation did. 
3486 if (HasStackOverflow()) return; 3487 HValue* context = environment()->LookupContext(); 3488 HFunctionLiteral* instr = 3489 new(zone()) HFunctionLiteral(context, shared_info, expr->pretenure()); 3490 return ast_context()->ReturnInstruction(instr, expr->id()); 3491} 3492 3493 3494void HGraphBuilder::VisitSharedFunctionInfoLiteral( 3495 SharedFunctionInfoLiteral* expr) { 3496 ASSERT(!HasStackOverflow()); 3497 ASSERT(current_block() != NULL); 3498 ASSERT(current_block()->HasPredecessor()); 3499 return Bailout("SharedFunctionInfoLiteral"); 3500} 3501 3502 3503void HGraphBuilder::VisitConditional(Conditional* expr) { 3504 ASSERT(!HasStackOverflow()); 3505 ASSERT(current_block() != NULL); 3506 ASSERT(current_block()->HasPredecessor()); 3507 HBasicBlock* cond_true = graph()->CreateBasicBlock(); 3508 HBasicBlock* cond_false = graph()->CreateBasicBlock(); 3509 CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false)); 3510 3511 // Visit the true and false subexpressions in the same AST context as the 3512 // whole expression. 3513 if (cond_true->HasPredecessor()) { 3514 cond_true->SetJoinId(expr->ThenId()); 3515 set_current_block(cond_true); 3516 CHECK_BAILOUT(Visit(expr->then_expression())); 3517 cond_true = current_block(); 3518 } else { 3519 cond_true = NULL; 3520 } 3521 3522 if (cond_false->HasPredecessor()) { 3523 cond_false->SetJoinId(expr->ElseId()); 3524 set_current_block(cond_false); 3525 CHECK_BAILOUT(Visit(expr->else_expression())); 3526 cond_false = current_block(); 3527 } else { 3528 cond_false = NULL; 3529 } 3530 3531 if (!ast_context()->IsTest()) { 3532 HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id()); 3533 set_current_block(join); 3534 if (join != NULL && !ast_context()->IsEffect()) { 3535 return ast_context()->ReturnValue(Pop()); 3536 } 3537 } 3538} 3539 3540 3541HGraphBuilder::GlobalPropertyAccess HGraphBuilder::LookupGlobalProperty( 3542 Variable* var, LookupResult* lookup, bool is_store) { 3543 if (var->is_this() || !info()->has_global_object()) { 3544 return kUseGeneric; 3545 } 3546 Handle<GlobalObject> global(info()->global_object()); 3547 global->Lookup(*var->name(), lookup); 3548 if (!lookup->IsFound() || 3549 lookup->type() != NORMAL || 3550 (is_store && lookup->IsReadOnly()) || 3551 lookup->holder() != *global) { 3552 return kUseGeneric; 3553 } 3554 3555 return kUseCell; 3556} 3557 3558 3559HValue* HGraphBuilder::BuildContextChainWalk(Variable* var) { 3560 ASSERT(var->IsContextSlot()); 3561 HValue* context = environment()->LookupContext(); 3562 int length = info()->scope()->ContextChainLength(var->scope()); 3563 while (length-- > 0) { 3564 HInstruction* context_instruction = new(zone()) HOuterContext(context); 3565 AddInstruction(context_instruction); 3566 context = context_instruction; 3567 } 3568 return context; 3569} 3570 3571 3572void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) { 3573 ASSERT(!HasStackOverflow()); 3574 ASSERT(current_block() != NULL); 3575 ASSERT(current_block()->HasPredecessor()); 3576 Variable* variable = expr->var(); 3577 switch (variable->location()) { 3578 case Variable::UNALLOCATED: { 3579 if (variable->mode() == LET || variable->mode() == CONST_HARMONY) { 3580 return Bailout("reference to global harmony declared variable"); 3581 } 3582 // Handle known global constants like 'undefined' specially to avoid a 3583 // load from a global cell for them. 
3584 Handle<Object> constant_value = 3585 isolate()->factory()->GlobalConstantFor(variable->name()); 3586 if (!constant_value.is_null()) { 3587 HConstant* instr = 3588 new(zone()) HConstant(constant_value, Representation::Tagged()); 3589 return ast_context()->ReturnInstruction(instr, expr->id()); 3590 } 3591 3592 LookupResult lookup(isolate()); 3593 GlobalPropertyAccess type = 3594 LookupGlobalProperty(variable, &lookup, false); 3595 3596 if (type == kUseCell && 3597 info()->global_object()->IsAccessCheckNeeded()) { 3598 type = kUseGeneric; 3599 } 3600 3601 if (type == kUseCell) { 3602 Handle<GlobalObject> global(info()->global_object()); 3603 Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup)); 3604 HLoadGlobalCell* instr = 3605 new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails()); 3606 return ast_context()->ReturnInstruction(instr, expr->id()); 3607 } else { 3608 HValue* context = environment()->LookupContext(); 3609 HGlobalObject* global_object = new(zone()) HGlobalObject(context); 3610 AddInstruction(global_object); 3611 HLoadGlobalGeneric* instr = 3612 new(zone()) HLoadGlobalGeneric(context, 3613 global_object, 3614 variable->name(), 3615 ast_context()->is_for_typeof()); 3616 instr->set_position(expr->position()); 3617 return ast_context()->ReturnInstruction(instr, expr->id()); 3618 } 3619 } 3620 3621 case Variable::PARAMETER: 3622 case Variable::LOCAL: { 3623 HValue* value = environment()->Lookup(variable); 3624 if (value == graph()->GetConstantHole()) { 3625 ASSERT(variable->mode() == CONST || 3626 variable->mode() == CONST_HARMONY || 3627 variable->mode() == LET); 3628 return Bailout("reference to uninitialized variable"); 3629 } 3630 return ast_context()->ReturnValue(value); 3631 } 3632 3633 case Variable::CONTEXT: { 3634 HValue* context = BuildContextChainWalk(variable); 3635 HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable); 3636 return ast_context()->ReturnInstruction(instr, expr->id()); 3637 } 3638 3639 case Variable::LOOKUP: 3640 return Bailout("reference to a variable which requires dynamic lookup"); 3641 } 3642} 3643 3644 3645void HGraphBuilder::VisitLiteral(Literal* expr) { 3646 ASSERT(!HasStackOverflow()); 3647 ASSERT(current_block() != NULL); 3648 ASSERT(current_block()->HasPredecessor()); 3649 HConstant* instr = 3650 new(zone()) HConstant(expr->handle(), Representation::Tagged()); 3651 return ast_context()->ReturnInstruction(instr, expr->id()); 3652} 3653 3654 3655void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) { 3656 ASSERT(!HasStackOverflow()); 3657 ASSERT(current_block() != NULL); 3658 ASSERT(current_block()->HasPredecessor()); 3659 HValue* context = environment()->LookupContext(); 3660 3661 HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context, 3662 expr->pattern(), 3663 expr->flags(), 3664 expr->literal_index()); 3665 return ast_context()->ReturnInstruction(instr, expr->id()); 3666} 3667 3668 3669// Determines whether the given array or object literal boilerplate satisfies 3670// all limits to be considered for fast deep-copying and computes the total 3671// size of all objects that are part of the graph. 
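// The remaining property budget (*max_properties) and the accumulated size
// (*total_size) are updated in place as the boilerplate graph is traversed.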
3672static bool IsFastLiteral(Handle<JSObject> boilerplate, 3673 int max_depth, 3674 int* max_properties, 3675 int* total_size) { 3676 ASSERT(max_depth >= 0 && *max_properties >= 0); 3677 if (max_depth == 0) return false; 3678 3679 Handle<FixedArrayBase> elements(boilerplate->elements()); 3680 if (elements->length() > 0 && 3681 elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) { 3682 if (boilerplate->HasFastDoubleElements()) { 3683 *total_size += FixedDoubleArray::SizeFor(elements->length()); 3684 } else if (boilerplate->HasFastElements()) { 3685 int length = elements->length(); 3686 for (int i = 0; i < length; i++) { 3687 if ((*max_properties)-- == 0) return false; 3688 Handle<Object> value = JSObject::GetElement(boilerplate, i); 3689 if (value->IsJSObject()) { 3690 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 3691 if (!IsFastLiteral(value_object, 3692 max_depth - 1, 3693 max_properties, 3694 total_size)) { 3695 return false; 3696 } 3697 } 3698 } 3699 *total_size += FixedArray::SizeFor(length); 3700 } else { 3701 return false; 3702 } 3703 } 3704 3705 Handle<FixedArray> properties(boilerplate->properties()); 3706 if (properties->length() > 0) { 3707 return false; 3708 } else { 3709 int nof = boilerplate->map()->inobject_properties(); 3710 for (int i = 0; i < nof; i++) { 3711 if ((*max_properties)-- == 0) return false; 3712 Handle<Object> value(boilerplate->InObjectPropertyAt(i)); 3713 if (value->IsJSObject()) { 3714 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 3715 if (!IsFastLiteral(value_object, 3716 max_depth - 1, 3717 max_properties, 3718 total_size)) { 3719 return false; 3720 } 3721 } 3722 } 3723 } 3724 3725 *total_size += boilerplate->map()->instance_size(); 3726 return true; 3727} 3728 3729 3730void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { 3731 ASSERT(!HasStackOverflow()); 3732 ASSERT(current_block() != NULL); 3733 ASSERT(current_block()->HasPredecessor()); 3734 Handle<JSFunction> closure = function_state()->compilation_info()->closure(); 3735 HValue* context = environment()->LookupContext(); 3736 HInstruction* literal; 3737 3738 // Check whether to use fast or slow deep-copying for boilerplate. 3739 int total_size = 0; 3740 int max_properties = HFastLiteral::kMaxLiteralProperties; 3741 Handle<Object> boilerplate(closure->literals()->get(expr->literal_index())); 3742 if (boilerplate->IsJSObject() && 3743 IsFastLiteral(Handle<JSObject>::cast(boilerplate), 3744 HFastLiteral::kMaxLiteralDepth, 3745 &max_properties, 3746 &total_size)) { 3747 Handle<JSObject> boilerplate_object = Handle<JSObject>::cast(boilerplate); 3748 literal = new(zone()) HFastLiteral(context, 3749 boilerplate_object, 3750 total_size, 3751 expr->literal_index(), 3752 expr->depth()); 3753 } else { 3754 literal = new(zone()) HObjectLiteral(context, 3755 expr->constant_properties(), 3756 expr->fast_elements(), 3757 expr->literal_index(), 3758 expr->depth(), 3759 expr->has_function()); 3760 } 3761 3762 // The object is expected in the bailout environment during computation 3763 // of the property values and is the value of the entire expression. 
3764 PushAndAdd(literal); 3765 3766 expr->CalculateEmitStore(); 3767 3768 for (int i = 0; i < expr->properties()->length(); i++) { 3769 ObjectLiteral::Property* property = expr->properties()->at(i); 3770 if (property->IsCompileTimeValue()) continue; 3771 3772 Literal* key = property->key(); 3773 Expression* value = property->value(); 3774 3775 switch (property->kind()) { 3776 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 3777 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); 3778 // Fall through. 3779 case ObjectLiteral::Property::COMPUTED: 3780 if (key->handle()->IsSymbol()) { 3781 if (property->emit_store()) { 3782 property->RecordTypeFeedback(oracle()); 3783 CHECK_ALIVE(VisitForValue(value)); 3784 HValue* value = Pop(); 3785 HInstruction* store = BuildStoreNamed(literal, value, property); 3786 AddInstruction(store); 3787 if (store->HasObservableSideEffects()) AddSimulate(key->id()); 3788 } else { 3789 CHECK_ALIVE(VisitForEffect(value)); 3790 } 3791 break; 3792 } 3793 // Fall through. 3794 case ObjectLiteral::Property::PROTOTYPE: 3795 case ObjectLiteral::Property::SETTER: 3796 case ObjectLiteral::Property::GETTER: 3797 return Bailout("Object literal with complex property"); 3798 default: UNREACHABLE(); 3799 } 3800 } 3801 3802 if (expr->has_function()) { 3803 // Return the result of the transformation to fast properties 3804 // instead of the original since this operation changes the map 3805 // of the object. This makes sure that the original object won't 3806 // be used by other optimized code before it is transformed 3807 // (e.g. because of code motion). 3808 HToFastProperties* result = new(zone()) HToFastProperties(Pop()); 3809 AddInstruction(result); 3810 return ast_context()->ReturnValue(result); 3811 } else { 3812 return ast_context()->ReturnValue(Pop()); 3813 } 3814} 3815 3816 3817void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { 3818 ASSERT(!HasStackOverflow()); 3819 ASSERT(current_block() != NULL); 3820 ASSERT(current_block()->HasPredecessor()); 3821 ZoneList<Expression*>* subexprs = expr->values(); 3822 int length = subexprs->length(); 3823 HValue* context = environment()->LookupContext(); 3824 HInstruction* literal; 3825 3826 Handle<FixedArray> literals(environment()->closure()->literals()); 3827 Handle<Object> raw_boilerplate(literals->get(expr->literal_index())); 3828 3829 if (raw_boilerplate->IsUndefined()) { 3830 raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate( 3831 isolate(), literals, expr->constant_elements()); 3832 if (raw_boilerplate.is_null()) { 3833 return Bailout("array boilerplate creation failed"); 3834 } 3835 literals->set(expr->literal_index(), *raw_boilerplate); 3836 if (JSObject::cast(*raw_boilerplate)->elements()->map() == 3837 isolate()->heap()->fixed_cow_array_map()) { 3838 isolate()->counters()->cow_arrays_created_runtime()->Increment(); 3839 } 3840 } 3841 3842 Handle<JSObject> boilerplate = Handle<JSObject>::cast(raw_boilerplate); 3843 ElementsKind boilerplate_elements_kind = 3844 Handle<JSObject>::cast(boilerplate)->GetElementsKind(); 3845 3846 // Check whether to use fast or slow deep-copying for boilerplate. 
3847 int total_size = 0; 3848 int max_properties = HFastLiteral::kMaxLiteralProperties; 3849 if (IsFastLiteral(boilerplate, 3850 HFastLiteral::kMaxLiteralDepth, 3851 &max_properties, 3852 &total_size)) { 3853 literal = new(zone()) HFastLiteral(context, 3854 boilerplate, 3855 total_size, 3856 expr->literal_index(), 3857 expr->depth()); 3858 } else { 3859 literal = new(zone()) HArrayLiteral(context, 3860 boilerplate, 3861 length, 3862 expr->literal_index(), 3863 expr->depth()); 3864 } 3865 3866 // The array is expected in the bailout environment during computation 3867 // of the property values and is the value of the entire expression. 3868 PushAndAdd(literal); 3869 3870 HLoadElements* elements = NULL; 3871 3872 for (int i = 0; i < length; i++) { 3873 Expression* subexpr = subexprs->at(i); 3874 // If the subexpression is a literal or a simple materialized literal it 3875 // is already set in the cloned array. 3876 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 3877 3878 CHECK_ALIVE(VisitForValue(subexpr)); 3879 HValue* value = Pop(); 3880 if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal"); 3881 3882 elements = new(zone()) HLoadElements(literal); 3883 AddInstruction(elements); 3884 3885 HValue* key = AddInstruction( 3886 new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)), 3887 Representation::Integer32())); 3888 3889 switch (boilerplate_elements_kind) { 3890 case FAST_SMI_ONLY_ELEMENTS: 3891 // Smi-only arrays need a smi check. 3892 AddInstruction(new(zone()) HCheckSmi(value)); 3893 // Fall through. 3894 case FAST_ELEMENTS: 3895 AddInstruction(new(zone()) HStoreKeyedFastElement( 3896 elements, 3897 key, 3898 value, 3899 boilerplate_elements_kind)); 3900 break; 3901 case FAST_DOUBLE_ELEMENTS: 3902 AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements, 3903 key, 3904 value)); 3905 break; 3906 default: 3907 UNREACHABLE(); 3908 break; 3909 } 3910 3911 AddSimulate(expr->GetIdForElement(i)); 3912 } 3913 return ast_context()->ReturnValue(Pop()); 3914} 3915 3916 3917// Sets the lookup result and returns true if the store can be inlined. 
3918static bool ComputeStoredField(Handle<Map> type, 3919 Handle<String> name, 3920 LookupResult* lookup) { 3921 type->LookupInDescriptors(NULL, *name, lookup); 3922 if (!lookup->IsFound()) return false; 3923 if (lookup->type() == FIELD) return true; 3924 return (lookup->type() == MAP_TRANSITION) && 3925 (type->unused_property_fields() > 0); 3926} 3927 3928 3929static int ComputeStoredFieldIndex(Handle<Map> type, 3930 Handle<String> name, 3931 LookupResult* lookup) { 3932 ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION); 3933 if (lookup->type() == FIELD) { 3934 return lookup->GetLocalFieldIndexFromMap(*type); 3935 } else { 3936 Map* transition = lookup->GetTransitionMapFromMap(*type); 3937 return transition->PropertyIndexFor(*name) - type->inobject_properties(); 3938 } 3939} 3940 3941 3942HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object, 3943 Handle<String> name, 3944 HValue* value, 3945 Handle<Map> type, 3946 LookupResult* lookup, 3947 bool smi_and_map_check) { 3948 if (smi_and_map_check) { 3949 AddInstruction(new(zone()) HCheckNonSmi(object)); 3950 AddInstruction(new(zone()) HCheckMap(object, type, NULL, 3951 ALLOW_ELEMENT_TRANSITION_MAPS)); 3952 } 3953 3954 int index = ComputeStoredFieldIndex(type, name, lookup); 3955 bool is_in_object = index < 0; 3956 int offset = index * kPointerSize; 3957 if (index < 0) { 3958 // Negative property indices are in-object properties, indexed 3959 // from the end of the fixed part of the object. 3960 offset += type->instance_size(); 3961 } else { 3962 offset += FixedArray::kHeaderSize; 3963 } 3964 HStoreNamedField* instr = 3965 new(zone()) HStoreNamedField(object, name, value, is_in_object, offset); 3966 if (lookup->type() == MAP_TRANSITION) { 3967 Handle<Map> transition(lookup->GetTransitionMapFromMap(*type)); 3968 instr->set_transition(transition); 3969 // TODO(fschneider): Record the new map type of the object in the IR to 3970 // enable elimination of redundant checks after the transition store. 3971 instr->SetGVNFlag(kChangesMaps); 3972 } 3973 return instr; 3974} 3975 3976 3977HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object, 3978 Handle<String> name, 3979 HValue* value) { 3980 HValue* context = environment()->LookupContext(); 3981 return new(zone()) HStoreNamedGeneric( 3982 context, 3983 object, 3984 name, 3985 value, 3986 function_strict_mode_flag()); 3987} 3988 3989 3990HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object, 3991 HValue* value, 3992 ObjectLiteral::Property* prop) { 3993 Literal* key = prop->key()->AsLiteral(); 3994 Handle<String> name = Handle<String>::cast(key->handle()); 3995 ASSERT(!name.is_null()); 3996 3997 LookupResult lookup(isolate()); 3998 Handle<Map> type = prop->GetReceiverType(); 3999 bool is_monomorphic = prop->IsMonomorphic() && 4000 ComputeStoredField(type, name, &lookup); 4001 4002 return is_monomorphic 4003 ? BuildStoreNamedField(object, name, value, type, &lookup, 4004 true) // Needs smi and map check. 4005 : BuildStoreNamedGeneric(object, name, value); 4006} 4007 4008 4009HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object, 4010 HValue* value, 4011 Expression* expr) { 4012 Property* prop = (expr->AsProperty() != NULL) 4013 ? 
expr->AsProperty() 4014 : expr->AsAssignment()->target()->AsProperty(); 4015 Literal* key = prop->key()->AsLiteral(); 4016 Handle<String> name = Handle<String>::cast(key->handle()); 4017 ASSERT(!name.is_null()); 4018 4019 LookupResult lookup(isolate()); 4020 SmallMapList* types = expr->GetReceiverTypes(); 4021 bool is_monomorphic = expr->IsMonomorphic() && 4022 ComputeStoredField(types->first(), name, &lookup); 4023 4024 return is_monomorphic 4025 ? BuildStoreNamedField(object, name, value, types->first(), &lookup, 4026 true) // Needs smi and map check. 4027 : BuildStoreNamedGeneric(object, name, value); 4028} 4029 4030 4031void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr, 4032 HValue* object, 4033 HValue* value, 4034 SmallMapList* types, 4035 Handle<String> name) { 4036 // TODO(ager): We should recognize when the prototype chains for different 4037 // maps are identical. In that case we can avoid repeatedly generating the 4038 // same prototype map checks. 4039 int count = 0; 4040 HBasicBlock* join = NULL; 4041 for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) { 4042 Handle<Map> map = types->at(i); 4043 LookupResult lookup(isolate()); 4044 if (ComputeStoredField(map, name, &lookup)) { 4045 if (count == 0) { 4046 AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once. 4047 join = graph()->CreateBasicBlock(); 4048 } 4049 ++count; 4050 HBasicBlock* if_true = graph()->CreateBasicBlock(); 4051 HBasicBlock* if_false = graph()->CreateBasicBlock(); 4052 HCompareMap* compare = 4053 new(zone()) HCompareMap(object, map, if_true, if_false); 4054 current_block()->Finish(compare); 4055 4056 set_current_block(if_true); 4057 HInstruction* instr = 4058 BuildStoreNamedField(object, name, value, map, &lookup, false); 4059 instr->set_position(expr->position()); 4060 // Goto will add the HSimulate for the store. 4061 AddInstruction(instr); 4062 if (!ast_context()->IsEffect()) Push(value); 4063 current_block()->Goto(join); 4064 4065 set_current_block(if_false); 4066 } 4067 } 4068 4069 // Finish up. Unconditionally deoptimize if we've handled all the maps we 4070 // know about and do not want to handle ones we've never seen. Otherwise 4071 // use a generic IC. 4072 if (count == types->length() && FLAG_deoptimize_uncommon_cases) { 4073 current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses); 4074 } else { 4075 HInstruction* instr = BuildStoreNamedGeneric(object, name, value); 4076 instr->set_position(expr->position()); 4077 AddInstruction(instr); 4078 4079 if (join != NULL) { 4080 if (!ast_context()->IsEffect()) Push(value); 4081 current_block()->Goto(join); 4082 } else { 4083 // The HSimulate for the store should not see the stored value in 4084 // effect contexts (it is not materialized at expr->id() in the 4085 // unoptimized code). 
      if (instr->HasObservableSideEffects()) {
        if (ast_context()->IsEffect()) {
          AddSimulate(expr->id());
        } else {
          Push(value);
          AddSimulate(expr->id());
          Drop(1);
        }
      }
      return ast_context()->ReturnValue(value);
    }
  }

  ASSERT(join != NULL);
  join->SetJoinId(expr->id());
  set_current_block(join);
  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
}


void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  expr->RecordTypeFeedback(oracle());
  CHECK_ALIVE(VisitForValue(prop->obj()));

  HValue* value = NULL;
  HInstruction* instr = NULL;

  if (prop->key()->IsPropertyName()) {
    // Named store.
    CHECK_ALIVE(VisitForValue(expr->value()));
    value = Pop();
    HValue* object = Pop();

    Literal* key = prop->key()->AsLiteral();
    Handle<String> name = Handle<String>::cast(key->handle());
    ASSERT(!name.is_null());

    SmallMapList* types = expr->GetReceiverTypes();
    LookupResult lookup(isolate());

    if (expr->IsMonomorphic()) {
      instr = BuildStoreNamed(object, value, expr);

    } else if (types != NULL && types->length() > 1) {
      HandlePolymorphicStoreNamedField(expr, object, value, types, name);
      return;

    } else {
      instr = BuildStoreNamedGeneric(object, name, value);
    }

  } else {
    // Keyed store.
    CHECK_ALIVE(VisitForValue(prop->key()));
    CHECK_ALIVE(VisitForValue(expr->value()));
    value = Pop();
    HValue* key = Pop();
    HValue* object = Pop();
    bool has_side_effects = false;
    HandleKeyedElementAccess(object, key, value, expr, expr->AssignmentId(),
                             expr->position(),
                             true,  // is_store
                             &has_side_effects);
    Push(value);
    ASSERT(has_side_effects);  // Stores always have side effects.
    AddSimulate(expr->AssignmentId());
    return ast_context()->ReturnValue(Pop());
  }
  Push(value);
  instr->set_position(expr->position());
  AddInstruction(instr);
  if (instr->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
  return ast_context()->ReturnValue(Pop());
}


// Because not every expression has a position and there is no common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
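// A store to an unallocated (global) variable either writes the global
// property cell directly, when LookupGlobalProperty finds a suitable cell,
// or falls back to a generic named store on the global object.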
void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
                                                   HValue* value,
                                                   int position,
                                                   int ast_id) {
  LookupResult lookup(isolate());
  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
  if (type == kUseCell) {
    Handle<GlobalObject> global(info()->global_object());
    Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
    HInstruction* instr =
        new(zone()) HStoreGlobalCell(value, cell, lookup.GetPropertyDetails());
    instr->set_position(position);
    AddInstruction(instr);
    if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
  } else {
    HValue* context = environment()->LookupContext();
    HGlobalObject* global_object = new(zone()) HGlobalObject(context);
    AddInstruction(global_object);
    HStoreGlobalGeneric* instr =
        new(zone()) HStoreGlobalGeneric(context,
                                        global_object,
                                        var->name(),
                                        value,
                                        function_strict_mode_flag());
    instr->set_position(position);
    AddInstruction(instr);
    ASSERT(instr->HasObservableSideEffects());
    if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
  }
}


void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout("unsupported let compound assignment");
    }

    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->position(),
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        if (var->mode() == CONST) {
          return Bailout("unsupported const compound assignment");
        }
        Bind(var, Top());
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots. We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
4241 int count = info()->scope()->num_parameters(); 4242 for (int i = 0; i < count; ++i) { 4243 if (var == info()->scope()->parameter(i)) { 4244 Bailout( 4245 "assignment to parameter, function uses arguments object"); 4246 } 4247 } 4248 } 4249 4250 HStoreContextSlot::Mode mode; 4251 4252 switch (var->mode()) { 4253 case LET: 4254 mode = HStoreContextSlot::kCheckDeoptimize; 4255 break; 4256 case CONST: 4257 return ast_context()->ReturnValue(Pop()); 4258 case CONST_HARMONY: 4259 // This case is checked statically so no need to 4260 // perform checks here 4261 UNREACHABLE(); 4262 default: 4263 mode = HStoreContextSlot::kNoCheck; 4264 } 4265 4266 HValue* context = BuildContextChainWalk(var); 4267 HStoreContextSlot* instr = 4268 new(zone()) HStoreContextSlot(context, var->index(), mode, Top()); 4269 AddInstruction(instr); 4270 if (instr->HasObservableSideEffects()) { 4271 AddSimulate(expr->AssignmentId()); 4272 } 4273 break; 4274 } 4275 4276 case Variable::LOOKUP: 4277 return Bailout("compound assignment to lookup slot"); 4278 } 4279 return ast_context()->ReturnValue(Pop()); 4280 4281 } else if (prop != NULL) { 4282 prop->RecordTypeFeedback(oracle()); 4283 4284 if (prop->key()->IsPropertyName()) { 4285 // Named property. 4286 CHECK_ALIVE(VisitForValue(prop->obj())); 4287 HValue* obj = Top(); 4288 4289 HInstruction* load = NULL; 4290 if (prop->IsMonomorphic()) { 4291 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName(); 4292 Handle<Map> map = prop->GetReceiverTypes()->first(); 4293 load = BuildLoadNamed(obj, prop, map, name); 4294 } else { 4295 load = BuildLoadNamedGeneric(obj, prop); 4296 } 4297 PushAndAdd(load); 4298 if (load->HasObservableSideEffects()) AddSimulate(expr->CompoundLoadId()); 4299 4300 CHECK_ALIVE(VisitForValue(expr->value())); 4301 HValue* right = Pop(); 4302 HValue* left = Pop(); 4303 4304 HInstruction* instr = BuildBinaryOperation(operation, left, right); 4305 PushAndAdd(instr); 4306 if (instr->HasObservableSideEffects()) AddSimulate(operation->id()); 4307 4308 HInstruction* store = BuildStoreNamed(obj, instr, prop); 4309 AddInstruction(store); 4310 // Drop the simulated receiver and value. Return the value. 4311 Drop(2); 4312 Push(instr); 4313 if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId()); 4314 return ast_context()->ReturnValue(Pop()); 4315 4316 } else { 4317 // Keyed property. 4318 CHECK_ALIVE(VisitForValue(prop->obj())); 4319 CHECK_ALIVE(VisitForValue(prop->key())); 4320 HValue* obj = environment()->ExpressionStackAt(1); 4321 HValue* key = environment()->ExpressionStackAt(0); 4322 4323 bool has_side_effects = false; 4324 HValue* load = HandleKeyedElementAccess( 4325 obj, key, NULL, prop, expr->CompoundLoadId(), RelocInfo::kNoPosition, 4326 false, // is_store 4327 &has_side_effects); 4328 Push(load); 4329 if (has_side_effects) AddSimulate(expr->CompoundLoadId()); 4330 4331 4332 CHECK_ALIVE(VisitForValue(expr->value())); 4333 HValue* right = Pop(); 4334 HValue* left = Pop(); 4335 4336 HInstruction* instr = BuildBinaryOperation(operation, left, right); 4337 PushAndAdd(instr); 4338 if (instr->HasObservableSideEffects()) AddSimulate(operation->id()); 4339 4340 expr->RecordTypeFeedback(oracle()); 4341 HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(), 4342 RelocInfo::kNoPosition, 4343 true, // is_store 4344 &has_side_effects); 4345 4346 // Drop the simulated receiver, key, and value. Return the value. 4347 Drop(3); 4348 Push(instr); 4349 ASSERT(has_side_effects); // Stores always have side effects. 
4350 AddSimulate(expr->AssignmentId()); 4351 return ast_context()->ReturnValue(Pop()); 4352 } 4353 4354 } else { 4355 return Bailout("invalid lhs in compound assignment"); 4356 } 4357} 4358 4359 4360void HGraphBuilder::VisitAssignment(Assignment* expr) { 4361 ASSERT(!HasStackOverflow()); 4362 ASSERT(current_block() != NULL); 4363 ASSERT(current_block()->HasPredecessor()); 4364 VariableProxy* proxy = expr->target()->AsVariableProxy(); 4365 Property* prop = expr->target()->AsProperty(); 4366 ASSERT(proxy == NULL || prop == NULL); 4367 4368 if (expr->is_compound()) { 4369 HandleCompoundAssignment(expr); 4370 return; 4371 } 4372 4373 if (prop != NULL) { 4374 HandlePropertyAssignment(expr); 4375 } else if (proxy != NULL) { 4376 Variable* var = proxy->var(); 4377 4378 if (var->mode() == CONST) { 4379 if (expr->op() != Token::INIT_CONST) { 4380 CHECK_ALIVE(VisitForValue(expr->value())); 4381 return ast_context()->ReturnValue(Pop()); 4382 } 4383 4384 if (var->IsStackAllocated()) { 4385 // We insert a use of the old value to detect unsupported uses of const 4386 // variables (e.g. initialization inside a loop). 4387 HValue* old_value = environment()->Lookup(var); 4388 AddInstruction(new HUseConst(old_value)); 4389 } 4390 } else if (var->mode() == CONST_HARMONY) { 4391 if (expr->op() != Token::INIT_CONST_HARMONY) { 4392 return Bailout("non-initializer assignment to const"); 4393 } 4394 } 4395 4396 if (proxy->IsArguments()) return Bailout("assignment to arguments"); 4397 4398 // Handle the assignment. 4399 switch (var->location()) { 4400 case Variable::UNALLOCATED: 4401 CHECK_ALIVE(VisitForValue(expr->value())); 4402 HandleGlobalVariableAssignment(var, 4403 Top(), 4404 expr->position(), 4405 expr->AssignmentId()); 4406 return ast_context()->ReturnValue(Pop()); 4407 4408 case Variable::PARAMETER: 4409 case Variable::LOCAL: { 4410 // Perform an initialization check for let declared variables 4411 // or parameters. 4412 if (var->mode() == LET && expr->op() == Token::ASSIGN) { 4413 HValue* env_value = environment()->Lookup(var); 4414 if (env_value == graph()->GetConstantHole()) { 4415 return Bailout("assignment to let variable before initialization"); 4416 } 4417 } 4418 // We do not allow the arguments object to occur in a context where it 4419 // may escape, but assignments to stack-allocated locals are 4420 // permitted. 4421 CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED)); 4422 HValue* value = Pop(); 4423 Bind(var, value); 4424 return ast_context()->ReturnValue(value); 4425 } 4426 4427 case Variable::CONTEXT: { 4428 // Bail out if we try to mutate a parameter value in a function using 4429 // the arguments object. We do not (yet) correctly handle the 4430 // arguments property of the function. 4431 if (info()->scope()->arguments() != NULL) { 4432 // Parameters will rewrite to context slots. We have no direct way 4433 // to detect that the variable is a parameter. 
4434 int count = info()->scope()->num_parameters(); 4435 for (int i = 0; i < count; ++i) { 4436 if (var == info()->scope()->parameter(i)) { 4437 return Bailout("assignment to parameter in arguments object"); 4438 } 4439 } 4440 } 4441 4442 CHECK_ALIVE(VisitForValue(expr->value())); 4443 HStoreContextSlot::Mode mode; 4444 if (expr->op() == Token::ASSIGN) { 4445 switch (var->mode()) { 4446 case LET: 4447 mode = HStoreContextSlot::kCheckDeoptimize; 4448 break; 4449 case CONST: 4450 return ast_context()->ReturnValue(Pop()); 4451 case CONST_HARMONY: 4452 // This case is checked statically so no need to 4453 // perform checks here 4454 UNREACHABLE(); 4455 default: 4456 mode = HStoreContextSlot::kNoCheck; 4457 } 4458 } else if (expr->op() == Token::INIT_VAR || 4459 expr->op() == Token::INIT_LET || 4460 expr->op() == Token::INIT_CONST_HARMONY) { 4461 mode = HStoreContextSlot::kNoCheck; 4462 } else { 4463 ASSERT(expr->op() == Token::INIT_CONST); 4464 4465 mode = HStoreContextSlot::kCheckIgnoreAssignment; 4466 } 4467 4468 HValue* context = BuildContextChainWalk(var); 4469 HStoreContextSlot* instr = new(zone()) HStoreContextSlot( 4470 context, var->index(), mode, Top()); 4471 AddInstruction(instr); 4472 if (instr->HasObservableSideEffects()) { 4473 AddSimulate(expr->AssignmentId()); 4474 } 4475 return ast_context()->ReturnValue(Pop()); 4476 } 4477 4478 case Variable::LOOKUP: 4479 return Bailout("assignment to LOOKUP variable"); 4480 } 4481 } else { 4482 return Bailout("invalid left-hand side in assignment"); 4483 } 4484} 4485 4486 4487void HGraphBuilder::VisitThrow(Throw* expr) { 4488 ASSERT(!HasStackOverflow()); 4489 ASSERT(current_block() != NULL); 4490 ASSERT(current_block()->HasPredecessor()); 4491 // We don't optimize functions with invalid left-hand sides in 4492 // assignments, count operations, or for-in. Consequently throw can 4493 // currently only occur in an effect context. 4494 ASSERT(ast_context()->IsEffect()); 4495 CHECK_ALIVE(VisitForValue(expr->exception())); 4496 4497 HValue* context = environment()->LookupContext(); 4498 HValue* value = environment()->Pop(); 4499 HThrow* instr = new(zone()) HThrow(context, value); 4500 instr->set_position(expr->position()); 4501 AddInstruction(instr); 4502 AddSimulate(expr->id()); 4503 current_block()->FinishExit(new(zone()) HAbnormalExit); 4504 set_current_block(NULL); 4505} 4506 4507 4508HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object, 4509 Property* expr, 4510 Handle<Map> type, 4511 LookupResult* lookup, 4512 bool smi_and_map_check) { 4513 if (smi_and_map_check) { 4514 AddInstruction(new(zone()) HCheckNonSmi(object)); 4515 AddInstruction(new(zone()) HCheckMap(object, type, NULL, 4516 ALLOW_ELEMENT_TRANSITION_MAPS)); 4517 } 4518 4519 int index = lookup->GetLocalFieldIndexFromMap(*type); 4520 if (index < 0) { 4521 // Negative property indices are in-object properties, indexed 4522 // from the end of the fixed part of the object. 4523 int offset = (index * kPointerSize) + type->instance_size(); 4524 return new(zone()) HLoadNamedField(object, true, offset); 4525 } else { 4526 // Non-negative property indices are in the properties array. 
4527 int offset = (index * kPointerSize) + FixedArray::kHeaderSize; 4528 return new(zone()) HLoadNamedField(object, false, offset); 4529 } 4530} 4531 4532 4533HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj, 4534 Property* expr) { 4535 if (expr->IsUninitialized() && !FLAG_always_opt) { 4536 AddInstruction(new(zone()) HSoftDeoptimize); 4537 current_block()->MarkAsDeoptimizing(); 4538 } 4539 ASSERT(expr->key()->IsPropertyName()); 4540 Handle<Object> name = expr->key()->AsLiteral()->handle(); 4541 HValue* context = environment()->LookupContext(); 4542 return new(zone()) HLoadNamedGeneric(context, obj, name); 4543} 4544 4545 4546HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj, 4547 Property* expr, 4548 Handle<Map> map, 4549 Handle<String> name) { 4550 LookupResult lookup(isolate()); 4551 map->LookupInDescriptors(NULL, *name, &lookup); 4552 if (lookup.IsFound() && lookup.type() == FIELD) { 4553 return BuildLoadNamedField(obj, 4554 expr, 4555 map, 4556 &lookup, 4557 true); 4558 } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) { 4559 AddInstruction(new(zone()) HCheckNonSmi(obj)); 4560 AddInstruction(new(zone()) HCheckMap(obj, map, NULL, 4561 ALLOW_ELEMENT_TRANSITION_MAPS)); 4562 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map)); 4563 return new(zone()) HConstant(function, Representation::Tagged()); 4564 } else { 4565 return BuildLoadNamedGeneric(obj, expr); 4566 } 4567} 4568 4569 4570HInstruction* HGraphBuilder::BuildLoadKeyedGeneric(HValue* object, 4571 HValue* key) { 4572 HValue* context = environment()->LookupContext(); 4573 return new(zone()) HLoadKeyedGeneric(context, object, key); 4574} 4575 4576 4577HInstruction* HGraphBuilder::BuildExternalArrayElementAccess( 4578 HValue* external_elements, 4579 HValue* checked_key, 4580 HValue* val, 4581 ElementsKind elements_kind, 4582 bool is_store) { 4583 if (is_store) { 4584 ASSERT(val != NULL); 4585 switch (elements_kind) { 4586 case EXTERNAL_PIXEL_ELEMENTS: { 4587 val = AddInstruction(new(zone()) HClampToUint8(val)); 4588 break; 4589 } 4590 case EXTERNAL_BYTE_ELEMENTS: 4591 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 4592 case EXTERNAL_SHORT_ELEMENTS: 4593 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 4594 case EXTERNAL_INT_ELEMENTS: 4595 case EXTERNAL_UNSIGNED_INT_ELEMENTS: { 4596 if (!val->representation().IsInteger32()) { 4597 val = AddInstruction(new(zone()) HChange( 4598 val, 4599 Representation::Integer32(), 4600 true, // Truncate to int32. 4601 false)); // Don't deoptimize undefined (irrelevant here). 
4602 } 4603 break; 4604 } 4605 case EXTERNAL_FLOAT_ELEMENTS: 4606 case EXTERNAL_DOUBLE_ELEMENTS: 4607 break; 4608 case FAST_SMI_ONLY_ELEMENTS: 4609 case FAST_ELEMENTS: 4610 case FAST_DOUBLE_ELEMENTS: 4611 case DICTIONARY_ELEMENTS: 4612 case NON_STRICT_ARGUMENTS_ELEMENTS: 4613 UNREACHABLE(); 4614 break; 4615 } 4616 return new(zone()) HStoreKeyedSpecializedArrayElement( 4617 external_elements, checked_key, val, elements_kind); 4618 } else { 4619 ASSERT(val == NULL); 4620 return new(zone()) HLoadKeyedSpecializedArrayElement( 4621 external_elements, checked_key, elements_kind); 4622 } 4623} 4624 4625 4626HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements, 4627 HValue* checked_key, 4628 HValue* val, 4629 ElementsKind elements_kind, 4630 bool is_store) { 4631 if (is_store) { 4632 ASSERT(val != NULL); 4633 switch (elements_kind) { 4634 case FAST_DOUBLE_ELEMENTS: 4635 return new(zone()) HStoreKeyedFastDoubleElement( 4636 elements, checked_key, val); 4637 case FAST_SMI_ONLY_ELEMENTS: 4638 // Smi-only arrays need a smi check. 4639 AddInstruction(new(zone()) HCheckSmi(val)); 4640 // Fall through. 4641 case FAST_ELEMENTS: 4642 return new(zone()) HStoreKeyedFastElement( 4643 elements, checked_key, val, elements_kind); 4644 default: 4645 UNREACHABLE(); 4646 return NULL; 4647 } 4648 } 4649 // It's an element load (!is_store). 4650 if (elements_kind == FAST_DOUBLE_ELEMENTS) { 4651 return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key); 4652 } else { // FAST_ELEMENTS or FAST_SMI_ONLY_ELEMENTS. 4653 return new(zone()) HLoadKeyedFastElement(elements, checked_key); 4654 } 4655} 4656 4657 4658HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object, 4659 HValue* key, 4660 HValue* val, 4661 Handle<Map> map, 4662 bool is_store) { 4663 HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map)); 4664 bool fast_smi_only_elements = map->has_fast_smi_only_elements(); 4665 bool fast_elements = map->has_fast_elements(); 4666 HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object)); 4667 if (is_store && (fast_elements || fast_smi_only_elements)) { 4668 AddInstruction(new(zone()) HCheckMap( 4669 elements, isolate()->factory()->fixed_array_map())); 4670 } 4671 HInstruction* length = NULL; 4672 HInstruction* checked_key = NULL; 4673 if (map->has_external_array_elements()) { 4674 length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); 4675 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); 4676 HLoadExternalArrayPointer* external_elements = 4677 new(zone()) HLoadExternalArrayPointer(elements); 4678 AddInstruction(external_elements); 4679 return BuildExternalArrayElementAccess(external_elements, checked_key, 4680 val, map->elements_kind(), is_store); 4681 } 4682 ASSERT(fast_smi_only_elements || 4683 fast_elements || 4684 map->has_fast_double_elements()); 4685 if (map->instance_type() == JS_ARRAY_TYPE) { 4686 length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck)); 4687 } else { 4688 length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); 4689 } 4690 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); 4691 return BuildFastElementAccess(elements, checked_key, val, 4692 map->elements_kind(), is_store); 4693} 4694 4695 4696HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object, 4697 HValue* key, 4698 HValue* val, 4699 Expression* prop, 4700 int ast_id, 4701 int position, 4702 bool is_store, 4703 bool* has_side_effects) { 4704 *has_side_effects = false; 
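  // Check the receiver and dispatch on the elements kinds seen in the type
  // feedback, applying any possible elements-kind transitions first so that
  // fewer receiver maps have to be handled below.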
  AddInstruction(new(zone()) HCheckNonSmi(object));
  SmallMapList* maps = prop->GetReceiverTypes();
  bool todo_external_array = false;

  static const int kNumElementTypes = kElementsKindCount;
  bool type_todo[kNumElementTypes];
  for (int i = 0; i < kNumElementTypes; ++i) {
    type_todo[i] = false;
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ElementsKind elements_kind = map->elements_kind();
    if (elements_kind == FAST_DOUBLE_ELEMENTS ||
        elements_kind == FAST_ELEMENTS) {
      possible_transitioned_maps.Add(map);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Handle<Map> transitioned_map =
        map->FindTransitionedMap(&possible_transitioned_maps);
    transition_target.Add(transitioned_map);
  }

  int num_untransitionable_maps = 0;
  Handle<Map> untransitionable_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ASSERT(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      AddInstruction(new(zone()) HTransitionElementsKind(
          object, map, transition_target.at(i)));
    } else {
      type_todo[map->elements_kind()] = true;
      if (map->elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) {
        todo_external_array = true;
      }
      num_untransitionable_maps++;
      untransitionable_map = map;
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  if (num_untransitionable_maps == 1) {
    HInstruction* instr = NULL;
    if (untransitionable_map->has_slow_elements_kind()) {
      instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val)
                                      : BuildLoadKeyedGeneric(object, key));
    } else {
      instr = AddInstruction(BuildMonomorphicElementAccess(
          object, key, val, untransitionable_map, is_store));
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    instr->set_position(position);
    return is_store ? NULL : instr;
  }

  AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
  HBasicBlock* join = graph()->CreateBasicBlock();

  HInstruction* elements_kind_instr =
      AddInstruction(new(zone()) HElementsKind(object));
  HCompareConstantEqAndBranch* elements_kind_branch = NULL;
  HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
  HLoadExternalArrayPointer* external_elements = NULL;
  HInstruction* checked_key = NULL;

  // Generated code assumes that FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS,
  // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS are handled before external
  // arrays.
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
  STATIC_ASSERT(FAST_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
  STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
  STATIC_ASSERT(DICTIONARY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);

  for (ElementsKind elements_kind = FIRST_ELEMENTS_KIND;
       elements_kind <= LAST_ELEMENTS_KIND;
       elements_kind = ElementsKind(elements_kind + 1)) {
    // After having handled FAST_ELEMENTS, FAST_SMI_ONLY_ELEMENTS,
    // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS, we need to add some code
    // that's executed for all external array cases.
    STATIC_ASSERT(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND ==
                  LAST_ELEMENTS_KIND);
    if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
        && todo_external_array) {
      HInstruction* length =
          AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
      checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
      external_elements = new(zone()) HLoadExternalArrayPointer(elements);
      AddInstruction(external_elements);
    }
    if (type_todo[elements_kind]) {
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
          elements_kind_instr, elements_kind, Token::EQ_STRICT);
      elements_kind_branch->SetSuccessorAt(0, if_true);
      elements_kind_branch->SetSuccessorAt(1, if_false);
      current_block()->Finish(elements_kind_branch);

      set_current_block(if_true);
      HInstruction* access;
      if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
          elements_kind == FAST_ELEMENTS ||
          elements_kind == FAST_DOUBLE_ELEMENTS) {
        if (is_store && elements_kind != FAST_DOUBLE_ELEMENTS) {
          AddInstruction(new(zone()) HCheckMap(
              elements, isolate()->factory()->fixed_array_map(),
              elements_kind_branch));
        }
        // TODO(jkummerow): The need for these two blocks could be avoided
        // in one of two ways:
        // (1) Introduce ElementsKinds for JSArrays that are distinct from
        //     those for fast objects.
        // (2) Put the common instructions into a third "join" block. This
        //     requires additional AST IDs that we can deopt to from inside
        //     that join block. They must be added to the Property class (when
        //     it's a keyed property) and registered in the full codegen.
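        // The length of a JSArray must be loaded from the array itself, while
        // for other fast-element objects it comes from the backing store.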
4830 HBasicBlock* if_jsarray = graph()->CreateBasicBlock(); 4831 HBasicBlock* if_fastobject = graph()->CreateBasicBlock(); 4832 HHasInstanceTypeAndBranch* typecheck = 4833 new(zone()) HHasInstanceTypeAndBranch(object, JS_ARRAY_TYPE); 4834 typecheck->SetSuccessorAt(0, if_jsarray); 4835 typecheck->SetSuccessorAt(1, if_fastobject); 4836 current_block()->Finish(typecheck); 4837 4838 set_current_block(if_jsarray); 4839 HInstruction* length; 4840 length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck)); 4841 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); 4842 access = AddInstruction(BuildFastElementAccess( 4843 elements, checked_key, val, elements_kind, is_store)); 4844 if (!is_store) { 4845 Push(access); 4846 } 4847 4848 *has_side_effects |= access->HasObservableSideEffects(); 4849 if (position != -1) { 4850 access->set_position(position); 4851 } 4852 if_jsarray->Goto(join); 4853 4854 set_current_block(if_fastobject); 4855 length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); 4856 checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); 4857 access = AddInstruction(BuildFastElementAccess( 4858 elements, checked_key, val, elements_kind, is_store)); 4859 } else if (elements_kind == DICTIONARY_ELEMENTS) { 4860 if (is_store) { 4861 access = AddInstruction(BuildStoreKeyedGeneric(object, key, val)); 4862 } else { 4863 access = AddInstruction(BuildLoadKeyedGeneric(object, key)); 4864 } 4865 } else { // External array elements. 4866 access = AddInstruction(BuildExternalArrayElementAccess( 4867 external_elements, checked_key, val, elements_kind, is_store)); 4868 } 4869 *has_side_effects |= access->HasObservableSideEffects(); 4870 access->set_position(position); 4871 if (!is_store) { 4872 Push(access); 4873 } 4874 current_block()->Goto(join); 4875 set_current_block(if_false); 4876 } 4877 } 4878 4879 // Deopt if none of the cases matched. 4880 current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses); 4881 join->SetJoinId(ast_id); 4882 set_current_block(join); 4883 return is_store ? NULL : Pop(); 4884} 4885 4886 4887HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj, 4888 HValue* key, 4889 HValue* val, 4890 Expression* expr, 4891 int ast_id, 4892 int position, 4893 bool is_store, 4894 bool* has_side_effects) { 4895 ASSERT(!expr->IsPropertyName()); 4896 HInstruction* instr = NULL; 4897 if (expr->IsMonomorphic()) { 4898 Handle<Map> map = expr->GetMonomorphicReceiverType(); 4899 if (map->has_slow_elements_kind()) { 4900 instr = is_store ? 
BuildStoreKeyedGeneric(obj, key, val) 4901 : BuildLoadKeyedGeneric(obj, key); 4902 } else { 4903 AddInstruction(new(zone()) HCheckNonSmi(obj)); 4904 instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store); 4905 } 4906 } else if (expr->GetReceiverTypes() != NULL && 4907 !expr->GetReceiverTypes()->is_empty()) { 4908 return HandlePolymorphicElementAccess( 4909 obj, key, val, expr, ast_id, position, is_store, has_side_effects); 4910 } else { 4911 if (is_store) { 4912 instr = BuildStoreKeyedGeneric(obj, key, val); 4913 } else { 4914 instr = BuildLoadKeyedGeneric(obj, key); 4915 } 4916 } 4917 instr->set_position(position); 4918 AddInstruction(instr); 4919 *has_side_effects = instr->HasObservableSideEffects(); 4920 return instr; 4921} 4922 4923 4924HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object, 4925 HValue* key, 4926 HValue* value) { 4927 HValue* context = environment()->LookupContext(); 4928 return new(zone()) HStoreKeyedGeneric( 4929 context, 4930 object, 4931 key, 4932 value, 4933 function_strict_mode_flag()); 4934} 4935 4936bool HGraphBuilder::TryArgumentsAccess(Property* expr) { 4937 VariableProxy* proxy = expr->obj()->AsVariableProxy(); 4938 if (proxy == NULL) return false; 4939 if (!proxy->var()->IsStackAllocated()) return false; 4940 if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) { 4941 return false; 4942 } 4943 4944 // Our implementation of arguments (based on this stack frame or an 4945 // adapter below it) does not work for inlined functions. 4946 if (function_state()->outer() != NULL) { 4947 Bailout("arguments access in inlined function"); 4948 return true; 4949 } 4950 4951 HInstruction* result = NULL; 4952 if (expr->key()->IsPropertyName()) { 4953 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName(); 4954 if (!name->IsEqualTo(CStrVector("length"))) return false; 4955 HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements); 4956 result = new(zone()) HArgumentsLength(elements); 4957 } else { 4958 Push(graph()->GetArgumentsObject()); 4959 VisitForValue(expr->key()); 4960 if (HasStackOverflow() || current_block() == NULL) return true; 4961 HValue* key = Pop(); 4962 Drop(1); // Arguments object. 
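    // arguments[i]: emit a bounds-checked read from the materialized
    // arguments elements.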
4963 HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements); 4964 HInstruction* length = AddInstruction( 4965 new(zone()) HArgumentsLength(elements)); 4966 HInstruction* checked_key = 4967 AddInstruction(new(zone()) HBoundsCheck(key, length)); 4968 result = new(zone()) HAccessArgumentsAt(elements, length, checked_key); 4969 } 4970 ast_context()->ReturnInstruction(result, expr->id()); 4971 return true; 4972} 4973 4974 4975void HGraphBuilder::VisitProperty(Property* expr) { 4976 ASSERT(!HasStackOverflow()); 4977 ASSERT(current_block() != NULL); 4978 ASSERT(current_block()->HasPredecessor()); 4979 expr->RecordTypeFeedback(oracle()); 4980 4981 if (TryArgumentsAccess(expr)) return; 4982 4983 CHECK_ALIVE(VisitForValue(expr->obj())); 4984 4985 HInstruction* instr = NULL; 4986 if (expr->AsProperty()->IsArrayLength()) { 4987 HValue* array = Pop(); 4988 AddInstruction(new(zone()) HCheckNonSmi(array)); 4989 HInstruction* mapcheck = 4990 AddInstruction(HCheckInstanceType::NewIsJSArray(array)); 4991 instr = new(zone()) HJSArrayLength(array, mapcheck); 4992 4993 } else if (expr->IsStringLength()) { 4994 HValue* string = Pop(); 4995 AddInstruction(new(zone()) HCheckNonSmi(string)); 4996 AddInstruction(HCheckInstanceType::NewIsString(string)); 4997 instr = new(zone()) HStringLength(string); 4998 } else if (expr->IsStringAccess()) { 4999 CHECK_ALIVE(VisitForValue(expr->key())); 5000 HValue* index = Pop(); 5001 HValue* string = Pop(); 5002 HValue* context = environment()->LookupContext(); 5003 HStringCharCodeAt* char_code = 5004 BuildStringCharCodeAt(context, string, index); 5005 AddInstruction(char_code); 5006 instr = new(zone()) HStringCharFromCode(context, char_code); 5007 5008 } else if (expr->IsFunctionPrototype()) { 5009 HValue* function = Pop(); 5010 AddInstruction(new(zone()) HCheckNonSmi(function)); 5011 instr = new(zone()) HLoadFunctionPrototype(function); 5012 5013 } else if (expr->key()->IsPropertyName()) { 5014 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName(); 5015 SmallMapList* types = expr->GetReceiverTypes(); 5016 5017 HValue* obj = Pop(); 5018 if (expr->IsMonomorphic()) { 5019 instr = BuildLoadNamed(obj, expr, types->first(), name); 5020 } else if (types != NULL && types->length() > 1) { 5021 AddInstruction(new(zone()) HCheckNonSmi(obj)); 5022 HValue* context = environment()->LookupContext(); 5023 instr = new(zone()) HLoadNamedFieldPolymorphic(context, obj, types, name); 5024 } else { 5025 instr = BuildLoadNamedGeneric(obj, expr); 5026 } 5027 5028 } else { 5029 CHECK_ALIVE(VisitForValue(expr->key())); 5030 5031 HValue* key = Pop(); 5032 HValue* obj = Pop(); 5033 5034 bool has_side_effects = false; 5035 HValue* load = HandleKeyedElementAccess( 5036 obj, key, NULL, expr, expr->id(), expr->position(), 5037 false, // is_store 5038 &has_side_effects); 5039 if (has_side_effects) { 5040 if (ast_context()->IsEffect()) { 5041 AddSimulate(expr->id()); 5042 } else { 5043 Push(load); 5044 AddSimulate(expr->id()); 5045 Drop(1); 5046 } 5047 } 5048 return ast_context()->ReturnValue(load); 5049 } 5050 instr->set_position(expr->position()); 5051 return ast_context()->ReturnInstruction(instr, expr->id()); 5052} 5053 5054 5055void HGraphBuilder::AddCheckConstantFunction(Call* expr, 5056 HValue* receiver, 5057 Handle<Map> receiver_map, 5058 bool smi_and_map_check) { 5059 // Constant functions have the nice property that the map will change if they 5060 // are overwritten. Therefore it is enough to check the map of the holder and 5061 // its prototypes. 
  if (smi_and_map_check) {
    AddInstruction(new(zone()) HCheckNonSmi(receiver));
    AddInstruction(new(zone()) HCheckMap(receiver, receiver_map, NULL,
                                         ALLOW_ELEMENT_TRANSITION_MAPS));
  }
  if (!expr->holder().is_null()) {
    AddInstruction(new(zone()) HCheckPrototypeMaps(
        Handle<JSObject>(JSObject::cast(receiver_map->prototype())),
        expr->holder()));
  }
}


void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
                                               HValue* receiver,
                                               SmallMapList* types,
                                               Handle<String> name) {
  // TODO(ager): We should recognize when the prototype chains for different
  // maps are identical. In that case we can avoid repeatedly generating the
  // same prototype map checks.
  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  int count = 0;
  HBasicBlock* join = NULL;
  for (int i = 0; i < types->length() && count < kMaxCallPolymorphism; ++i) {
    Handle<Map> map = types->at(i);
    if (expr->ComputeTarget(map, name)) {
      if (count == 0) {
        // Only needed once.
        AddInstruction(new(zone()) HCheckNonSmi(receiver));
        join = graph()->CreateBasicBlock();
      }
      ++count;
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      HCompareMap* compare =
          new(zone()) HCompareMap(receiver, map, if_true, if_false);
      current_block()->Finish(compare);

      set_current_block(if_true);
      AddCheckConstantFunction(expr, receiver, map, false);
      if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
        PrintF("Trying to inline the polymorphic call to %s\n",
               *name->ToCString());
      }
      if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
        // Trying to inline will signal that we should bail out of the
        // entire compilation by setting stack overflow on the visitor.
        if (HasStackOverflow()) return;
      } else {
        HCallConstantFunction* call =
            new(zone()) HCallConstantFunction(expr->target(), argument_count);
        call->set_position(expr->position());
        PreProcessCall(call);
        AddInstruction(call);
        if (!ast_context()->IsEffect()) Push(call);
      }

      if (current_block() != NULL) current_block()->Goto(join);
      set_current_block(if_false);
    }
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
  } else {
    HValue* context = environment()->LookupContext();
    HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
    call->set_position(expr->position());
    PreProcessCall(call);

    if (join != NULL) {
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
      current_block()->Goto(join);
    } else {
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }

  // We assume that control flow is always live after an expression. So
  // even without predecessors to the join block, we set it as the exit
  // block and continue by adding instructions there.
5147 ASSERT(join != NULL); 5148 if (join->HasPredecessor()) { 5149 set_current_block(join); 5150 join->SetJoinId(expr->id()); 5151 if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop()); 5152 } else { 5153 set_current_block(NULL); 5154 } 5155} 5156 5157 5158void HGraphBuilder::TraceInline(Handle<JSFunction> target, 5159 Handle<JSFunction> caller, 5160 const char* reason) { 5161 if (FLAG_trace_inlining) { 5162 SmartArrayPointer<char> target_name = 5163 target->shared()->DebugName()->ToCString(); 5164 SmartArrayPointer<char> caller_name = 5165 caller->shared()->DebugName()->ToCString(); 5166 if (reason == NULL) { 5167 PrintF("Inlined %s called from %s.\n", *target_name, *caller_name); 5168 } else { 5169 PrintF("Did not inline %s called from %s (%s).\n", 5170 *target_name, *caller_name, reason); 5171 } 5172 } 5173} 5174 5175 5176bool HGraphBuilder::TryInline(CallKind call_kind, 5177 Handle<JSFunction> target, 5178 ZoneList<Expression*>* arguments, 5179 HValue* receiver, 5180 int ast_id, 5181 int return_id, 5182 ReturnHandlingFlag return_handling) { 5183 if (!FLAG_use_inlining) return false; 5184 5185 // Precondition: call is monomorphic and we have found a target with the 5186 // appropriate arity. 5187 Handle<JSFunction> caller = info()->closure(); 5188 Handle<SharedFunctionInfo> target_shared(target->shared()); 5189 5190 // Do a quick check on source code length to avoid parsing large 5191 // inlining candidates. 5192 if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize) 5193 || target_shared->SourceSize() > kUnlimitedMaxSourceSize) { 5194 TraceInline(target, caller, "target text too big"); 5195 return false; 5196 } 5197 5198 // Target must be inlineable. 5199 if (!target->IsInlineable()) { 5200 TraceInline(target, caller, "target not inlineable"); 5201 return false; 5202 } 5203 if (target_shared->dont_inline() || target_shared->dont_optimize()) { 5204 TraceInline(target, caller, "target contains unsupported syntax [early]"); 5205 return false; 5206 } 5207 5208 int nodes_added = target_shared->ast_node_count(); 5209 if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) || 5210 nodes_added > kUnlimitedMaxInlinedSize) { 5211 TraceInline(target, caller, "target AST is too large [early]"); 5212 return false; 5213 } 5214 5215#if !defined(V8_TARGET_ARCH_IA32) 5216 // Target must be able to use caller's context. 5217 CompilationInfo* outer_info = info(); 5218 if (target->context() != outer_info->closure()->context() || 5219 outer_info->scope()->contains_with() || 5220 outer_info->scope()->num_heap_slots() > 0) { 5221 TraceInline(target, caller, "target requires context change"); 5222 return false; 5223 } 5224#endif 5225 5226 5227 // Don't inline deeper than kMaxInliningLevels calls. 5228 HEnvironment* env = environment(); 5229 int current_level = 1; 5230 while (env->outer() != NULL) { 5231 if (current_level == Compiler::kMaxInliningLevels) { 5232 TraceInline(target, caller, "inline depth limit reached"); 5233 return false; 5234 } 5235 if (env->outer()->frame_type() == JS_FUNCTION) { 5236 current_level++; 5237 } 5238 env = env->outer(); 5239 } 5240 5241 // Don't inline recursive functions. 5242 for (FunctionState* state = function_state(); 5243 state != NULL; 5244 state = state->outer()) { 5245 if (state->compilation_info()->closure()->shared() == *target_shared) { 5246 TraceInline(target, caller, "target is recursive"); 5247 return false; 5248 } 5249 } 5250 5251 // We don't want to add more than a certain number of nodes from inlining. 
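  // inlined_count_ accumulates the AST node counts of everything already
  // inlined into this graph (see "inlined_count_ += nodes_added" further
  // down), so this check enforces a per-graph inlining budget rather than
  // just a per-target size limit.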
5252 if ((FLAG_limit_inlining && inlined_count_ > kMaxInlinedNodes) || 5253 inlined_count_ > kUnlimitedMaxInlinedNodes) { 5254 TraceInline(target, caller, "cumulative AST node limit reached"); 5255 return false; 5256 } 5257 5258 // Parse and allocate variables. 5259 CompilationInfo target_info(target); 5260 if (!ParserApi::Parse(&target_info, kNoParsingFlags) || 5261 !Scope::Analyze(&target_info)) { 5262 if (target_info.isolate()->has_pending_exception()) { 5263 // Parse or scope error, never optimize this function. 5264 SetStackOverflow(); 5265 target_shared->DisableOptimization(); 5266 } 5267 TraceInline(target, caller, "parse failure"); 5268 return false; 5269 } 5270 5271 if (target_info.scope()->num_heap_slots() > 0) { 5272 TraceInline(target, caller, "target has context-allocated variables"); 5273 return false; 5274 } 5275 FunctionLiteral* function = target_info.function(); 5276 5277 // The following conditions must be checked again after re-parsing, because 5278 // earlier the information might not have been complete due to lazy parsing. 5279 nodes_added = function->ast_node_count(); 5280 if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) || 5281 nodes_added > kUnlimitedMaxInlinedSize) { 5282 TraceInline(target, caller, "target AST is too large [late]"); 5283 return false; 5284 } 5285 AstProperties::Flags* flags(function->flags()); 5286 if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) { 5287 TraceInline(target, caller, "target contains unsupported syntax [late]"); 5288 return false; 5289 } 5290 5291 // If the function uses the arguments object check that inlining of functions 5292 // with arguments object is enabled and the arguments-variable is 5293 // stack allocated. 5294 if (function->scope()->arguments() != NULL) { 5295 if (!FLAG_inline_arguments) { 5296 TraceInline(target, caller, "target uses arguments object"); 5297 return false; 5298 } 5299 5300 if (!function->scope()->arguments()->IsStackAllocated()) { 5301 TraceInline(target, 5302 caller, 5303 "target uses non-stackallocated arguments object"); 5304 return false; 5305 } 5306 } 5307 5308 // All declarations must be inlineable. 5309 ZoneList<Declaration*>* decls = target_info.scope()->declarations(); 5310 int decl_count = decls->length(); 5311 for (int i = 0; i < decl_count; ++i) { 5312 if (!decls->at(i)->IsInlineable()) { 5313 TraceInline(target, caller, "target has non-trivial declaration"); 5314 return false; 5315 } 5316 } 5317 5318 // Generate the deoptimization data for the unoptimized version of 5319 // the target function if we don't already have it. 5320 if (!target_shared->has_deoptimization_support()) { 5321 // Note that we compile here using the same AST that we will use for 5322 // generating the optimized inline code. 5323 target_info.EnableDeoptimizationSupport(); 5324 if (!FullCodeGenerator::MakeCode(&target_info)) { 5325 TraceInline(target, caller, "could not generate deoptimization info"); 5326 return false; 5327 } 5328 if (target_shared->scope_info() == ScopeInfo::Empty()) { 5329 // The scope info might not have been set if a lazily compiled 5330 // function is inlined before being called for the first time. 
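      // Create the ScopeInfo from the scope we just analyzed and attach it
      // to the shared function info, so the unoptimized code and the deopt
      // data recorded below have a proper scope description to go with them.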
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(target_info.scope());
      target_shared->set_scope_info(*target_scope_info);
    }
    target_shared->EnableDeoptimizationSupport(*target_info.code());
    Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
                                        &target_info,
                                        target_shared);
  }

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function (so
  // TryInline should always return true).

  // Save the pending call context and type feedback oracle. Set up new ones
  // for the inlined function.
  ASSERT(target_shared->has_deoptimization_support());
  TypeFeedbackOracle target_oracle(
      Handle<Code>(target_shared->code()),
      Handle<Context>(target->context()->global_context()),
      isolate());
  // The function state is new-allocated because we need to delete it
  // in two different places.
  FunctionState* target_state = new FunctionState(
      this, &target_info, &target_oracle, return_handling);

  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner_env =
      environment()->CopyForInlining(target,
                                     arguments->length(),
                                     function,
                                     undefined,
                                     call_kind,
                                     function_state()->is_construct());
#ifdef V8_TARGET_ARCH_IA32
  // IA32 only, overwrite the caller's context in the deoptimization
  // environment with the correct one.
  //
  // TODO(kmillikin): implement the same inlining on other platforms so we
  // can remove the unsightly ifdefs in this function.
  HConstant* context = new HConstant(Handle<Context>(target->context()),
                                     Representation::Tagged());
  AddInstruction(context);
  inner_env->BindContext(context);
#endif
  AddSimulate(return_id);
  current_block()->UpdateEnvironment(inner_env);
  AddInstruction(new(zone()) HEnterInlined(target,
                                           arguments->length(),
                                           function,
                                           call_kind,
                                           function_state()->is_construct(),
                                           function->scope()->arguments()));
  // If the function uses the arguments object, create and bind one.
  if (function->scope()->arguments() != NULL) {
    ASSERT(function->scope()->arguments()->IsStackAllocated());
    environment()->Bind(function->scope()->arguments(),
                        graph()->GetArgumentsObject());
  }
  VisitDeclarations(target_info.scope()->declarations());
  VisitStatements(function->body());
  if (HasStackOverflow()) {
    // Bail out if the inlined function did, as we cannot residualize a call
    // instead.
    TraceInline(target, caller, "inline graph construction failed");
    target_shared->DisableOptimization();
    inline_bailout_ = true;
    delete target_state;
    return true;
  }

  // Update inlined nodes count.
  inlined_count_ += nodes_added;

  TraceInline(target, caller, NULL);

  if (current_block() != NULL) {
    // Add default return value (i.e. undefined for normal calls or the newly
    // allocated receiver for construct calls) if control can fall off the
    // body. In a test context, undefined is false and any JSObject is true.
    if (call_context()->IsValue()) {
      ASSERT(function_return() != NULL);
      HValue* return_value = function_state()->is_construct()
          ?
receiver 5415 : undefined; 5416 current_block()->AddLeaveInlined(return_value, 5417 function_return(), 5418 function_state()->drop_extra()); 5419 } else if (call_context()->IsEffect()) { 5420 ASSERT(function_return() != NULL); 5421 current_block()->Goto(function_return(), function_state()->drop_extra()); 5422 } else { 5423 ASSERT(call_context()->IsTest()); 5424 ASSERT(inlined_test_context() != NULL); 5425 HBasicBlock* target = function_state()->is_construct() 5426 ? inlined_test_context()->if_true() 5427 : inlined_test_context()->if_false(); 5428 current_block()->Goto(target, function_state()->drop_extra()); 5429 } 5430 } 5431 5432 // Fix up the function exits. 5433 if (inlined_test_context() != NULL) { 5434 HBasicBlock* if_true = inlined_test_context()->if_true(); 5435 HBasicBlock* if_false = inlined_test_context()->if_false(); 5436 5437 // Pop the return test context from the expression context stack. 5438 ASSERT(ast_context() == inlined_test_context()); 5439 ClearInlinedTestContext(); 5440 delete target_state; 5441 5442 // Forward to the real test context. 5443 if (if_true->HasPredecessor()) { 5444 if_true->SetJoinId(ast_id); 5445 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true(); 5446 if_true->Goto(true_target, function_state()->drop_extra()); 5447 } 5448 if (if_false->HasPredecessor()) { 5449 if_false->SetJoinId(ast_id); 5450 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false(); 5451 if_false->Goto(false_target, function_state()->drop_extra()); 5452 } 5453 set_current_block(NULL); 5454 return true; 5455 5456 } else if (function_return()->HasPredecessor()) { 5457 function_return()->SetJoinId(ast_id); 5458 set_current_block(function_return()); 5459 } else { 5460 set_current_block(NULL); 5461 } 5462 delete target_state; 5463 return true; 5464} 5465 5466 5467bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) { 5468 // The function call we are inlining is a method call if the call 5469 // is a property call. 5470 CallKind call_kind = (expr->expression()->AsProperty() == NULL) 5471 ? CALL_AS_FUNCTION 5472 : CALL_AS_METHOD; 5473 5474 return TryInline(call_kind, 5475 expr->target(), 5476 expr->arguments(), 5477 NULL, 5478 expr->id(), 5479 expr->ReturnId(), 5480 drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN); 5481} 5482 5483 5484bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) { 5485 return TryInline(CALL_AS_FUNCTION, 5486 expr->target(), 5487 expr->arguments(), 5488 receiver, 5489 expr->id(), 5490 expr->ReturnId(), 5491 CONSTRUCT_CALL_RETURN); 5492} 5493 5494 5495bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) { 5496 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false; 5497 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id(); 5498 switch (id) { 5499 case kMathRound: 5500 case kMathAbs: 5501 case kMathSqrt: 5502 case kMathLog: 5503 case kMathSin: 5504 case kMathCos: 5505 case kMathTan: 5506 if (expr->arguments()->length() == 1) { 5507 HValue* argument = Pop(); 5508 HValue* context = environment()->LookupContext(); 5509 Drop(1); // Receiver. 5510 HUnaryMathOperation* op = 5511 new(zone()) HUnaryMathOperation(context, argument, id); 5512 op->set_position(expr->position()); 5513 if (drop_extra) Drop(1); // Optionally drop the function. 5514 ast_context()->ReturnInstruction(op, expr->id()); 5515 return true; 5516 } 5517 break; 5518 default: 5519 // Not supported for inlining yet. 
5520 break; 5521 } 5522 return false; 5523} 5524 5525 5526bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr, 5527 HValue* receiver, 5528 Handle<Map> receiver_map, 5529 CheckType check_type) { 5530 ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null()); 5531 // Try to inline calls like Math.* as operations in the calling function. 5532 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false; 5533 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id(); 5534 int argument_count = expr->arguments()->length() + 1; // Plus receiver. 5535 switch (id) { 5536 case kStringCharCodeAt: 5537 case kStringCharAt: 5538 if (argument_count == 2 && check_type == STRING_CHECK) { 5539 HValue* index = Pop(); 5540 HValue* string = Pop(); 5541 HValue* context = environment()->LookupContext(); 5542 ASSERT(!expr->holder().is_null()); 5543 AddInstruction(new(zone()) HCheckPrototypeMaps( 5544 oracle()->GetPrototypeForPrimitiveCheck(STRING_CHECK), 5545 expr->holder())); 5546 HStringCharCodeAt* char_code = 5547 BuildStringCharCodeAt(context, string, index); 5548 if (id == kStringCharCodeAt) { 5549 ast_context()->ReturnInstruction(char_code, expr->id()); 5550 return true; 5551 } 5552 AddInstruction(char_code); 5553 HStringCharFromCode* result = 5554 new(zone()) HStringCharFromCode(context, char_code); 5555 ast_context()->ReturnInstruction(result, expr->id()); 5556 return true; 5557 } 5558 break; 5559 case kMathRound: 5560 case kMathFloor: 5561 case kMathAbs: 5562 case kMathSqrt: 5563 case kMathLog: 5564 case kMathSin: 5565 case kMathCos: 5566 case kMathTan: 5567 if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) { 5568 AddCheckConstantFunction(expr, receiver, receiver_map, true); 5569 HValue* argument = Pop(); 5570 HValue* context = environment()->LookupContext(); 5571 Drop(1); // Receiver. 5572 HUnaryMathOperation* op = 5573 new(zone()) HUnaryMathOperation(context, argument, id); 5574 op->set_position(expr->position()); 5575 ast_context()->ReturnInstruction(op, expr->id()); 5576 return true; 5577 } 5578 break; 5579 case kMathPow: 5580 if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) { 5581 AddCheckConstantFunction(expr, receiver, receiver_map, true); 5582 HValue* right = Pop(); 5583 HValue* left = Pop(); 5584 Pop(); // Pop receiver. 5585 HValue* context = environment()->LookupContext(); 5586 HInstruction* result = NULL; 5587 // Use sqrt() if exponent is 0.5 or -0.5. 5588 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) { 5589 double exponent = HConstant::cast(right)->DoubleValue(); 5590 if (exponent == 0.5) { 5591 result = 5592 new(zone()) HUnaryMathOperation(context, left, kMathPowHalf); 5593 } else if (exponent == -0.5) { 5594 HConstant* double_one = 5595 new(zone()) HConstant(Handle<Object>(Smi::FromInt(1)), 5596 Representation::Double()); 5597 AddInstruction(double_one); 5598 HUnaryMathOperation* square_root = 5599 new(zone()) HUnaryMathOperation(context, left, kMathPowHalf); 5600 AddInstruction(square_root); 5601 // MathPowHalf doesn't have side effects so there's no need for 5602 // an environment simulation here. 
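          // For example, Math.pow(x, -0.5) is emitted here as
          // 1 / pow(x, 0.5) using the double constant and the kMathPowHalf
          // operation built above, and Math.pow(x, 2) below becomes x * x;
          // only the remaining cases fall back to a full HPower.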
5603 ASSERT(!square_root->HasObservableSideEffects()); 5604 result = new(zone()) HDiv(context, double_one, square_root); 5605 } else if (exponent == 2.0) { 5606 result = new(zone()) HMul(context, left, left); 5607 } 5608 } else if (right->IsConstant() && 5609 HConstant::cast(right)->HasInteger32Value() && 5610 HConstant::cast(right)->Integer32Value() == 2) { 5611 result = new(zone()) HMul(context, left, left); 5612 } 5613 5614 if (result == NULL) { 5615 result = new(zone()) HPower(left, right); 5616 } 5617 ast_context()->ReturnInstruction(result, expr->id()); 5618 return true; 5619 } 5620 break; 5621 case kMathRandom: 5622 if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) { 5623 AddCheckConstantFunction(expr, receiver, receiver_map, true); 5624 Drop(1); // Receiver. 5625 HValue* context = environment()->LookupContext(); 5626 HGlobalObject* global_object = new(zone()) HGlobalObject(context); 5627 AddInstruction(global_object); 5628 HRandom* result = new(zone()) HRandom(global_object); 5629 ast_context()->ReturnInstruction(result, expr->id()); 5630 return true; 5631 } 5632 break; 5633 case kMathMax: 5634 case kMathMin: 5635 if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) { 5636 AddCheckConstantFunction(expr, receiver, receiver_map, true); 5637 HValue* right = Pop(); 5638 HValue* left = Pop(); 5639 Pop(); // Pop receiver. 5640 5641 HValue* left_operand = left; 5642 HValue* right_operand = right; 5643 5644 // If we do not have two integers, we convert to double for comparison. 5645 if (!left->representation().IsInteger32() || 5646 !right->representation().IsInteger32()) { 5647 if (!left->representation().IsDouble()) { 5648 HChange* left_convert = new(zone()) HChange( 5649 left, 5650 Representation::Double(), 5651 false, // Do not truncate when converting to double. 5652 true); // Deoptimize for undefined. 5653 left_convert->SetFlag(HValue::kBailoutOnMinusZero); 5654 left_operand = AddInstruction(left_convert); 5655 } 5656 if (!right->representation().IsDouble()) { 5657 HChange* right_convert = new(zone()) HChange( 5658 right, 5659 Representation::Double(), 5660 false, // Do not truncate when converting to double. 5661 true); // Deoptimize for undefined. 5662 right_convert->SetFlag(HValue::kBailoutOnMinusZero); 5663 right_operand = AddInstruction(right_convert); 5664 } 5665 } 5666 5667 ASSERT(left_operand->representation().Equals( 5668 right_operand->representation())); 5669 ASSERT(!left_operand->representation().IsTagged()); 5670 5671 Token::Value op = (id == kMathMin) ? Token::LT : Token::GT; 5672 5673 HCompareIDAndBranch* compare = 5674 new(zone()) HCompareIDAndBranch(left_operand, right_operand, op); 5675 compare->SetInputRepresentation(left_operand->representation()); 5676 5677 HBasicBlock* return_left = graph()->CreateBasicBlock(); 5678 HBasicBlock* return_right = graph()->CreateBasicBlock(); 5679 5680 compare->SetSuccessorAt(0, return_left); 5681 compare->SetSuccessorAt(1, return_right); 5682 current_block()->Finish(compare); 5683 5684 set_current_block(return_left); 5685 Push(left); 5686 set_current_block(return_right); 5687 // The branch above always returns the right operand if either of 5688 // them is NaN, but the spec requires that max/min(NaN, X) = NaN. 5689 // We add another branch that checks if the left operand is NaN or not. 5690 if (left_operand->representation().IsDouble()) { 5691 // If left_operand != left_operand then it is NaN. 
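      // The EQ self-comparison below fails only for NaN: in that case we
      // push the left operand (the NaN) as the result, otherwise we keep
      // the right operand that the first branch selected.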
5692 HCompareIDAndBranch* compare_nan = new(zone()) HCompareIDAndBranch( 5693 left_operand, left_operand, Token::EQ); 5694 compare_nan->SetInputRepresentation(left_operand->representation()); 5695 HBasicBlock* left_is_number = graph()->CreateBasicBlock(); 5696 HBasicBlock* left_is_nan = graph()->CreateBasicBlock(); 5697 compare_nan->SetSuccessorAt(0, left_is_number); 5698 compare_nan->SetSuccessorAt(1, left_is_nan); 5699 current_block()->Finish(compare_nan); 5700 set_current_block(left_is_nan); 5701 Push(left); 5702 set_current_block(left_is_number); 5703 Push(right); 5704 return_right = CreateJoin(left_is_number, left_is_nan, expr->id()); 5705 } else { 5706 Push(right); 5707 } 5708 5709 HBasicBlock* join = CreateJoin(return_left, return_right, expr->id()); 5710 set_current_block(join); 5711 ast_context()->ReturnValue(Pop()); 5712 return true; 5713 } 5714 break; 5715 default: 5716 // Not yet supported for inlining. 5717 break; 5718 } 5719 return false; 5720} 5721 5722 5723bool HGraphBuilder::TryCallApply(Call* expr) { 5724 Expression* callee = expr->expression(); 5725 Property* prop = callee->AsProperty(); 5726 ASSERT(prop != NULL); 5727 5728 if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) { 5729 return false; 5730 } 5731 Handle<Map> function_map = expr->GetReceiverTypes()->first(); 5732 if (function_map->instance_type() != JS_FUNCTION_TYPE || 5733 !expr->target()->shared()->HasBuiltinFunctionId() || 5734 expr->target()->shared()->builtin_function_id() != kFunctionApply) { 5735 return false; 5736 } 5737 5738 if (info()->scope()->arguments() == NULL) return false; 5739 5740 ZoneList<Expression*>* args = expr->arguments(); 5741 if (args->length() != 2) return false; 5742 5743 VariableProxy* arg_two = args->at(1)->AsVariableProxy(); 5744 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false; 5745 HValue* arg_two_value = environment()->Lookup(arg_two->var()); 5746 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false; 5747 5748 // Found pattern f.apply(receiver, arguments). 5749 VisitForValue(prop->obj()); 5750 if (HasStackOverflow() || current_block() == NULL) return true; 5751 HValue* function = Top(); 5752 AddCheckConstantFunction(expr, function, function_map, true); 5753 Drop(1); 5754 5755 VisitForValue(args->at(0)); 5756 if (HasStackOverflow() || current_block() == NULL) return true; 5757 HValue* receiver = Pop(); 5758 5759 if (function_state()->outer() == NULL) { 5760 HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements); 5761 HInstruction* length = 5762 AddInstruction(new(zone()) HArgumentsLength(elements)); 5763 HValue* wrapped_receiver = 5764 AddInstruction(new(zone()) HWrapReceiver(receiver, function)); 5765 HInstruction* result = 5766 new(zone()) HApplyArguments(function, 5767 wrapped_receiver, 5768 length, 5769 elements); 5770 result->set_position(expr->position()); 5771 ast_context()->ReturnInstruction(result, expr->id()); 5772 return true; 5773 } else { 5774 // We are inside inlined function and we know exactly what is inside 5775 // arguments object. 
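    // The apply'd arguments are just the formal parameters of the inlined
    // frame, so push the wrapped receiver followed by each parameter except
    // slot 0 (the inlined frame's receiver) and call the function directly
    // with HInvokeFunction instead of going through HApplyArguments.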
5776 HValue* context = environment()->LookupContext(); 5777 5778 HValue* wrapped_receiver = 5779 AddInstruction(new(zone()) HWrapReceiver(receiver, function)); 5780 PushAndAdd(new(zone()) HPushArgument(wrapped_receiver)); 5781 5782 HEnvironment* arguments_env = environment()->arguments_environment(); 5783 5784 int parameter_count = arguments_env->parameter_count(); 5785 for (int i = 1; i < arguments_env->parameter_count(); i++) { 5786 PushAndAdd(new(zone()) HPushArgument(arguments_env->Lookup(i))); 5787 } 5788 5789 HInvokeFunction* call = new(zone()) HInvokeFunction( 5790 context, 5791 function, 5792 parameter_count); 5793 Drop(parameter_count); 5794 call->set_position(expr->position()); 5795 ast_context()->ReturnInstruction(call, expr->id()); 5796 return true; 5797 } 5798} 5799 5800 5801void HGraphBuilder::VisitCall(Call* expr) { 5802 ASSERT(!HasStackOverflow()); 5803 ASSERT(current_block() != NULL); 5804 ASSERT(current_block()->HasPredecessor()); 5805 Expression* callee = expr->expression(); 5806 int argument_count = expr->arguments()->length() + 1; // Plus receiver. 5807 HInstruction* call = NULL; 5808 5809 Property* prop = callee->AsProperty(); 5810 if (prop != NULL) { 5811 if (!prop->key()->IsPropertyName()) { 5812 // Keyed function call. 5813 CHECK_ALIVE(VisitArgument(prop->obj())); 5814 5815 CHECK_ALIVE(VisitForValue(prop->key())); 5816 // Push receiver and key like the non-optimized code generator expects it. 5817 HValue* key = Pop(); 5818 HValue* receiver = Pop(); 5819 Push(key); 5820 Push(receiver); 5821 5822 CHECK_ALIVE(VisitArgumentList(expr->arguments())); 5823 5824 HValue* context = environment()->LookupContext(); 5825 call = new(zone()) HCallKeyed(context, key, argument_count); 5826 call->set_position(expr->position()); 5827 Drop(argument_count + 1); // 1 is the key. 5828 return ast_context()->ReturnInstruction(call, expr->id()); 5829 } 5830 5831 // Named function call. 5832 expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD); 5833 5834 if (TryCallApply(expr)) return; 5835 5836 CHECK_ALIVE(VisitForValue(prop->obj())); 5837 CHECK_ALIVE(VisitExpressions(expr->arguments())); 5838 5839 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName(); 5840 5841 SmallMapList* types = expr->GetReceiverTypes(); 5842 5843 HValue* receiver = 5844 environment()->ExpressionStackAt(expr->arguments()->length()); 5845 if (expr->IsMonomorphic()) { 5846 Handle<Map> receiver_map = (types == NULL || types->is_empty()) 5847 ? Handle<Map>::null() 5848 : types->first(); 5849 if (TryInlineBuiltinMethodCall(expr, 5850 receiver, 5851 receiver_map, 5852 expr->check_type())) { 5853 if (FLAG_trace_inlining) { 5854 PrintF("Inlining builtin "); 5855 expr->target()->ShortPrint(); 5856 PrintF("\n"); 5857 } 5858 return; 5859 } 5860 5861 if (CallStubCompiler::HasCustomCallGenerator(expr->target()) || 5862 expr->check_type() != RECEIVER_MAP_CHECK) { 5863 // When the target has a custom call IC generator, use the IC, 5864 // because it is likely to generate better code. Also use the IC 5865 // when a primitive receiver check is required. 
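      // HCallNamed goes through the generic call IC at run time, which
      // should pick up the custom call stub (or perform the primitive
      // receiver check) for us, so no map or constant-function checks are
      // emitted here.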
5866 HValue* context = environment()->LookupContext(); 5867 call = PreProcessCall( 5868 new(zone()) HCallNamed(context, name, argument_count)); 5869 } else { 5870 AddCheckConstantFunction(expr, receiver, receiver_map, true); 5871 5872 if (TryInlineCall(expr)) return; 5873 call = PreProcessCall( 5874 new(zone()) HCallConstantFunction(expr->target(), 5875 argument_count)); 5876 } 5877 } else if (types != NULL && types->length() > 1) { 5878 ASSERT(expr->check_type() == RECEIVER_MAP_CHECK); 5879 HandlePolymorphicCallNamed(expr, receiver, types, name); 5880 return; 5881 5882 } else { 5883 HValue* context = environment()->LookupContext(); 5884 call = PreProcessCall( 5885 new(zone()) HCallNamed(context, name, argument_count)); 5886 } 5887 5888 } else { 5889 expr->RecordTypeFeedback(oracle(), CALL_AS_FUNCTION); 5890 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 5891 bool global_call = proxy != NULL && proxy->var()->IsUnallocated(); 5892 5893 if (proxy != NULL && proxy->var()->is_possibly_eval()) { 5894 return Bailout("possible direct call to eval"); 5895 } 5896 5897 if (global_call) { 5898 Variable* var = proxy->var(); 5899 bool known_global_function = false; 5900 // If there is a global property cell for the name at compile time and 5901 // access check is not enabled we assume that the function will not change 5902 // and generate optimized code for calling the function. 5903 LookupResult lookup(isolate()); 5904 GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false); 5905 if (type == kUseCell && 5906 !info()->global_object()->IsAccessCheckNeeded()) { 5907 Handle<GlobalObject> global(info()->global_object()); 5908 known_global_function = expr->ComputeGlobalTarget(global, &lookup); 5909 } 5910 if (known_global_function) { 5911 // Push the global object instead of the global receiver because 5912 // code generated by the full code generator expects it. 5913 HValue* context = environment()->LookupContext(); 5914 HGlobalObject* global_object = new(zone()) HGlobalObject(context); 5915 PushAndAdd(global_object); 5916 CHECK_ALIVE(VisitExpressions(expr->arguments())); 5917 5918 CHECK_ALIVE(VisitForValue(expr->expression())); 5919 HValue* function = Pop(); 5920 AddInstruction(new(zone()) HCheckFunction(function, expr->target())); 5921 5922 // Replace the global object with the global receiver. 5923 HGlobalReceiver* global_receiver = 5924 new(zone()) HGlobalReceiver(global_object); 5925 // Index of the receiver from the top of the expression stack. 5926 const int receiver_index = argument_count - 1; 5927 AddInstruction(global_receiver); 5928 ASSERT(environment()->ExpressionStackAt(receiver_index)-> 5929 IsGlobalObject()); 5930 environment()->SetExpressionStackAt(receiver_index, global_receiver); 5931 5932 if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop. 
5933 if (FLAG_trace_inlining) { 5934 PrintF("Inlining builtin "); 5935 expr->target()->ShortPrint(); 5936 PrintF("\n"); 5937 } 5938 return; 5939 } 5940 if (TryInlineCall(expr)) return; 5941 call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(), 5942 argument_count)); 5943 } else { 5944 HValue* context = environment()->LookupContext(); 5945 HGlobalObject* receiver = new(zone()) HGlobalObject(context); 5946 AddInstruction(receiver); 5947 PushAndAdd(new(zone()) HPushArgument(receiver)); 5948 CHECK_ALIVE(VisitArgumentList(expr->arguments())); 5949 5950 call = new(zone()) HCallGlobal(context, var->name(), argument_count); 5951 Drop(argument_count); 5952 } 5953 5954 } else if (expr->IsMonomorphic()) { 5955 // The function is on the stack in the unoptimized code during 5956 // evaluation of the arguments. 5957 CHECK_ALIVE(VisitForValue(expr->expression())); 5958 HValue* function = Top(); 5959 HValue* context = environment()->LookupContext(); 5960 HGlobalObject* global = new(zone()) HGlobalObject(context); 5961 HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global); 5962 AddInstruction(global); 5963 PushAndAdd(receiver); 5964 CHECK_ALIVE(VisitExpressions(expr->arguments())); 5965 AddInstruction(new(zone()) HCheckFunction(function, expr->target())); 5966 5967 if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function. 5968 if (FLAG_trace_inlining) { 5969 PrintF("Inlining builtin "); 5970 expr->target()->ShortPrint(); 5971 PrintF("\n"); 5972 } 5973 return; 5974 } 5975 5976 if (TryInlineCall(expr, true)) { // Drop function from environment. 5977 return; 5978 } else { 5979 call = PreProcessCall(new(zone()) HInvokeFunction(context, 5980 function, 5981 argument_count)); 5982 Drop(1); // The function. 5983 } 5984 5985 } else { 5986 CHECK_ALIVE(VisitForValue(expr->expression())); 5987 HValue* function = Top(); 5988 HValue* context = environment()->LookupContext(); 5989 HGlobalObject* global_object = new(zone()) HGlobalObject(context); 5990 HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object); 5991 AddInstruction(global_object); 5992 AddInstruction(receiver); 5993 PushAndAdd(new(zone()) HPushArgument(receiver)); 5994 CHECK_ALIVE(VisitArgumentList(expr->arguments())); 5995 5996 call = new(zone()) HCallFunction(context, function, argument_count); 5997 Drop(argument_count + 1); 5998 } 5999 } 6000 6001 call->set_position(expr->position()); 6002 return ast_context()->ReturnInstruction(call, expr->id()); 6003} 6004 6005 6006// Checks whether allocation using the given constructor can be inlined. 6007static bool IsAllocationInlineable(Handle<JSFunction> constructor) { 6008 return constructor->has_initial_map() && 6009 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE; 6010} 6011 6012 6013void HGraphBuilder::VisitCallNew(CallNew* expr) { 6014 ASSERT(!HasStackOverflow()); 6015 ASSERT(current_block() != NULL); 6016 ASSERT(current_block()->HasPredecessor()); 6017 expr->RecordTypeFeedback(oracle()); 6018 int argument_count = expr->arguments()->length() + 1; // Plus constructor. 6019 HValue* context = environment()->LookupContext(); 6020 6021 if (FLAG_inline_construct && 6022 expr->IsMonomorphic() && 6023 IsAllocationInlineable(expr->target())) { 6024 // The constructor function is on the stack in the unoptimized code 6025 // during evaluation of the arguments. 
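    // Mirror that layout: push the constructor first and the arguments on
    // top of it; further down, the constructor's stack slot is overwritten
    // with the freshly allocated receiver before we try to inline the
    // constructor body.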
6026 CHECK_ALIVE(VisitForValue(expr->expression())); 6027 HValue* function = Top(); 6028 CHECK_ALIVE(VisitExpressions(expr->arguments())); 6029 Handle<JSFunction> constructor = expr->target(); 6030 HValue* check = AddInstruction( 6031 new(zone()) HCheckFunction(function, constructor)); 6032 6033 // Force completion of inobject slack tracking before generating 6034 // allocation code to finalize instance size. 6035 if (constructor->shared()->IsInobjectSlackTrackingInProgress()) { 6036 constructor->shared()->CompleteInobjectSlackTracking(); 6037 } 6038 6039 // Replace the constructor function with a newly allocated receiver. 6040 HInstruction* receiver = new(zone()) HAllocateObject(context, constructor); 6041 // Index of the receiver from the top of the expression stack. 6042 const int receiver_index = argument_count - 1; 6043 AddInstruction(receiver); 6044 ASSERT(environment()->ExpressionStackAt(receiver_index) == function); 6045 environment()->SetExpressionStackAt(receiver_index, receiver); 6046 6047 if (TryInlineConstruct(expr, receiver)) return; 6048 6049 // TODO(mstarzinger): For now we remove the previous HAllocateObject and 6050 // add HPushArgument for the arguments in case inlining failed. What we 6051 // actually should do is emit HInvokeFunction on the constructor instead 6052 // of using HCallNew as a fallback. 6053 receiver->DeleteAndReplaceWith(NULL); 6054 check->DeleteAndReplaceWith(NULL); 6055 environment()->SetExpressionStackAt(receiver_index, function); 6056 HInstruction* call = PreProcessCall( 6057 new(zone()) HCallNew(context, function, argument_count)); 6058 call->set_position(expr->position()); 6059 return ast_context()->ReturnInstruction(call, expr->id()); 6060 } else { 6061 // The constructor function is both an operand to the instruction and an 6062 // argument to the construct call. 6063 HValue* constructor = NULL; 6064 CHECK_ALIVE(constructor = VisitArgument(expr->expression())); 6065 CHECK_ALIVE(VisitArgumentList(expr->arguments())); 6066 HInstruction* call = 6067 new(zone()) HCallNew(context, constructor, argument_count); 6068 Drop(argument_count); 6069 call->set_position(expr->position()); 6070 return ast_context()->ReturnInstruction(call, expr->id()); 6071 } 6072} 6073 6074 6075// Support for generating inlined runtime functions. 6076 6077// Lookup table for generators for runtime calls that are generated inline. 6078// Elements of the table are member pointers to functions of HGraphBuilder. 6079#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \ 6080 &HGraphBuilder::Generate##Name, 6081 6082const HGraphBuilder::InlineFunctionGenerator 6083 HGraphBuilder::kInlineFunctionGenerators[] = { 6084 INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS) 6085 INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS) 6086}; 6087#undef INLINE_FUNCTION_GENERATOR_ADDRESS 6088 6089 6090void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) { 6091 ASSERT(!HasStackOverflow()); 6092 ASSERT(current_block() != NULL); 6093 ASSERT(current_block()->HasPredecessor()); 6094 if (expr->is_jsruntime()) { 6095 return Bailout("call to a JavaScript runtime function"); 6096 } 6097 6098 const Runtime::Function* function = expr->function(); 6099 ASSERT(function != NULL); 6100 if (function->intrinsic_type == Runtime::INLINE) { 6101 ASSERT(expr->name()->length() > 0); 6102 ASSERT(expr->name()->Get(0) == '_'); 6103 // Call to an inline function. 
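    // The generator table is indexed by the runtime function id relative to
    // Runtime::kFirstInlineFunction; e.g. a call like %_IsSmi(x) in the
    // self-hosted library should dispatch to its Generate##Name member and
    // be lowered directly to hydrogen instructions rather than an
    // HCallRuntime.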
6104 int lookup_index = static_cast<int>(function->function_id) - 6105 static_cast<int>(Runtime::kFirstInlineFunction); 6106 ASSERT(lookup_index >= 0); 6107 ASSERT(static_cast<size_t>(lookup_index) < 6108 ARRAY_SIZE(kInlineFunctionGenerators)); 6109 InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index]; 6110 6111 // Call the inline code generator using the pointer-to-member. 6112 (this->*generator)(expr); 6113 } else { 6114 ASSERT(function->intrinsic_type == Runtime::RUNTIME); 6115 CHECK_ALIVE(VisitArgumentList(expr->arguments())); 6116 6117 HValue* context = environment()->LookupContext(); 6118 Handle<String> name = expr->name(); 6119 int argument_count = expr->arguments()->length(); 6120 HCallRuntime* call = 6121 new(zone()) HCallRuntime(context, name, function, argument_count); 6122 call->set_position(RelocInfo::kNoPosition); 6123 Drop(argument_count); 6124 return ast_context()->ReturnInstruction(call, expr->id()); 6125 } 6126} 6127 6128 6129void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) { 6130 ASSERT(!HasStackOverflow()); 6131 ASSERT(current_block() != NULL); 6132 ASSERT(current_block()->HasPredecessor()); 6133 switch (expr->op()) { 6134 case Token::DELETE: return VisitDelete(expr); 6135 case Token::VOID: return VisitVoid(expr); 6136 case Token::TYPEOF: return VisitTypeof(expr); 6137 case Token::ADD: return VisitAdd(expr); 6138 case Token::SUB: return VisitSub(expr); 6139 case Token::BIT_NOT: return VisitBitNot(expr); 6140 case Token::NOT: return VisitNot(expr); 6141 default: UNREACHABLE(); 6142 } 6143} 6144 6145void HGraphBuilder::VisitDelete(UnaryOperation* expr) { 6146 Property* prop = expr->expression()->AsProperty(); 6147 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 6148 if (prop != NULL) { 6149 CHECK_ALIVE(VisitForValue(prop->obj())); 6150 CHECK_ALIVE(VisitForValue(prop->key())); 6151 HValue* key = Pop(); 6152 HValue* obj = Pop(); 6153 HValue* context = environment()->LookupContext(); 6154 HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key); 6155 return ast_context()->ReturnInstruction(instr, expr->id()); 6156 } else if (proxy != NULL) { 6157 Variable* var = proxy->var(); 6158 if (var->IsUnallocated()) { 6159 Bailout("delete with global variable"); 6160 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 6161 // Result of deleting non-global variables is false. 'this' is not 6162 // really a variable, though we implement it as one. The 6163 // subexpression does not have side effects. 6164 HValue* value = var->is_this() 6165 ? graph()->GetConstantTrue() 6166 : graph()->GetConstantFalse(); 6167 return ast_context()->ReturnValue(value); 6168 } else { 6169 Bailout("delete with non-global variable"); 6170 } 6171 } else { 6172 // Result of deleting non-property, non-variable reference is true. 6173 // Evaluate the subexpression for side effects. 
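    // For example, 'delete f()' still evaluates f() for its side effects,
    // but the delete expression itself just yields true.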
6174 CHECK_ALIVE(VisitForEffect(expr->expression())); 6175 return ast_context()->ReturnValue(graph()->GetConstantTrue()); 6176 } 6177} 6178 6179 6180void HGraphBuilder::VisitVoid(UnaryOperation* expr) { 6181 CHECK_ALIVE(VisitForEffect(expr->expression())); 6182 return ast_context()->ReturnValue(graph()->GetConstantUndefined()); 6183} 6184 6185 6186void HGraphBuilder::VisitTypeof(UnaryOperation* expr) { 6187 CHECK_ALIVE(VisitForTypeOf(expr->expression())); 6188 HValue* value = Pop(); 6189 HValue* context = environment()->LookupContext(); 6190 HInstruction* instr = new(zone()) HTypeof(context, value); 6191 return ast_context()->ReturnInstruction(instr, expr->id()); 6192} 6193 6194 6195void HGraphBuilder::VisitAdd(UnaryOperation* expr) { 6196 CHECK_ALIVE(VisitForValue(expr->expression())); 6197 HValue* value = Pop(); 6198 HValue* context = environment()->LookupContext(); 6199 HInstruction* instr = 6200 new(zone()) HMul(context, value, graph_->GetConstant1()); 6201 return ast_context()->ReturnInstruction(instr, expr->id()); 6202} 6203 6204 6205void HGraphBuilder::VisitSub(UnaryOperation* expr) { 6206 CHECK_ALIVE(VisitForValue(expr->expression())); 6207 HValue* value = Pop(); 6208 HValue* context = environment()->LookupContext(); 6209 HInstruction* instr = 6210 new(zone()) HMul(context, value, graph_->GetConstantMinus1()); 6211 TypeInfo info = oracle()->UnaryType(expr); 6212 if (info.IsUninitialized()) { 6213 AddInstruction(new(zone()) HSoftDeoptimize); 6214 current_block()->MarkAsDeoptimizing(); 6215 info = TypeInfo::Unknown(); 6216 } 6217 Representation rep = ToRepresentation(info); 6218 TraceRepresentation(expr->op(), info, instr, rep); 6219 instr->AssumeRepresentation(rep); 6220 return ast_context()->ReturnInstruction(instr, expr->id()); 6221} 6222 6223 6224void HGraphBuilder::VisitBitNot(UnaryOperation* expr) { 6225 CHECK_ALIVE(VisitForValue(expr->expression())); 6226 HValue* value = Pop(); 6227 TypeInfo info = oracle()->UnaryType(expr); 6228 if (info.IsUninitialized()) { 6229 AddInstruction(new(zone()) HSoftDeoptimize); 6230 current_block()->MarkAsDeoptimizing(); 6231 } 6232 HInstruction* instr = new(zone()) HBitNot(value); 6233 return ast_context()->ReturnInstruction(instr, expr->id()); 6234} 6235 6236 6237void HGraphBuilder::VisitNot(UnaryOperation* expr) { 6238 if (ast_context()->IsTest()) { 6239 TestContext* context = TestContext::cast(ast_context()); 6240 VisitForControl(expr->expression(), 6241 context->if_false(), 6242 context->if_true()); 6243 return; 6244 } 6245 6246 if (ast_context()->IsEffect()) { 6247 VisitForEffect(expr->expression()); 6248 return; 6249 } 6250 6251 ASSERT(ast_context()->IsValue()); 6252 HBasicBlock* materialize_false = graph()->CreateBasicBlock(); 6253 HBasicBlock* materialize_true = graph()->CreateBasicBlock(); 6254 CHECK_BAILOUT(VisitForControl(expr->expression(), 6255 materialize_false, 6256 materialize_true)); 6257 6258 if (materialize_false->HasPredecessor()) { 6259 materialize_false->SetJoinId(expr->MaterializeFalseId()); 6260 set_current_block(materialize_false); 6261 Push(graph()->GetConstantFalse()); 6262 } else { 6263 materialize_false = NULL; 6264 } 6265 6266 if (materialize_true->HasPredecessor()) { 6267 materialize_true->SetJoinId(expr->MaterializeTrueId()); 6268 set_current_block(materialize_true); 6269 Push(graph()->GetConstantTrue()); 6270 } else { 6271 materialize_true = NULL; 6272 } 6273 6274 HBasicBlock* join = 6275 CreateJoin(materialize_false, materialize_true, expr->id()); 6276 set_current_block(join); 6277 if (join != NULL) return 
ast_context()->ReturnValue(Pop()); 6278} 6279 6280 6281HInstruction* HGraphBuilder::BuildIncrement(bool returns_original_input, 6282 CountOperation* expr) { 6283 // The input to the count operation is on top of the expression stack. 6284 TypeInfo info = oracle()->IncrementType(expr); 6285 Representation rep = ToRepresentation(info); 6286 if (rep.IsTagged()) { 6287 rep = Representation::Integer32(); 6288 } 6289 6290 if (returns_original_input) { 6291 // We need an explicit HValue representing ToNumber(input). The 6292 // actual HChange instruction we need is (sometimes) added in a later 6293 // phase, so it is not available now to be used as an input to HAdd and 6294 // as the return value. 6295 HInstruction* number_input = new(zone()) HForceRepresentation(Pop(), rep); 6296 AddInstruction(number_input); 6297 Push(number_input); 6298 } 6299 6300 // The addition has no side effects, so we do not need 6301 // to simulate the expression stack after this instruction. 6302 // Any later failures deopt to the load of the input or earlier. 6303 HConstant* delta = (expr->op() == Token::INC) 6304 ? graph_->GetConstant1() 6305 : graph_->GetConstantMinus1(); 6306 HValue* context = environment()->LookupContext(); 6307 HInstruction* instr = new(zone()) HAdd(context, Top(), delta); 6308 TraceRepresentation(expr->op(), info, instr, rep); 6309 instr->AssumeRepresentation(rep); 6310 AddInstruction(instr); 6311 return instr; 6312} 6313 6314 6315void HGraphBuilder::VisitCountOperation(CountOperation* expr) { 6316 ASSERT(!HasStackOverflow()); 6317 ASSERT(current_block() != NULL); 6318 ASSERT(current_block()->HasPredecessor()); 6319 Expression* target = expr->expression(); 6320 VariableProxy* proxy = target->AsVariableProxy(); 6321 Property* prop = target->AsProperty(); 6322 if (proxy == NULL && prop == NULL) { 6323 return Bailout("invalid lhs in count operation"); 6324 } 6325 6326 // Match the full code generator stack by simulating an extra stack 6327 // element for postfix operations in a non-effect context. The return 6328 // value is ToNumber(input). 6329 bool returns_original_input = 6330 expr->is_postfix() && !ast_context()->IsEffect(); 6331 HValue* input = NULL; // ToNumber(original_input). 6332 HValue* after = NULL; // The result after incrementing or decrementing. 6333 6334 if (proxy != NULL) { 6335 Variable* var = proxy->var(); 6336 if (var->mode() == CONST) { 6337 return Bailout("unsupported count operation with const"); 6338 } 6339 // Argument of the count operation is a variable, not a property. 6340 ASSERT(prop == NULL); 6341 CHECK_ALIVE(VisitForValue(target)); 6342 6343 after = BuildIncrement(returns_original_input, expr); 6344 input = returns_original_input ? Top() : Pop(); 6345 Push(after); 6346 6347 switch (var->location()) { 6348 case Variable::UNALLOCATED: 6349 HandleGlobalVariableAssignment(var, 6350 after, 6351 expr->position(), 6352 expr->AssignmentId()); 6353 break; 6354 6355 case Variable::PARAMETER: 6356 case Variable::LOCAL: 6357 Bind(var, after); 6358 break; 6359 6360 case Variable::CONTEXT: { 6361 // Bail out if we try to mutate a parameter value in a function 6362 // using the arguments object. We do not (yet) correctly handle the 6363 // arguments property of the function. 6364 if (info()->scope()->arguments() != NULL) { 6365 // Parameters will rewrite to context slots. We have no direct 6366 // way to detect that the variable is a parameter so we use a 6367 // linear search of the parameter list. 
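        // E.g. in "function f(x) { arguments; return x++; }" with x
        // context-allocated, the increment would also have to show up in
        // the arguments object, which we do not model, so we bail out for
        // any formal parameter found below.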
6368 int count = info()->scope()->num_parameters(); 6369 for (int i = 0; i < count; ++i) { 6370 if (var == info()->scope()->parameter(i)) { 6371 return Bailout("assignment to parameter in arguments object"); 6372 } 6373 } 6374 } 6375 6376 HValue* context = BuildContextChainWalk(var); 6377 HStoreContextSlot::Mode mode = 6378 (var->mode() == LET || var->mode() == CONST_HARMONY) 6379 ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck; 6380 HStoreContextSlot* instr = 6381 new(zone()) HStoreContextSlot(context, var->index(), mode, after); 6382 AddInstruction(instr); 6383 if (instr->HasObservableSideEffects()) { 6384 AddSimulate(expr->AssignmentId()); 6385 } 6386 break; 6387 } 6388 6389 case Variable::LOOKUP: 6390 return Bailout("lookup variable in count operation"); 6391 } 6392 6393 } else { 6394 // Argument of the count operation is a property. 6395 ASSERT(prop != NULL); 6396 prop->RecordTypeFeedback(oracle()); 6397 6398 if (prop->key()->IsPropertyName()) { 6399 // Named property. 6400 if (returns_original_input) Push(graph_->GetConstantUndefined()); 6401 6402 CHECK_ALIVE(VisitForValue(prop->obj())); 6403 HValue* obj = Top(); 6404 6405 HInstruction* load = NULL; 6406 if (prop->IsMonomorphic()) { 6407 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName(); 6408 Handle<Map> map = prop->GetReceiverTypes()->first(); 6409 load = BuildLoadNamed(obj, prop, map, name); 6410 } else { 6411 load = BuildLoadNamedGeneric(obj, prop); 6412 } 6413 PushAndAdd(load); 6414 if (load->HasObservableSideEffects()) AddSimulate(expr->CountId()); 6415 6416 after = BuildIncrement(returns_original_input, expr); 6417 input = Pop(); 6418 6419 HInstruction* store = BuildStoreNamed(obj, after, prop); 6420 AddInstruction(store); 6421 6422 // Overwrite the receiver in the bailout environment with the result 6423 // of the operation, and the placeholder with the original value if 6424 // necessary. 6425 environment()->SetExpressionStackAt(0, after); 6426 if (returns_original_input) environment()->SetExpressionStackAt(1, input); 6427 if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId()); 6428 6429 } else { 6430 // Keyed property. 6431 if (returns_original_input) Push(graph_->GetConstantUndefined()); 6432 6433 CHECK_ALIVE(VisitForValue(prop->obj())); 6434 CHECK_ALIVE(VisitForValue(prop->key())); 6435 HValue* obj = environment()->ExpressionStackAt(1); 6436 HValue* key = environment()->ExpressionStackAt(0); 6437 6438 bool has_side_effects = false; 6439 HValue* load = HandleKeyedElementAccess( 6440 obj, key, NULL, prop, expr->CountId(), RelocInfo::kNoPosition, 6441 false, // is_store 6442 &has_side_effects); 6443 Push(load); 6444 if (has_side_effects) AddSimulate(expr->CountId()); 6445 6446 after = BuildIncrement(returns_original_input, expr); 6447 input = Pop(); 6448 6449 expr->RecordTypeFeedback(oracle()); 6450 HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(), 6451 RelocInfo::kNoPosition, 6452 true, // is_store 6453 &has_side_effects); 6454 6455 // Drop the key from the bailout environment. Overwrite the receiver 6456 // with the result of the operation, and the placeholder with the 6457 // original value if necessary. 6458 Drop(1); 6459 environment()->SetExpressionStackAt(0, after); 6460 if (returns_original_input) environment()->SetExpressionStackAt(1, input); 6461 ASSERT(has_side_effects); // Stores always have side effects. 6462 AddSimulate(expr->AssignmentId()); 6463 } 6464 } 6465 6466 Drop(returns_original_input ? 
2 : 1); 6467 return ast_context()->ReturnValue(expr->is_postfix() ? input : after); 6468} 6469 6470 6471HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context, 6472 HValue* string, 6473 HValue* index) { 6474 AddInstruction(new(zone()) HCheckNonSmi(string)); 6475 AddInstruction(HCheckInstanceType::NewIsString(string)); 6476 HStringLength* length = new(zone()) HStringLength(string); 6477 AddInstruction(length); 6478 HInstruction* checked_index = 6479 AddInstruction(new(zone()) HBoundsCheck(index, length)); 6480 return new(zone()) HStringCharCodeAt(context, string, checked_index); 6481} 6482 6483 6484HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr, 6485 HValue* left, 6486 HValue* right) { 6487 HValue* context = environment()->LookupContext(); 6488 TypeInfo info = oracle()->BinaryType(expr); 6489 if (info.IsUninitialized()) { 6490 AddInstruction(new(zone()) HSoftDeoptimize); 6491 current_block()->MarkAsDeoptimizing(); 6492 info = TypeInfo::Unknown(); 6493 } 6494 HInstruction* instr = NULL; 6495 switch (expr->op()) { 6496 case Token::ADD: 6497 if (info.IsString()) { 6498 AddInstruction(new(zone()) HCheckNonSmi(left)); 6499 AddInstruction(HCheckInstanceType::NewIsString(left)); 6500 AddInstruction(new(zone()) HCheckNonSmi(right)); 6501 AddInstruction(HCheckInstanceType::NewIsString(right)); 6502 instr = new(zone()) HStringAdd(context, left, right); 6503 } else { 6504 instr = HAdd::NewHAdd(zone(), context, left, right); 6505 } 6506 break; 6507 case Token::SUB: 6508 instr = HSub::NewHSub(zone(), context, left, right); 6509 break; 6510 case Token::MUL: 6511 instr = HMul::NewHMul(zone(), context, left, right); 6512 break; 6513 case Token::MOD: 6514 instr = HMod::NewHMod(zone(), context, left, right); 6515 break; 6516 case Token::DIV: 6517 instr = HDiv::NewHDiv(zone(), context, left, right); 6518 break; 6519 case Token::BIT_XOR: 6520 case Token::BIT_AND: 6521 case Token::BIT_OR: 6522 instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right); 6523 break; 6524 case Token::SAR: 6525 instr = HSar::NewHSar(zone(), context, left, right); 6526 break; 6527 case Token::SHR: 6528 instr = HShr::NewHShr(zone(), context, left, right); 6529 break; 6530 case Token::SHL: 6531 instr = HShl::NewHShl(zone(), context, left, right); 6532 break; 6533 default: 6534 UNREACHABLE(); 6535 } 6536 6537 // If we hit an uninitialized binary op stub we will get type info 6538 // for a smi operation. If one of the operands is a constant string 6539 // do not generate code assuming it is a smi operation. 6540 if (info.IsSmi() && 6541 ((left->IsConstant() && HConstant::cast(left)->HasStringValue()) || 6542 (right->IsConstant() && HConstant::cast(right)->HasStringValue()))) { 6543 return instr; 6544 } 6545 Representation rep = ToRepresentation(info); 6546 // We only generate either int32 or generic tagged bitwise operations. 6547 if (instr->IsBitwiseBinaryOperation() && rep.IsDouble()) { 6548 rep = Representation::Integer32(); 6549 } 6550 TraceRepresentation(expr->op(), info, instr, rep); 6551 instr->AssumeRepresentation(rep); 6552 return instr; 6553} 6554 6555 6556// Check for the form (%_ClassOf(foo) === 'BarClass'). 
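// This shape is typically produced by the self-hosted JavaScript library
// (e.g. its IS_* macros); recognizing it lets VisitCompareOperation emit an
// HClassOfTestAndBranch instead of a generic strict-equality comparison.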
6557static bool IsClassOfTest(CompareOperation* expr) { 6558 if (expr->op() != Token::EQ_STRICT) return false; 6559 CallRuntime* call = expr->left()->AsCallRuntime(); 6560 if (call == NULL) return false; 6561 Literal* literal = expr->right()->AsLiteral(); 6562 if (literal == NULL) return false; 6563 if (!literal->handle()->IsString()) return false; 6564 if (!call->name()->IsEqualTo(CStrVector("_ClassOf"))) return false; 6565 ASSERT(call->arguments()->length() == 1); 6566 return true; 6567} 6568 6569 6570void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) { 6571 ASSERT(!HasStackOverflow()); 6572 ASSERT(current_block() != NULL); 6573 ASSERT(current_block()->HasPredecessor()); 6574 switch (expr->op()) { 6575 case Token::COMMA: 6576 return VisitComma(expr); 6577 case Token::OR: 6578 case Token::AND: 6579 return VisitLogicalExpression(expr); 6580 default: 6581 return VisitArithmeticExpression(expr); 6582 } 6583} 6584 6585 6586void HGraphBuilder::VisitComma(BinaryOperation* expr) { 6587 CHECK_ALIVE(VisitForEffect(expr->left())); 6588 // Visit the right subexpression in the same AST context as the entire 6589 // expression. 6590 Visit(expr->right()); 6591} 6592 6593 6594void HGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) { 6595 bool is_logical_and = expr->op() == Token::AND; 6596 if (ast_context()->IsTest()) { 6597 TestContext* context = TestContext::cast(ast_context()); 6598 // Translate left subexpression. 6599 HBasicBlock* eval_right = graph()->CreateBasicBlock(); 6600 if (is_logical_and) { 6601 CHECK_BAILOUT(VisitForControl(expr->left(), 6602 eval_right, 6603 context->if_false())); 6604 } else { 6605 CHECK_BAILOUT(VisitForControl(expr->left(), 6606 context->if_true(), 6607 eval_right)); 6608 } 6609 6610 // Translate right subexpression by visiting it in the same AST 6611 // context as the entire expression. 6612 if (eval_right->HasPredecessor()) { 6613 eval_right->SetJoinId(expr->RightId()); 6614 set_current_block(eval_right); 6615 Visit(expr->right()); 6616 } 6617 6618 } else if (ast_context()->IsValue()) { 6619 CHECK_ALIVE(VisitForValue(expr->left())); 6620 ASSERT(current_block() != NULL); 6621 6622 // We need an extra block to maintain edge-split form. 6623 HBasicBlock* empty_block = graph()->CreateBasicBlock(); 6624 HBasicBlock* eval_right = graph()->CreateBasicBlock(); 6625 unsigned test_id = expr->left()->test_id(); 6626 ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id)); 6627 HBranch* test = is_logical_and 6628 ? new(zone()) HBranch(Top(), eval_right, empty_block, expected) 6629 : new(zone()) HBranch(Top(), empty_block, eval_right, expected); 6630 current_block()->Finish(test); 6631 6632 set_current_block(eval_right); 6633 Drop(1); // Value of the left subexpression. 6634 CHECK_BAILOUT(VisitForValue(expr->right())); 6635 6636 HBasicBlock* join_block = 6637 CreateJoin(empty_block, current_block(), expr->id()); 6638 set_current_block(join_block); 6639 return ast_context()->ReturnValue(Pop()); 6640 6641 } else { 6642 ASSERT(ast_context()->IsEffect()); 6643 // In an effect context, we don't need the value of the left subexpression, 6644 // only its control flow and side effects. We need an extra block to 6645 // maintain edge-split form. 
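    // Edge-split form means no block that ends in a branch may jump
    // straight to a block with more than one predecessor, so empty_block
    // splits what would otherwise be a critical edge into the join.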
6646 HBasicBlock* empty_block = graph()->CreateBasicBlock(); 6647 HBasicBlock* right_block = graph()->CreateBasicBlock(); 6648 if (is_logical_and) { 6649 CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block)); 6650 } else { 6651 CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block)); 6652 } 6653 6654 // TODO(kmillikin): Find a way to fix this. It's ugly that there are 6655 // actually two empty blocks (one here and one inserted by 6656 // TestContext::BuildBranch, and that they both have an HSimulate though the 6657 // second one is not a merge node, and that we really have no good AST ID to 6658 // put on that first HSimulate. 6659 6660 if (empty_block->HasPredecessor()) { 6661 empty_block->SetJoinId(expr->id()); 6662 } else { 6663 empty_block = NULL; 6664 } 6665 6666 if (right_block->HasPredecessor()) { 6667 right_block->SetJoinId(expr->RightId()); 6668 set_current_block(right_block); 6669 CHECK_BAILOUT(VisitForEffect(expr->right())); 6670 right_block = current_block(); 6671 } else { 6672 right_block = NULL; 6673 } 6674 6675 HBasicBlock* join_block = 6676 CreateJoin(empty_block, right_block, expr->id()); 6677 set_current_block(join_block); 6678 // We did not materialize any value in the predecessor environments, 6679 // so there is no need to handle it here. 6680 } 6681} 6682 6683 6684void HGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) { 6685 CHECK_ALIVE(VisitForValue(expr->left())); 6686 CHECK_ALIVE(VisitForValue(expr->right())); 6687 HValue* right = Pop(); 6688 HValue* left = Pop(); 6689 HInstruction* instr = BuildBinaryOperation(expr, left, right); 6690 instr->set_position(expr->position()); 6691 return ast_context()->ReturnInstruction(instr, expr->id()); 6692} 6693 6694 6695void HGraphBuilder::TraceRepresentation(Token::Value op, 6696 TypeInfo info, 6697 HValue* value, 6698 Representation rep) { 6699 if (!FLAG_trace_representation) return; 6700 // TODO(svenpanne) Under which circumstances are we actually not flexible? 6701 // At first glance, this looks a bit weird... 6702 bool flexible = value->CheckFlag(HValue::kFlexibleRepresentation); 6703 PrintF("Operation %s has type info %s, %schange representation assumption " 6704 "for %s (ID %d) from %s to %s\n", 6705 Token::Name(op), 6706 info.ToString(), 6707 flexible ? "" : " DO NOT ", 6708 value->Mnemonic(), 6709 graph_->GetMaximumValueID(), 6710 value->representation().Mnemonic(), 6711 rep.Mnemonic()); 6712} 6713 6714 6715Representation HGraphBuilder::ToRepresentation(TypeInfo info) { 6716 if (info.IsSmi()) return Representation::Integer32(); 6717 if (info.IsInteger32()) return Representation::Integer32(); 6718 if (info.IsDouble()) return Representation::Double(); 6719 if (info.IsNumber()) return Representation::Double(); 6720 return Representation::Tagged(); 6721} 6722 6723 6724void HGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr, 6725 HTypeof* typeof_expr, 6726 Handle<String> check) { 6727 // Note: The HTypeof itself is removed during canonicalization, if possible. 
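  // For a test like (typeof x == 'number') we only need to branch on the
  // type of x itself, so the branch takes typeof_expr->value() directly and
  // the HTypeof that would materialize the type string is normally left
  // without uses.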
6728 HValue* value = typeof_expr->value(); 6729 HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check); 6730 instr->set_position(expr->position()); 6731 return ast_context()->ReturnControl(instr, expr->id()); 6732} 6733 6734 6735static bool MatchLiteralCompareNil(HValue* left, 6736 Token::Value op, 6737 HValue* right, 6738 Handle<Object> nil, 6739 HValue** expr) { 6740 if (left->IsConstant() && 6741 HConstant::cast(left)->handle().is_identical_to(nil) && 6742 Token::IsEqualityOp(op)) { 6743 *expr = right; 6744 return true; 6745 } 6746 return false; 6747} 6748 6749 6750static bool MatchLiteralCompareTypeof(HValue* left, 6751 Token::Value op, 6752 HValue* right, 6753 HTypeof** typeof_expr, 6754 Handle<String>* check) { 6755 if (left->IsTypeof() && 6756 Token::IsEqualityOp(op) && 6757 right->IsConstant() && 6758 HConstant::cast(right)->HasStringValue()) { 6759 *typeof_expr = HTypeof::cast(left); 6760 *check = Handle<String>::cast(HConstant::cast(right)->handle()); 6761 return true; 6762 } 6763 return false; 6764} 6765 6766 6767static bool IsLiteralCompareTypeof(HValue* left, 6768 Token::Value op, 6769 HValue* right, 6770 HTypeof** typeof_expr, 6771 Handle<String>* check) { 6772 return MatchLiteralCompareTypeof(left, op, right, typeof_expr, check) || 6773 MatchLiteralCompareTypeof(right, op, left, typeof_expr, check); 6774} 6775 6776 6777static bool IsLiteralCompareNil(HValue* left, 6778 Token::Value op, 6779 HValue* right, 6780 Handle<Object> nil, 6781 HValue** expr) { 6782 return MatchLiteralCompareNil(left, op, right, nil, expr) || 6783 MatchLiteralCompareNil(right, op, left, nil, expr); 6784} 6785 6786 6787static bool IsLiteralCompareBool(HValue* left, 6788 Token::Value op, 6789 HValue* right) { 6790 return op == Token::EQ_STRICT && 6791 ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) || 6792 (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean())); 6793} 6794 6795 6796void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) { 6797 ASSERT(!HasStackOverflow()); 6798 ASSERT(current_block() != NULL); 6799 ASSERT(current_block()->HasPredecessor()); 6800 if (IsClassOfTest(expr)) { 6801 CallRuntime* call = expr->left()->AsCallRuntime(); 6802 ASSERT(call->arguments()->length() == 1); 6803 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 6804 HValue* value = Pop(); 6805 Literal* literal = expr->right()->AsLiteral(); 6806 Handle<String> rhs = Handle<String>::cast(literal->handle()); 6807 HClassOfTestAndBranch* instr = 6808 new(zone()) HClassOfTestAndBranch(value, rhs); 6809 instr->set_position(expr->position()); 6810 return ast_context()->ReturnControl(instr, expr->id()); 6811 } 6812 6813 TypeInfo type_info = oracle()->CompareType(expr); 6814 // Check if this expression was ever executed according to type feedback. 6815 // Note that for the special typeof/null/undefined cases we get unknown here. 
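  // Without any recorded feedback we insert a soft deoptimization, so that if
  // this comparison is ever reached we return to full code, collect type
  // feedback, and only then specialize it on a later optimization attempt.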
6816 if (type_info.IsUninitialized()) { 6817 AddInstruction(new(zone()) HSoftDeoptimize); 6818 current_block()->MarkAsDeoptimizing(); 6819 type_info = TypeInfo::Unknown(); 6820 } 6821 6822 CHECK_ALIVE(VisitForValue(expr->left())); 6823 CHECK_ALIVE(VisitForValue(expr->right())); 6824 6825 HValue* context = environment()->LookupContext(); 6826 HValue* right = Pop(); 6827 HValue* left = Pop(); 6828 Token::Value op = expr->op(); 6829 6830 HTypeof* typeof_expr = NULL; 6831 Handle<String> check; 6832 if (IsLiteralCompareTypeof(left, op, right, &typeof_expr, &check)) { 6833 return HandleLiteralCompareTypeof(expr, typeof_expr, check); 6834 } 6835 HValue* sub_expr = NULL; 6836 Factory* f = graph()->isolate()->factory(); 6837 if (IsLiteralCompareNil(left, op, right, f->undefined_value(), &sub_expr)) { 6838 return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue); 6839 } 6840 if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) { 6841 return HandleLiteralCompareNil(expr, sub_expr, kNullValue); 6842 } 6843 if (IsLiteralCompareBool(left, op, right)) { 6844 HCompareObjectEqAndBranch* result = 6845 new(zone()) HCompareObjectEqAndBranch(left, right); 6846 result->set_position(expr->position()); 6847 return ast_context()->ReturnControl(result, expr->id()); 6848 } 6849 6850 if (op == Token::INSTANCEOF) { 6851 // Check to see if the rhs of the instanceof is a global function not 6852 // residing in new space. If it is we assume that the function will stay the 6853 // same. 6854 Handle<JSFunction> target = Handle<JSFunction>::null(); 6855 VariableProxy* proxy = expr->right()->AsVariableProxy(); 6856 bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated(); 6857 if (global_function && 6858 info()->has_global_object() && 6859 !info()->global_object()->IsAccessCheckNeeded()) { 6860 Handle<String> name = proxy->name(); 6861 Handle<GlobalObject> global(info()->global_object()); 6862 LookupResult lookup(isolate()); 6863 global->Lookup(*name, &lookup); 6864 if (lookup.IsFound() && 6865 lookup.type() == NORMAL && 6866 lookup.GetValue()->IsJSFunction()) { 6867 Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue())); 6868 // If the function is in new space we assume it's more likely to 6869 // change and thus prefer the general IC code. 6870 if (!isolate()->heap()->InNewSpace(*candidate)) { 6871 target = candidate; 6872 } 6873 } 6874 } 6875 6876 // If the target is not null we have found a known global function that is 6877 // assumed to stay the same for this instanceof. 6878 if (target.is_null()) { 6879 HInstanceOf* result = new(zone()) HInstanceOf(context, left, right); 6880 result->set_position(expr->position()); 6881 return ast_context()->ReturnInstruction(result, expr->id()); 6882 } else { 6883 AddInstruction(new(zone()) HCheckFunction(right, target)); 6884 HInstanceOfKnownGlobal* result = 6885 new(zone()) HInstanceOfKnownGlobal(context, left, target); 6886 result->set_position(expr->position()); 6887 return ast_context()->ReturnInstruction(result, expr->id()); 6888 } 6889 } else if (op == Token::IN) { 6890 HIn* result = new(zone()) HIn(context, left, right); 6891 result->set_position(expr->position()); 6892 return ast_context()->ReturnInstruction(result, expr->id()); 6893 } else if (type_info.IsNonPrimitive()) { 6894 switch (op) { 6895 case Token::EQ: 6896 case Token::EQ_STRICT: { 6897 // Can we get away with map check and not instance type check? 
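        // If type feedback recorded a single receiver map for this compare,
        // checking that map (plus a non-smi check) is at least as strong as
        // the generic spec-object instance type check in the else branch.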
6898 Handle<Map> map = oracle()->GetCompareMap(expr); 6899 if (!map.is_null()) { 6900 AddInstruction(new(zone()) HCheckNonSmi(left)); 6901 AddInstruction(new(zone()) HCheckMap(left, map, NULL, 6902 ALLOW_ELEMENT_TRANSITION_MAPS)); 6903 AddInstruction(new(zone()) HCheckNonSmi(right)); 6904 AddInstruction(new(zone()) HCheckMap(right, map, NULL, 6905 ALLOW_ELEMENT_TRANSITION_MAPS)); 6906 HCompareObjectEqAndBranch* result = 6907 new(zone()) HCompareObjectEqAndBranch(left, right); 6908 result->set_position(expr->position()); 6909 return ast_context()->ReturnControl(result, expr->id()); 6910 } else { 6911 AddInstruction(new(zone()) HCheckNonSmi(left)); 6912 AddInstruction(HCheckInstanceType::NewIsSpecObject(left)); 6913 AddInstruction(new(zone()) HCheckNonSmi(right)); 6914 AddInstruction(HCheckInstanceType::NewIsSpecObject(right)); 6915 HCompareObjectEqAndBranch* result = 6916 new(zone()) HCompareObjectEqAndBranch(left, right); 6917 result->set_position(expr->position()); 6918 return ast_context()->ReturnControl(result, expr->id()); 6919 } 6920 } 6921 default: 6922 return Bailout("Unsupported non-primitive compare"); 6923 } 6924 } else if (type_info.IsString() && oracle()->IsSymbolCompare(expr) && 6925 (op == Token::EQ || op == Token::EQ_STRICT)) { 6926 AddInstruction(new(zone()) HCheckNonSmi(left)); 6927 AddInstruction(HCheckInstanceType::NewIsSymbol(left)); 6928 AddInstruction(new(zone()) HCheckNonSmi(right)); 6929 AddInstruction(HCheckInstanceType::NewIsSymbol(right)); 6930 HCompareObjectEqAndBranch* result = 6931 new(zone()) HCompareObjectEqAndBranch(left, right); 6932 result->set_position(expr->position()); 6933 return ast_context()->ReturnControl(result, expr->id()); 6934 } else { 6935 Representation r = ToRepresentation(type_info); 6936 if (r.IsTagged()) { 6937 HCompareGeneric* result = 6938 new(zone()) HCompareGeneric(context, left, right, op); 6939 result->set_position(expr->position()); 6940 return ast_context()->ReturnInstruction(result, expr->id()); 6941 } else { 6942 HCompareIDAndBranch* result = 6943 new(zone()) HCompareIDAndBranch(left, right, op); 6944 result->set_position(expr->position()); 6945 result->SetInputRepresentation(r); 6946 return ast_context()->ReturnControl(result, expr->id()); 6947 } 6948 } 6949} 6950 6951 6952void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr, 6953 HValue* value, 6954 NilValue nil) { 6955 ASSERT(!HasStackOverflow()); 6956 ASSERT(current_block() != NULL); 6957 ASSERT(current_block()->HasPredecessor()); 6958 EqualityKind kind = 6959 expr->op() == Token::EQ_STRICT ? 
kStrictEquality : kNonStrictEquality; 6960 HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil); 6961 instr->set_position(expr->position()); 6962 return ast_context()->ReturnControl(instr, expr->id()); 6963} 6964 6965 6966void HGraphBuilder::VisitThisFunction(ThisFunction* expr) { 6967 ASSERT(!HasStackOverflow()); 6968 ASSERT(current_block() != NULL); 6969 ASSERT(current_block()->HasPredecessor()); 6970 HThisFunction* self = new(zone()) HThisFunction( 6971 function_state()->compilation_info()->closure()); 6972 return ast_context()->ReturnInstruction(self, expr->id()); 6973} 6974 6975 6976void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) { 6977 int length = declarations->length(); 6978 int global_count = 0; 6979 for (int i = 0; i < declarations->length(); i++) { 6980 Declaration* decl = declarations->at(i); 6981 FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration(); 6982 HandleDeclaration(decl->proxy(), 6983 decl->mode(), 6984 fun_decl != NULL ? fun_decl->fun() : NULL, 6985 &global_count); 6986 } 6987 6988 // Batch declare global functions and variables. 6989 if (global_count > 0) { 6990 Handle<FixedArray> array = 6991 isolate()->factory()->NewFixedArray(2 * global_count, TENURED); 6992 for (int j = 0, i = 0; i < length; i++) { 6993 Declaration* decl = declarations->at(i); 6994 Variable* var = decl->proxy()->var(); 6995 6996 if (var->IsUnallocated()) { 6997 array->set(j++, *(var->name())); 6998 FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration(); 6999 if (fun_decl == NULL) { 7000 if (var->binding_needs_init()) { 7001 // In case this binding needs initialization use the hole. 7002 array->set_the_hole(j++); 7003 } else { 7004 array->set_undefined(j++); 7005 } 7006 } else { 7007 Handle<SharedFunctionInfo> function = 7008 Compiler::BuildFunctionInfo(fun_decl->fun(), info()->script()); 7009 // Check for stack-overflow exception. 
7010 if (function.is_null()) { 7011 SetStackOverflow(); 7012 return; 7013 } 7014 array->set(j++, *function); 7015 } 7016 } 7017 } 7018 int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) | 7019 DeclareGlobalsNativeFlag::encode(info()->is_native()) | 7020 DeclareGlobalsLanguageMode::encode(info()->language_mode()); 7021 HInstruction* result = 7022 new(zone()) HDeclareGlobals(environment()->LookupContext(), 7023 array, 7024 flags); 7025 AddInstruction(result); 7026 } 7027} 7028 7029 7030void HGraphBuilder::HandleDeclaration(VariableProxy* proxy, 7031 VariableMode mode, 7032 FunctionLiteral* function, 7033 int* global_count) { 7034 Variable* var = proxy->var(); 7035 bool binding_needs_init = 7036 (mode == CONST || mode == CONST_HARMONY || mode == LET); 7037 switch (var->location()) { 7038 case Variable::UNALLOCATED: 7039 ++(*global_count); 7040 return; 7041 case Variable::PARAMETER: 7042 case Variable::LOCAL: 7043 case Variable::CONTEXT: 7044 if (binding_needs_init || function != NULL) { 7045 HValue* value = NULL; 7046 if (function != NULL) { 7047 CHECK_ALIVE(VisitForValue(function)); 7048 value = Pop(); 7049 } else { 7050 value = graph()->GetConstantHole(); 7051 } 7052 if (var->IsContextSlot()) { 7053 HValue* context = environment()->LookupContext(); 7054 HStoreContextSlot* store = new HStoreContextSlot( 7055 context, var->index(), HStoreContextSlot::kNoCheck, value); 7056 AddInstruction(store); 7057 if (store->HasObservableSideEffects()) AddSimulate(proxy->id()); 7058 } else { 7059 environment()->Bind(var, value); 7060 } 7061 } 7062 break; 7063 case Variable::LOOKUP: 7064 return Bailout("unsupported lookup slot in declaration"); 7065 } 7066} 7067 7068 7069void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) { 7070 UNREACHABLE(); 7071} 7072 7073 7074void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* decl) { 7075 UNREACHABLE(); 7076} 7077 7078 7079void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) { 7080 UNREACHABLE(); 7081} 7082 7083 7084void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* decl) { 7085 UNREACHABLE(); 7086} 7087 7088 7089void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* decl) { 7090 UNREACHABLE(); 7091} 7092 7093 7094void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) { 7095 // TODO(rossberg) 7096} 7097 7098 7099void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) { 7100 // TODO(rossberg) 7101} 7102 7103 7104void HGraphBuilder::VisitModulePath(ModulePath* module) { 7105 // TODO(rossberg) 7106} 7107 7108 7109void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) { 7110 // TODO(rossberg) 7111} 7112 7113 7114// Generators for inline runtime functions. 7115// Support for types. 
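// Each generator below expands a %_Foo intrinsic: it visits the call's
// arguments, pops the resulting values, and returns either a control
// instruction (via ReturnControl, so a surrounding test can branch on it
// directly) or an ordinary value-producing instruction.  For example,
// `%_IsSmi(x)` in a builtin becomes a single HIsSmiAndBranch on x.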
7116void HGraphBuilder::GenerateIsSmi(CallRuntime* call) { 7117 ASSERT(call->arguments()->length() == 1); 7118 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7119 HValue* value = Pop(); 7120 HIsSmiAndBranch* result = new(zone()) HIsSmiAndBranch(value); 7121 return ast_context()->ReturnControl(result, call->id()); 7122} 7123 7124 7125void HGraphBuilder::GenerateIsSpecObject(CallRuntime* call) { 7126 ASSERT(call->arguments()->length() == 1); 7127 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7128 HValue* value = Pop(); 7129 HHasInstanceTypeAndBranch* result = 7130 new(zone()) HHasInstanceTypeAndBranch(value, 7131 FIRST_SPEC_OBJECT_TYPE, 7132 LAST_SPEC_OBJECT_TYPE); 7133 return ast_context()->ReturnControl(result, call->id()); 7134} 7135 7136 7137void HGraphBuilder::GenerateIsFunction(CallRuntime* call) { 7138 ASSERT(call->arguments()->length() == 1); 7139 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7140 HValue* value = Pop(); 7141 HHasInstanceTypeAndBranch* result = 7142 new(zone()) HHasInstanceTypeAndBranch(value, JS_FUNCTION_TYPE); 7143 return ast_context()->ReturnControl(result, call->id()); 7144} 7145 7146 7147void HGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) { 7148 ASSERT(call->arguments()->length() == 1); 7149 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7150 HValue* value = Pop(); 7151 HHasCachedArrayIndexAndBranch* result = 7152 new(zone()) HHasCachedArrayIndexAndBranch(value); 7153 return ast_context()->ReturnControl(result, call->id()); 7154} 7155 7156 7157void HGraphBuilder::GenerateIsArray(CallRuntime* call) { 7158 ASSERT(call->arguments()->length() == 1); 7159 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7160 HValue* value = Pop(); 7161 HHasInstanceTypeAndBranch* result = 7162 new(zone()) HHasInstanceTypeAndBranch(value, JS_ARRAY_TYPE); 7163 return ast_context()->ReturnControl(result, call->id()); 7164} 7165 7166 7167void HGraphBuilder::GenerateIsRegExp(CallRuntime* call) { 7168 ASSERT(call->arguments()->length() == 1); 7169 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7170 HValue* value = Pop(); 7171 HHasInstanceTypeAndBranch* result = 7172 new(zone()) HHasInstanceTypeAndBranch(value, JS_REGEXP_TYPE); 7173 return ast_context()->ReturnControl(result, call->id()); 7174} 7175 7176 7177void HGraphBuilder::GenerateIsObject(CallRuntime* call) { 7178 ASSERT(call->arguments()->length() == 1); 7179 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7180 HValue* value = Pop(); 7181 HIsObjectAndBranch* result = new(zone()) HIsObjectAndBranch(value); 7182 return ast_context()->ReturnControl(result, call->id()); 7183} 7184 7185 7186void HGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) { 7187 return Bailout("inlined runtime function: IsNonNegativeSmi"); 7188} 7189 7190 7191void HGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) { 7192 ASSERT(call->arguments()->length() == 1); 7193 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7194 HValue* value = Pop(); 7195 HIsUndetectableAndBranch* result = 7196 new(zone()) HIsUndetectableAndBranch(value); 7197 return ast_context()->ReturnControl(result, call->id()); 7198} 7199 7200 7201void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf( 7202 CallRuntime* call) { 7203 return Bailout( 7204 "inlined runtime function: IsStringWrapperSafeForDefaultValueOf"); 7205} 7206 7207 7208// Support for construct call checks. 
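// %_IsConstructCall() lets builtins distinguish `new F()` from a plain call.
// When the surrounding function is being inlined the answer is already known
// at compile time and folds to a constant; otherwise an explicit branch
// instruction inspects the calling frame at run time.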
void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 0);
  if (function_state()->outer() != NULL) {
    // We are generating the graph for an inlined function.
    HValue* value = function_state()->is_construct()
        ? graph()->GetConstantTrue()
        : graph()->GetConstantFalse();
    return ast_context()->ReturnValue(value);
  } else {
    return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
                                        call->id());
  }
}


// Support for arguments.length and arguments[?].
void HGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 0);
  HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
  HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
  return ast_context()->ReturnInstruction(result, call->id());
}


void HGraphBuilder::GenerateArguments(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* index = Pop();
  HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
  HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
  HAccessArgumentsAt* result =
      new(zone()) HAccessArgumentsAt(elements, length, index);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Support for accessing the class and value fields of an object.
void HGraphBuilder::GenerateClassOf(CallRuntime* call) {
  // The special form recognized by IsClassOfTest is handled before we get
  // here and does not cause a bailout.
  return Bailout("inlined runtime function: ClassOf");
}


void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HValueOf* result = new(zone()) HValueOf(value);
  return ast_context()->ReturnInstruction(result, call->id());
}


void HGraphBuilder::GenerateDateField(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->handle()));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* date = Pop();
  HDateField* result = new(zone()) HDateField(date, index);
  return ast_context()->ReturnInstruction(result, call->id());
}


void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* value = Pop();
  HValue* object = Pop();
  // Check whether the object is a smi.
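  // The graph built below is a double diamond: smis and non-JSValue objects
  // jump straight to the join block, and only genuine JSValue wrappers reach
  // the in-object store to kValueOffset.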
7288 HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(object); 7289 HBasicBlock* if_smi = graph()->CreateBasicBlock(); 7290 HBasicBlock* if_heap_object = graph()->CreateBasicBlock(); 7291 HBasicBlock* join = graph()->CreateBasicBlock(); 7292 smicheck->SetSuccessorAt(0, if_smi); 7293 smicheck->SetSuccessorAt(1, if_heap_object); 7294 current_block()->Finish(smicheck); 7295 if_smi->Goto(join); 7296 7297 // Check if object is a JSValue. 7298 set_current_block(if_heap_object); 7299 HHasInstanceTypeAndBranch* typecheck = 7300 new(zone()) HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE); 7301 HBasicBlock* if_js_value = graph()->CreateBasicBlock(); 7302 HBasicBlock* not_js_value = graph()->CreateBasicBlock(); 7303 typecheck->SetSuccessorAt(0, if_js_value); 7304 typecheck->SetSuccessorAt(1, not_js_value); 7305 current_block()->Finish(typecheck); 7306 not_js_value->Goto(join); 7307 7308 // Create in-object property store to kValueOffset. 7309 set_current_block(if_js_value); 7310 Handle<String> name = isolate()->factory()->undefined_symbol(); 7311 AddInstruction(new HStoreNamedField(object, 7312 name, 7313 value, 7314 true, // in-object store. 7315 JSValue::kValueOffset)); 7316 if_js_value->Goto(join); 7317 join->SetJoinId(call->id()); 7318 set_current_block(join); 7319 return ast_context()->ReturnValue(value); 7320} 7321 7322 7323// Fast support for charCodeAt(n). 7324void HGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) { 7325 ASSERT(call->arguments()->length() == 2); 7326 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7327 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 7328 HValue* index = Pop(); 7329 HValue* string = Pop(); 7330 HValue* context = environment()->LookupContext(); 7331 HStringCharCodeAt* result = BuildStringCharCodeAt(context, string, index); 7332 return ast_context()->ReturnInstruction(result, call->id()); 7333} 7334 7335 7336// Fast support for string.charAt(n) and string[n]. 7337void HGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) { 7338 ASSERT(call->arguments()->length() == 1); 7339 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7340 HValue* char_code = Pop(); 7341 HValue* context = environment()->LookupContext(); 7342 HStringCharFromCode* result = 7343 new(zone()) HStringCharFromCode(context, char_code); 7344 return ast_context()->ReturnInstruction(result, call->id()); 7345} 7346 7347 7348// Fast support for string.charAt(n) and string[n]. 7349void HGraphBuilder::GenerateStringCharAt(CallRuntime* call) { 7350 ASSERT(call->arguments()->length() == 2); 7351 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7352 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 7353 HValue* index = Pop(); 7354 HValue* string = Pop(); 7355 HValue* context = environment()->LookupContext(); 7356 HStringCharCodeAt* char_code = BuildStringCharCodeAt(context, string, index); 7357 AddInstruction(char_code); 7358 HStringCharFromCode* result = 7359 new(zone()) HStringCharFromCode(context, char_code); 7360 return ast_context()->ReturnInstruction(result, call->id()); 7361} 7362 7363 7364// Fast support for object equality testing. 
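// %_ObjectEquals(a, b) compares by identity only, so it maps directly onto
// HCompareObjectEqAndBranch without any smi or instance type checks.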
7365void HGraphBuilder::GenerateObjectEquals(CallRuntime* call) { 7366 ASSERT(call->arguments()->length() == 2); 7367 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7368 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 7369 HValue* right = Pop(); 7370 HValue* left = Pop(); 7371 HCompareObjectEqAndBranch* result = 7372 new(zone()) HCompareObjectEqAndBranch(left, right); 7373 return ast_context()->ReturnControl(result, call->id()); 7374} 7375 7376 7377void HGraphBuilder::GenerateLog(CallRuntime* call) { 7378 // %_Log is ignored in optimized code. 7379 return ast_context()->ReturnValue(graph()->GetConstantUndefined()); 7380} 7381 7382 7383// Fast support for Math.random(). 7384void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) { 7385 HValue* context = environment()->LookupContext(); 7386 HGlobalObject* global_object = new(zone()) HGlobalObject(context); 7387 AddInstruction(global_object); 7388 HRandom* result = new(zone()) HRandom(global_object); 7389 return ast_context()->ReturnInstruction(result, call->id()); 7390} 7391 7392 7393// Fast support for StringAdd. 7394void HGraphBuilder::GenerateStringAdd(CallRuntime* call) { 7395 ASSERT_EQ(2, call->arguments()->length()); 7396 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7397 HValue* context = environment()->LookupContext(); 7398 HCallStub* result = new(zone()) HCallStub(context, CodeStub::StringAdd, 2); 7399 Drop(2); 7400 return ast_context()->ReturnInstruction(result, call->id()); 7401} 7402 7403 7404// Fast support for SubString. 7405void HGraphBuilder::GenerateSubString(CallRuntime* call) { 7406 ASSERT_EQ(3, call->arguments()->length()); 7407 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7408 HValue* context = environment()->LookupContext(); 7409 HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3); 7410 Drop(3); 7411 return ast_context()->ReturnInstruction(result, call->id()); 7412} 7413 7414 7415// Fast support for StringCompare. 7416void HGraphBuilder::GenerateStringCompare(CallRuntime* call) { 7417 ASSERT_EQ(2, call->arguments()->length()); 7418 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7419 HValue* context = environment()->LookupContext(); 7420 HCallStub* result = 7421 new(zone()) HCallStub(context, CodeStub::StringCompare, 2); 7422 Drop(2); 7423 return ast_context()->ReturnInstruction(result, call->id()); 7424} 7425 7426 7427// Support for direct calls from JavaScript to native RegExp code. 7428void HGraphBuilder::GenerateRegExpExec(CallRuntime* call) { 7429 ASSERT_EQ(4, call->arguments()->length()); 7430 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7431 HValue* context = environment()->LookupContext(); 7432 HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4); 7433 Drop(4); 7434 return ast_context()->ReturnInstruction(result, call->id()); 7435} 7436 7437 7438// Construct a RegExp exec result with two in-object properties. 7439void HGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) { 7440 ASSERT_EQ(3, call->arguments()->length()); 7441 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7442 HValue* context = environment()->LookupContext(); 7443 HCallStub* result = 7444 new(zone()) HCallStub(context, CodeStub::RegExpConstructResult, 3); 7445 Drop(3); 7446 return ast_context()->ReturnInstruction(result, call->id()); 7447} 7448 7449 7450// Support for fast native caches. 
7451void HGraphBuilder::GenerateGetFromCache(CallRuntime* call) { 7452 return Bailout("inlined runtime function: GetFromCache"); 7453} 7454 7455 7456// Fast support for number to string. 7457void HGraphBuilder::GenerateNumberToString(CallRuntime* call) { 7458 ASSERT_EQ(1, call->arguments()->length()); 7459 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7460 HValue* context = environment()->LookupContext(); 7461 HCallStub* result = 7462 new(zone()) HCallStub(context, CodeStub::NumberToString, 1); 7463 Drop(1); 7464 return ast_context()->ReturnInstruction(result, call->id()); 7465} 7466 7467 7468// Fast call for custom callbacks. 7469void HGraphBuilder::GenerateCallFunction(CallRuntime* call) { 7470 // 1 ~ The function to call is not itself an argument to the call. 7471 int arg_count = call->arguments()->length() - 1; 7472 ASSERT(arg_count >= 1); // There's always at least a receiver. 7473 7474 for (int i = 0; i < arg_count; ++i) { 7475 CHECK_ALIVE(VisitArgument(call->arguments()->at(i))); 7476 } 7477 CHECK_ALIVE(VisitForValue(call->arguments()->last())); 7478 7479 HValue* function = Pop(); 7480 HValue* context = environment()->LookupContext(); 7481 7482 // Branch for function proxies, or other non-functions. 7483 HHasInstanceTypeAndBranch* typecheck = 7484 new(zone()) HHasInstanceTypeAndBranch(function, JS_FUNCTION_TYPE); 7485 HBasicBlock* if_jsfunction = graph()->CreateBasicBlock(); 7486 HBasicBlock* if_nonfunction = graph()->CreateBasicBlock(); 7487 HBasicBlock* join = graph()->CreateBasicBlock(); 7488 typecheck->SetSuccessorAt(0, if_jsfunction); 7489 typecheck->SetSuccessorAt(1, if_nonfunction); 7490 current_block()->Finish(typecheck); 7491 7492 set_current_block(if_jsfunction); 7493 HInstruction* invoke_result = AddInstruction( 7494 new(zone()) HInvokeFunction(context, function, arg_count)); 7495 Drop(arg_count); 7496 Push(invoke_result); 7497 if_jsfunction->Goto(join); 7498 7499 set_current_block(if_nonfunction); 7500 HInstruction* call_result = AddInstruction( 7501 new(zone()) HCallFunction(context, function, arg_count)); 7502 Drop(arg_count); 7503 Push(call_result); 7504 if_nonfunction->Goto(join); 7505 7506 set_current_block(join); 7507 join->SetJoinId(call->id()); 7508 return ast_context()->ReturnValue(Pop()); 7509} 7510 7511 7512// Fast call to math functions. 
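// %_MathPow gets a dedicated HPower instruction; the sin, cos, tan and log
// intrinsics below go through the TranscendentalCache stub, and %_MathSqrt is
// not handled here at all (it triggers a bailout).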
7513void HGraphBuilder::GenerateMathPow(CallRuntime* call) { 7514 ASSERT_EQ(2, call->arguments()->length()); 7515 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7516 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 7517 HValue* right = Pop(); 7518 HValue* left = Pop(); 7519 HPower* result = new(zone()) HPower(left, right); 7520 return ast_context()->ReturnInstruction(result, call->id()); 7521} 7522 7523 7524void HGraphBuilder::GenerateMathSin(CallRuntime* call) { 7525 ASSERT_EQ(1, call->arguments()->length()); 7526 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7527 HValue* context = environment()->LookupContext(); 7528 HCallStub* result = 7529 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1); 7530 result->set_transcendental_type(TranscendentalCache::SIN); 7531 Drop(1); 7532 return ast_context()->ReturnInstruction(result, call->id()); 7533} 7534 7535 7536void HGraphBuilder::GenerateMathCos(CallRuntime* call) { 7537 ASSERT_EQ(1, call->arguments()->length()); 7538 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7539 HValue* context = environment()->LookupContext(); 7540 HCallStub* result = 7541 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1); 7542 result->set_transcendental_type(TranscendentalCache::COS); 7543 Drop(1); 7544 return ast_context()->ReturnInstruction(result, call->id()); 7545} 7546 7547 7548void HGraphBuilder::GenerateMathTan(CallRuntime* call) { 7549 ASSERT_EQ(1, call->arguments()->length()); 7550 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7551 HValue* context = environment()->LookupContext(); 7552 HCallStub* result = 7553 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1); 7554 result->set_transcendental_type(TranscendentalCache::TAN); 7555 Drop(1); 7556 return ast_context()->ReturnInstruction(result, call->id()); 7557} 7558 7559 7560void HGraphBuilder::GenerateMathLog(CallRuntime* call) { 7561 ASSERT_EQ(1, call->arguments()->length()); 7562 CHECK_ALIVE(VisitArgumentList(call->arguments())); 7563 HValue* context = environment()->LookupContext(); 7564 HCallStub* result = 7565 new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1); 7566 result->set_transcendental_type(TranscendentalCache::LOG); 7567 Drop(1); 7568 return ast_context()->ReturnInstruction(result, call->id()); 7569} 7570 7571 7572void HGraphBuilder::GenerateMathSqrt(CallRuntime* call) { 7573 return Bailout("inlined runtime function: MathSqrt"); 7574} 7575 7576 7577// Check whether two RegExps are equivalent 7578void HGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) { 7579 return Bailout("inlined runtime function: IsRegExpEquivalent"); 7580} 7581 7582 7583void HGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) { 7584 ASSERT(call->arguments()->length() == 1); 7585 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 7586 HValue* value = Pop(); 7587 HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value); 7588 return ast_context()->ReturnInstruction(result, call->id()); 7589} 7590 7591 7592void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) { 7593 return Bailout("inlined runtime function: FastAsciiArrayJoin"); 7594} 7595 7596 7597#undef CHECK_BAILOUT 7598#undef CHECK_ALIVE 7599 7600 7601HEnvironment::HEnvironment(HEnvironment* outer, 7602 Scope* scope, 7603 Handle<JSFunction> closure) 7604 : closure_(closure), 7605 values_(0), 7606 assigned_variables_(4), 7607 frame_type_(JS_FUNCTION), 7608 parameter_count_(0), 7609 specials_count_(1), 7610 local_count_(0), 7611 outer_(outer), 7612 
pop_count_(0), 7613 push_count_(0), 7614 ast_id_(AstNode::kNoNumber) { 7615 Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0); 7616} 7617 7618 7619HEnvironment::HEnvironment(const HEnvironment* other) 7620 : values_(0), 7621 assigned_variables_(0), 7622 frame_type_(JS_FUNCTION), 7623 parameter_count_(0), 7624 specials_count_(1), 7625 local_count_(0), 7626 outer_(NULL), 7627 pop_count_(0), 7628 push_count_(0), 7629 ast_id_(other->ast_id()) { 7630 Initialize(other); 7631} 7632 7633 7634HEnvironment::HEnvironment(HEnvironment* outer, 7635 Handle<JSFunction> closure, 7636 FrameType frame_type, 7637 int arguments) 7638 : closure_(closure), 7639 values_(arguments), 7640 assigned_variables_(0), 7641 frame_type_(frame_type), 7642 parameter_count_(arguments), 7643 local_count_(0), 7644 outer_(outer), 7645 pop_count_(0), 7646 push_count_(0), 7647 ast_id_(AstNode::kNoNumber) { 7648} 7649 7650 7651void HEnvironment::Initialize(int parameter_count, 7652 int local_count, 7653 int stack_height) { 7654 parameter_count_ = parameter_count; 7655 local_count_ = local_count; 7656 7657 // Avoid reallocating the temporaries' backing store on the first Push. 7658 int total = parameter_count + specials_count_ + local_count + stack_height; 7659 values_.Initialize(total + 4); 7660 for (int i = 0; i < total; ++i) values_.Add(NULL); 7661} 7662 7663 7664void HEnvironment::Initialize(const HEnvironment* other) { 7665 closure_ = other->closure(); 7666 values_.AddAll(other->values_); 7667 assigned_variables_.AddAll(other->assigned_variables_); 7668 frame_type_ = other->frame_type_; 7669 parameter_count_ = other->parameter_count_; 7670 local_count_ = other->local_count_; 7671 if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy. 7672 pop_count_ = other->pop_count_; 7673 push_count_ = other->push_count_; 7674 ast_id_ = other->ast_id_; 7675} 7676 7677 7678void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) { 7679 ASSERT(!block->IsLoopHeader()); 7680 ASSERT(values_.length() == other->values_.length()); 7681 7682 int length = values_.length(); 7683 for (int i = 0; i < length; ++i) { 7684 HValue* value = values_[i]; 7685 if (value != NULL && value->IsPhi() && value->block() == block) { 7686 // There is already a phi for the i'th value. 7687 HPhi* phi = HPhi::cast(value); 7688 // Assert index is correct and that we haven't missed an incoming edge. 7689 ASSERT(phi->merged_index() == i); 7690 ASSERT(phi->OperandCount() == block->predecessors()->length()); 7691 phi->AddInput(other->values_[i]); 7692 } else if (values_[i] != other->values_[i]) { 7693 // There is a fresh value on the incoming edge, a phi is needed. 
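      // The new phi gets one input per existing predecessor, all carrying the
      // current value, plus a final input for the value arriving on the new
      // edge, so its operand count matches the new predecessor count.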
7694 ASSERT(values_[i] != NULL && other->values_[i] != NULL); 7695 HPhi* phi = new(block->zone()) HPhi(i); 7696 HValue* old_value = values_[i]; 7697 for (int j = 0; j < block->predecessors()->length(); j++) { 7698 phi->AddInput(old_value); 7699 } 7700 phi->AddInput(other->values_[i]); 7701 this->values_[i] = phi; 7702 block->AddPhi(phi); 7703 } 7704 } 7705} 7706 7707 7708void HEnvironment::Bind(int index, HValue* value) { 7709 ASSERT(value != NULL); 7710 if (!assigned_variables_.Contains(index)) { 7711 assigned_variables_.Add(index); 7712 } 7713 values_[index] = value; 7714} 7715 7716 7717bool HEnvironment::HasExpressionAt(int index) const { 7718 return index >= parameter_count_ + specials_count_ + local_count_; 7719} 7720 7721 7722bool HEnvironment::ExpressionStackIsEmpty() const { 7723 ASSERT(length() >= first_expression_index()); 7724 return length() == first_expression_index(); 7725} 7726 7727 7728void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) { 7729 int count = index_from_top + 1; 7730 int index = values_.length() - count; 7731 ASSERT(HasExpressionAt(index)); 7732 // The push count must include at least the element in question or else 7733 // the new value will not be included in this environment's history. 7734 if (push_count_ < count) { 7735 // This is the same effect as popping then re-pushing 'count' elements. 7736 pop_count_ += (count - push_count_); 7737 push_count_ = count; 7738 } 7739 values_[index] = value; 7740} 7741 7742 7743void HEnvironment::Drop(int count) { 7744 for (int i = 0; i < count; ++i) { 7745 Pop(); 7746 } 7747} 7748 7749 7750HEnvironment* HEnvironment::Copy() const { 7751 return new(closure()->GetIsolate()->zone()) HEnvironment(this); 7752} 7753 7754 7755HEnvironment* HEnvironment::CopyWithoutHistory() const { 7756 HEnvironment* result = Copy(); 7757 result->ClearHistory(); 7758 return result; 7759} 7760 7761 7762HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const { 7763 HEnvironment* new_env = Copy(); 7764 for (int i = 0; i < values_.length(); ++i) { 7765 HPhi* phi = new(loop_header->zone()) HPhi(i); 7766 phi->AddInput(values_[i]); 7767 new_env->values_[i] = phi; 7768 loop_header->AddPhi(phi); 7769 } 7770 new_env->ClearHistory(); 7771 return new_env; 7772} 7773 7774 7775HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer, 7776 Handle<JSFunction> target, 7777 FrameType frame_type, 7778 int arguments) const { 7779 HEnvironment* new_env = new(closure()->GetIsolate()->zone()) 7780 HEnvironment(outer, target, frame_type, arguments + 1); 7781 for (int i = 0; i <= arguments; ++i) { // Include receiver. 7782 new_env->Push(ExpressionStackAt(arguments - i)); 7783 } 7784 new_env->ClearHistory(); 7785 return new_env; 7786} 7787 7788 7789HEnvironment* HEnvironment::CopyForInlining( 7790 Handle<JSFunction> target, 7791 int arguments, 7792 FunctionLiteral* function, 7793 HConstant* undefined, 7794 CallKind call_kind, 7795 bool is_construct) const { 7796 ASSERT(frame_type() == JS_FUNCTION); 7797 7798 Zone* zone = closure()->GetIsolate()->zone(); 7799 7800 // Outer environment is a copy of this one without the arguments. 7801 int arity = function->scope()->num_parameters(); 7802 7803 HEnvironment* outer = Copy(); 7804 outer->Drop(arguments + 1); // Including receiver. 7805 outer->ClearHistory(); 7806 7807 if (is_construct) { 7808 // Create artificial constructor stub environment. 
The receiver should 7809 // actually be the constructor function, but we pass the newly allocated 7810 // object instead, DoComputeConstructStubFrame() relies on that. 7811 outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments); 7812 } 7813 7814 if (arity != arguments) { 7815 // Create artificial arguments adaptation environment. 7816 outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments); 7817 } 7818 7819 HEnvironment* inner = 7820 new(zone) HEnvironment(outer, function->scope(), target); 7821 // Get the argument values from the original environment. 7822 for (int i = 0; i <= arity; ++i) { // Include receiver. 7823 HValue* push = (i <= arguments) ? 7824 ExpressionStackAt(arguments - i) : undefined; 7825 inner->SetValueAt(i, push); 7826 } 7827 // If the function we are inlining is a strict mode function or a 7828 // builtin function, pass undefined as the receiver for function 7829 // calls (instead of the global receiver). 7830 if ((target->shared()->native() || !function->is_classic_mode()) && 7831 call_kind == CALL_AS_FUNCTION && !is_construct) { 7832 inner->SetValueAt(0, undefined); 7833 } 7834 inner->SetValueAt(arity + 1, LookupContext()); 7835 for (int i = arity + 2; i < inner->length(); ++i) { 7836 inner->SetValueAt(i, undefined); 7837 } 7838 7839 inner->set_ast_id(AstNode::kFunctionEntryId); 7840 return inner; 7841} 7842 7843 7844void HEnvironment::PrintTo(StringStream* stream) { 7845 for (int i = 0; i < length(); i++) { 7846 if (i == 0) stream->Add("parameters\n"); 7847 if (i == parameter_count()) stream->Add("specials\n"); 7848 if (i == parameter_count() + specials_count()) stream->Add("locals\n"); 7849 if (i == parameter_count() + specials_count() + local_count()) { 7850 stream->Add("expressions\n"); 7851 } 7852 HValue* val = values_.at(i); 7853 stream->Add("%d: ", i); 7854 if (val != NULL) { 7855 val->PrintNameTo(stream); 7856 } else { 7857 stream->Add("NULL"); 7858 } 7859 stream->Add("\n"); 7860 } 7861 PrintF("\n"); 7862} 7863 7864 7865void HEnvironment::PrintToStd() { 7866 HeapStringAllocator string_allocator; 7867 StringStream trace(&string_allocator); 7868 PrintTo(&trace); 7869 PrintF("%s", *trace.ToCString()); 7870} 7871 7872 7873void HTracer::TraceCompilation(FunctionLiteral* function) { 7874 Tag tag(this, "compilation"); 7875 Handle<String> name = function->debug_name(); 7876 PrintStringProperty("name", *name->ToCString()); 7877 PrintStringProperty("method", *name->ToCString()); 7878 PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis())); 7879} 7880 7881 7882void HTracer::TraceLithium(const char* name, LChunk* chunk) { 7883 Trace(name, chunk->graph(), chunk); 7884} 7885 7886 7887void HTracer::TraceHydrogen(const char* name, HGraph* graph) { 7888 Trace(name, graph, NULL); 7889} 7890 7891 7892void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) { 7893 Tag tag(this, "cfg"); 7894 PrintStringProperty("name", name); 7895 const ZoneList<HBasicBlock*>* blocks = graph->blocks(); 7896 for (int i = 0; i < blocks->length(); i++) { 7897 HBasicBlock* current = blocks->at(i); 7898 Tag block_tag(this, "block"); 7899 PrintBlockProperty("name", current->block_id()); 7900 PrintIntProperty("from_bci", -1); 7901 PrintIntProperty("to_bci", -1); 7902 7903 if (!current->predecessors()->is_empty()) { 7904 PrintIndent(); 7905 trace_.Add("predecessors"); 7906 for (int j = 0; j < current->predecessors()->length(); ++j) { 7907 trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id()); 7908 } 7909 trace_.Add("\n"); 7910 } 
else { 7911 PrintEmptyProperty("predecessors"); 7912 } 7913 7914 if (current->end()->SuccessorCount() == 0) { 7915 PrintEmptyProperty("successors"); 7916 } else { 7917 PrintIndent(); 7918 trace_.Add("successors"); 7919 for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) { 7920 trace_.Add(" \"B%d\"", it.Current()->block_id()); 7921 } 7922 trace_.Add("\n"); 7923 } 7924 7925 PrintEmptyProperty("xhandlers"); 7926 const char* flags = current->IsLoopSuccessorDominator() 7927 ? "dom-loop-succ" 7928 : ""; 7929 PrintStringProperty("flags", flags); 7930 7931 if (current->dominator() != NULL) { 7932 PrintBlockProperty("dominator", current->dominator()->block_id()); 7933 } 7934 7935 PrintIntProperty("loop_depth", current->LoopNestingDepth()); 7936 7937 if (chunk != NULL) { 7938 int first_index = current->first_instruction_index(); 7939 int last_index = current->last_instruction_index(); 7940 PrintIntProperty( 7941 "first_lir_id", 7942 LifetimePosition::FromInstructionIndex(first_index).Value()); 7943 PrintIntProperty( 7944 "last_lir_id", 7945 LifetimePosition::FromInstructionIndex(last_index).Value()); 7946 } 7947 7948 { 7949 Tag states_tag(this, "states"); 7950 Tag locals_tag(this, "locals"); 7951 int total = current->phis()->length(); 7952 PrintIntProperty("size", current->phis()->length()); 7953 PrintStringProperty("method", "None"); 7954 for (int j = 0; j < total; ++j) { 7955 HPhi* phi = current->phis()->at(j); 7956 PrintIndent(); 7957 trace_.Add("%d ", phi->merged_index()); 7958 phi->PrintNameTo(&trace_); 7959 trace_.Add(" "); 7960 phi->PrintTo(&trace_); 7961 trace_.Add("\n"); 7962 } 7963 } 7964 7965 { 7966 Tag HIR_tag(this, "HIR"); 7967 HInstruction* instruction = current->first(); 7968 while (instruction != NULL) { 7969 int bci = 0; 7970 int uses = instruction->UseCount(); 7971 PrintIndent(); 7972 trace_.Add("%d %d ", bci, uses); 7973 instruction->PrintNameTo(&trace_); 7974 trace_.Add(" "); 7975 instruction->PrintTo(&trace_); 7976 trace_.Add(" <|@\n"); 7977 instruction = instruction->next(); 7978 } 7979 } 7980 7981 7982 if (chunk != NULL) { 7983 Tag LIR_tag(this, "LIR"); 7984 int first_index = current->first_instruction_index(); 7985 int last_index = current->last_instruction_index(); 7986 if (first_index != -1 && last_index != -1) { 7987 const ZoneList<LInstruction*>* instructions = chunk->instructions(); 7988 for (int i = first_index; i <= last_index; ++i) { 7989 LInstruction* linstr = instructions->at(i); 7990 if (linstr != NULL) { 7991 PrintIndent(); 7992 trace_.Add("%d ", 7993 LifetimePosition::FromInstructionIndex(i).Value()); 7994 linstr->PrintTo(&trace_); 7995 trace_.Add(" <|@\n"); 7996 } 7997 } 7998 } 7999 } 8000 } 8001} 8002 8003 8004void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) { 8005 Tag tag(this, "intervals"); 8006 PrintStringProperty("name", name); 8007 8008 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges(); 8009 for (int i = 0; i < fixed_d->length(); ++i) { 8010 TraceLiveRange(fixed_d->at(i), "fixed"); 8011 } 8012 8013 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges(); 8014 for (int i = 0; i < fixed->length(); ++i) { 8015 TraceLiveRange(fixed->at(i), "fixed"); 8016 } 8017 8018 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges(); 8019 for (int i = 0; i < live_ranges->length(); ++i) { 8020 TraceLiveRange(live_ranges->at(i), "object"); 8021 } 8022} 8023 8024 8025void HTracer::TraceLiveRange(LiveRange* range, const char* type) { 8026 if (range != NULL && !range->IsEmpty()) { 8027 
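    // One line per live range: id and type, the assigned register or spill
    // slot (if any), the parent and hint ids, the [start, end[ use intervals,
    // and finally the use positions that would benefit from a register.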
PrintIndent(); 8028 trace_.Add("%d %s", range->id(), type); 8029 if (range->HasRegisterAssigned()) { 8030 LOperand* op = range->CreateAssignedOperand(ZONE); 8031 int assigned_reg = op->index(); 8032 if (op->IsDoubleRegister()) { 8033 trace_.Add(" \"%s\"", 8034 DoubleRegister::AllocationIndexToString(assigned_reg)); 8035 } else { 8036 ASSERT(op->IsRegister()); 8037 trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg)); 8038 } 8039 } else if (range->IsSpilled()) { 8040 LOperand* op = range->TopLevel()->GetSpillOperand(); 8041 if (op->IsDoubleStackSlot()) { 8042 trace_.Add(" \"double_stack:%d\"", op->index()); 8043 } else { 8044 ASSERT(op->IsStackSlot()); 8045 trace_.Add(" \"stack:%d\"", op->index()); 8046 } 8047 } 8048 int parent_index = -1; 8049 if (range->IsChild()) { 8050 parent_index = range->parent()->id(); 8051 } else { 8052 parent_index = range->id(); 8053 } 8054 LOperand* op = range->FirstHint(); 8055 int hint_index = -1; 8056 if (op != NULL && op->IsUnallocated()) { 8057 hint_index = LUnallocated::cast(op)->virtual_register(); 8058 } 8059 trace_.Add(" %d %d", parent_index, hint_index); 8060 UseInterval* cur_interval = range->first_interval(); 8061 while (cur_interval != NULL && range->Covers(cur_interval->start())) { 8062 trace_.Add(" [%d, %d[", 8063 cur_interval->start().Value(), 8064 cur_interval->end().Value()); 8065 cur_interval = cur_interval->next(); 8066 } 8067 8068 UsePosition* current_pos = range->first_pos(); 8069 while (current_pos != NULL) { 8070 if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) { 8071 trace_.Add(" %d M", current_pos->pos().Value()); 8072 } 8073 current_pos = current_pos->next(); 8074 } 8075 8076 trace_.Add(" \"\"\n"); 8077 } 8078} 8079 8080 8081void HTracer::FlushToFile() { 8082 AppendChars(filename_, *trace_.ToCString(), trace_.length(), false); 8083 trace_.Reset(); 8084} 8085 8086 8087void HStatistics::Initialize(CompilationInfo* info) { 8088 source_size_ += info->shared_info()->SourceSize(); 8089} 8090 8091 8092void HStatistics::Print() { 8093 PrintF("Timing results:\n"); 8094 int64_t sum = 0; 8095 for (int i = 0; i < timing_.length(); ++i) { 8096 sum += timing_[i]; 8097 } 8098 8099 for (int i = 0; i < names_.length(); ++i) { 8100 PrintF("%30s", names_[i]); 8101 double ms = static_cast<double>(timing_[i]) / 1000; 8102 double percent = static_cast<double>(timing_[i]) * 100 / sum; 8103 PrintF(" - %7.3f ms / %4.1f %% ", ms, percent); 8104 8105 unsigned size = sizes_[i]; 8106 double size_percent = static_cast<double>(size) * 100 / total_size_; 8107 PrintF(" %8u bytes / %4.1f %%\n", size, size_percent); 8108 } 8109 double source_size_in_kb = static_cast<double>(source_size_) / 1024; 8110 double normalized_time = source_size_in_kb > 0 8111 ? (static_cast<double>(sum) / 1000) / source_size_in_kb 8112 : 0; 8113 double normalized_bytes = source_size_in_kb > 0 8114 ? 
total_size_ / source_size_in_kb 8115 : 0; 8116 PrintF("%30s - %7.3f ms %7.3f bytes\n", "Sum", 8117 normalized_time, normalized_bytes); 8118 PrintF("---------------------------------------------------------------\n"); 8119 PrintF("%30s - %7.3f ms (%.1f times slower than full code gen)\n", 8120 "Total", 8121 static_cast<double>(total_) / 1000, 8122 static_cast<double>(total_) / full_code_gen_); 8123} 8124 8125 8126void HStatistics::SaveTiming(const char* name, int64_t ticks, unsigned size) { 8127 if (name == HPhase::kFullCodeGen) { 8128 full_code_gen_ += ticks; 8129 } else if (name == HPhase::kTotal) { 8130 total_ += ticks; 8131 } else { 8132 total_size_ += size; 8133 for (int i = 0; i < names_.length(); ++i) { 8134 if (names_[i] == name) { 8135 timing_[i] += ticks; 8136 sizes_[i] += size; 8137 return; 8138 } 8139 } 8140 names_.Add(name); 8141 timing_.Add(ticks); 8142 sizes_.Add(size); 8143 } 8144} 8145 8146 8147const char* const HPhase::kFullCodeGen = "Full code generator"; 8148const char* const HPhase::kTotal = "Total"; 8149 8150 8151void HPhase::Begin(const char* name, 8152 HGraph* graph, 8153 LChunk* chunk, 8154 LAllocator* allocator) { 8155 name_ = name; 8156 graph_ = graph; 8157 chunk_ = chunk; 8158 allocator_ = allocator; 8159 if (allocator != NULL && chunk_ == NULL) { 8160 chunk_ = allocator->chunk(); 8161 } 8162 if (FLAG_hydrogen_stats) start_ = OS::Ticks(); 8163 start_allocation_size_ = Zone::allocation_size_; 8164} 8165 8166 8167void HPhase::End() const { 8168 if (FLAG_hydrogen_stats) { 8169 int64_t end = OS::Ticks(); 8170 unsigned size = Zone::allocation_size_ - start_allocation_size_; 8171 HStatistics::Instance()->SaveTiming(name_, end - start_, size); 8172 } 8173 8174 // Produce trace output if flag is set so that the first letter of the 8175 // phase name matches the command line parameter FLAG_trace_phase. 8176 if (FLAG_trace_hydrogen && 8177 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL) { 8178 if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_); 8179 if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_); 8180 if (allocator_ != NULL) { 8181 HTracer::Instance()->TraceLiveRanges(name_, allocator_); 8182 } 8183 } 8184 8185#ifdef DEBUG 8186 if (graph_ != NULL) graph_->Verify(false); // No full verify. 8187 if (allocator_ != NULL) allocator_->Verify(); 8188#endif 8189} 8190 8191} } // namespace v8::internal 8192