// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "lithium-allocator-inl.h"

#include "hydrogen.h"
#include "string-stream.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


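// A UsePosition records a single use of a value within a live range,
// together with the operand of that use and an optional hint operand.
// Whether the use requires a register, or merely benefits from one, is
// derived from the operand's allocation policy.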
UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  ASSERT(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}


void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  ASSERT(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}


#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    ASSERT(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}


#endif


LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      is_double_(false),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new(zone) LOperand()),
      spill_start_index_(kMaxInt) { }


void LiveRange::set_assigned_register(int reg,
                                      RegisterKind register_kind,
                                      Zone* zone) {
  ASSERT(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  is_double_ = (register_kind == DOUBLE_REGISTERS);
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  ASSERT(!IsSpilled());
  ASSERT(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  ASSERT(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  ASSERT(!operand->IsUnallocated());
  ASSERT(spill_operand_ != NULL);
  ASSERT(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


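// Find the first use position at or after the given position whose value
// would benefit from being in a register.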
UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return
      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}


LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    ASSERT(!IsSpilled());
    if (IsDouble()) {
      op = LDoubleRegister::Create(assigned_register(), zone);
    } else {
      op = LRegister::Create(assigned_register(), zone);
    }
  } else if (IsSpilled()) {
    ASSERT(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    ASSERT(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}


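// Split this live range at the given position and move the part that
// follows the split into the (empty) range 'result': use intervals and use
// positions are partitioned between the two ranges and the child is linked
// into the chain of ranges hanging off the top-level parent.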
void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  ASSERT(Start().Value() < position.Value());
  ASSERT(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child because the split child owns the use interval
    // covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  ASSERT(first_interval_ != NULL);
  ASSERT(first_interval_->start().Value() <= start.Value());
  ASSERT(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}


void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}


void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order in which instructions are processed (see
      // ProcessInstructions) guarantees that each new use interval either
      // precedes or intersects with the last added interval.
      ASSERT(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}


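// Insert a new use position into the use list, which is kept sorted by
// lifetime position; if it becomes the first hinted use, its hint becomes
// the range's current hint operand.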
void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}


void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    ASSERT(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    ASSERT(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(GENERAL_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) { }


void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, except not including backward
  // successor edges.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}


int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kMaxNumAllocatableRegisters;
}


LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  ASSERT(index < Register::kMaxNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    ASSERT(result->IsFixed());
    SetLiveRangeAssignedRegister(result, index, GENERAL_REGISTERS);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  ASSERT(index < DoubleRegister::NumAllocatableRegisters());
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    ASSERT(result->IsFixed());
    SetLiveRangeAssignedRegister(result, index, DOUBLE_REGISTERS);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


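// Record a definition of 'operand' at 'position': shorten the live range
// built so far so that it starts at the definition, or create a minimal
// interval for a value that is defined but never used.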
void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}


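// Meet the register constraints imposed by the two instructions around a
// gap: fixed temporaries and fixed outputs of the first instruction, and
// fixed inputs, writable-register inputs and "output same as input"
// operands of the second instruction.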
void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the
        // end of the instruction.
        ASSERT(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}


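// Walk the instructions of the block in reverse order, updating the live
// set: defining an output kills the value, using an input (or gap move
// source) revives it, and instructions that clobber registers add short
// use intervals to the corresponding fixed live ranges.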
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      ASSERT(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        if (instr->ClobbersDoubleRegisters()) {
          for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone());
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);
        }
      }
    }

    index = index - 1;
  }
}


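// Give each phi of the block a virtual register operand and add a gap move
// from the corresponding input to that operand in each predecessor; the
// phi's value is then spilled at the start of the block.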
void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without any special processing as in
      // the AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


bool LAllocator::Allocate(LChunk* chunk) {
  ASSERT(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
                                   chunk->zone());
  assigned_double_registers_ =
      new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
                                   chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  first_artificial_register_ = next_virtual_register_;
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}


void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      ASSERT(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      ASSERT(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  ASSERT(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        ASSERT(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}


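// Return (creating it if necessary) the parallel move into which a
// connecting move between two parts of a split live range should be added
// at the given lifetime position.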
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}


void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}


void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      ASSERT(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end, zone());
        iterator.Advance();
      }

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        if (chunk_->info()->IsStub()) {
          CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
          PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
        } else {
          ASSERT(chunk_->info()->IsOptimizing());
          AllowHandleDereference allow_deref;
          PrintF("Function: %s\n",
                 *chunk_->info()->function()->debug_name()->ToCString());
        }
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }
}


bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}


void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all. Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this
    // range, recording how far we step in order to save doing this for the
    // next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
      }
    }
  }
}


void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  num_registers_ = Register::NumAllocatableRegisters();
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  num_registers_ = DoubleRegister::NumAllocatableRegisters();
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateRegisters() {
  ASSERT(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(reusable_slots_.is_empty());
  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}


const char* LAllocator::RegisterName(int allocation_index) {
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}


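// Emit allocator tracing output when FLAG_trace_alloc is set.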
void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged() && !value->type().IsSmi();
}


RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  }

  return GENERAL_REGISTERS;
}


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  ASSERT(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  ASSERT(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list.  This is convenient for the register
// allocation algorithm because it is efficient to remove elements from the
// end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range, zone());
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}


void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}
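

// TryAllocateFreeReg below computes, for every register, the position up
// to which it stays free.  A sketch with invented positions (assuming only
// r0 and r1 exist): an active range assigned to r0 makes
// free_until_pos[r0] = 0; an inactive range assigned to r1 that first
// intersects the current range at position 20 makes free_until_pos[r1] = 20.
// For a current range [10, 30[ the allocator would then pick r1 and split
// the range at 20, re-queueing the tail.
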
// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays.
STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
              Register::kMaxNumAllocatableRegisters);


bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
    int register_index = hint->index();
    TraceAlloc(
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->id(),
        current->End().Value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
                 current->id());
      SetLiveRangeAssignedRegister(current, register_index, mode_);
      return true;
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end.  Split current at the position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg, mode_);

  return true;
}
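

// AllocateBlockedReg below tracks two positions per register:
//
//   use_pos[r]   - first position where a range currently holding r
//                  actually needs it (fixed or otherwise unspillable
//                  ranges need it immediately)
//   block_pos[r] - first position where r is blocked by a fixed range and
//                  cannot be freed by spilling
//
// The register whose use_pos lies farthest away is picked; if even that
// position precedes the first use of the current range that requires a
// register, it is cheaper to spill the current range instead.
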
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];

  for (int i = 0; i < num_registers_; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end.  Split before
    // that position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg, mode_);

  // This register was not free.  Thus we need to find and spill
  // parts of active and inactive live ranges that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}
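

// FindOptimalSpillingPos below tries to hoist a spill that would land
// inside a loop up to the loop header, walking outwards through nested
// loops.  A sketch with invented positions: if a range must be spilled at
// position 57 inside a loop whose header starts at position 40, the range
// covers position 40, and there is no register-beneficial use in between,
// the spill is moved to 40, so the store happens once instead of on every
// back edge.
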
LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
    // We are going to spill the live range inside the loop.  If possible,
    // try to move the spilling position backwards to the loop header: this
    // reduces the number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register-beneficial use inside the loop before the position.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = loop_header->parent_loop_header();
  }

  return pos;
}


void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        SpillAfter(range, spill_pos);
      } else {
        // When spilling between spill_pos and next_pos, ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range, as that violates the allocation
        // invariant and would lead to an inconsistent state of active and
        // inactive live ranges: ranges are allocated in order of their start
        // positions, and ranges are retired from active/inactive when the
        // start of the current live range is larger than their end.
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      }
      if (!AllocationOk()) return;
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        if (!AllocationOk()) return;
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
         InstructionAt(pos.InstructionIndex())->IsLabel();
}
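

// The splitting helpers below carve a live range into independent pieces.
// SplitRangeAt allocates a fresh virtual register for the tail so the two
// halves can be assigned or spilled separately; SplitBetween delegates to
// FindOptimalSplitPos, which prefers splitting at the header of the
// outermost loop between the two positions over splitting inside hot code.
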
LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if the split occurred at the end
  // of a control instruction.
  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}


LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice: start and end are in the same instruction.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block.  Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop.  Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}
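

// SpillBetweenUntil below splits a range into up to three parts.  A sketch
// with invented positions, spilling a range [0, 80[ between start = 10 and
// end = 40 with until = 25:
//
//   [0 .............................. 80[   original range
//   [0, 10[     [10, ~25[     [~25, 80[
//   untouched   spilled       re-queued as unhandled
//
// The exact split point of the middle part is chosen by FindOptimalSplitPos
// between `until` and `end`; keeping it at or after `until` preserves the
// invariant that newly created unhandled ranges never start before the
// range currently being allocated.
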
void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part,
    // and put the rest on the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        Max(second_part->Start().InstructionEnd(), until),
        end.PrevInstruction().InstructionEnd());
    if (!AllocationOk()) return;

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill; just put it on the unhandled list as a whole.
    AddToUnhandledSorted(second_part);
  }
}


void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(chunk()->zone());
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();
  }
}


LAllocatorPhase::~LAllocatorPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = allocator_->zone()->allocation_size() -
                    allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), 0, size);
  }

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
  }

#ifdef DEBUG
  if (allocator_ != NULL) allocator_->Verify();
#endif
}


} }  // namespace v8::internal