// nodes.h revision 805b3b56c6eb542298db33e0181f135dc9fed3d9
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_containers.h" 25#include "base/arena_object.h" 26#include "base/stl_util.h" 27#include "dex/compiler_enums.h" 28#include "entrypoints/quick/quick_entrypoints_enum.h" 29#include "handle.h" 30#include "handle_scope.h" 31#include "invoke_type.h" 32#include "locations.h" 33#include "method_reference.h" 34#include "mirror/class.h" 35#include "offsets.h" 36#include "primitive.h" 37#include "utils/arena_bit_vector.h" 38 39namespace art { 40 41class GraphChecker; 42class HBasicBlock; 43class HCurrentMethod; 44class HDoubleConstant; 45class HEnvironment; 46class HFakeString; 47class HFloatConstant; 48class HGraphBuilder; 49class HGraphVisitor; 50class HInstruction; 51class HIntConstant; 52class HInvoke; 53class HLongConstant; 54class HNullConstant; 55class HPhi; 56class HSuspendCheck; 57class HTryBoundary; 58class LiveInterval; 59class LocationSummary; 60class SlowPathCode; 61class SsaBuilder; 62 63namespace mirror { 64class DexCache; 65} // namespace mirror 66 67static const int kDefaultNumberOfBlocks = 8; 68static const int kDefaultNumberOfSuccessors = 2; 69static const int kDefaultNumberOfPredecessors = 2; 70static const int kDefaultNumberOfExceptionalPredecessors = 
0; 71static const int kDefaultNumberOfDominatedBlocks = 1; 72static const int kDefaultNumberOfBackEdges = 1; 73 74static constexpr uint32_t kMaxIntShiftValue = 0x1f; 75static constexpr uint64_t kMaxLongShiftValue = 0x3f; 76 77static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 78 79static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 80 81static constexpr uint32_t kNoDexPc = -1; 82 83enum IfCondition { 84 kCondEQ, 85 kCondNE, 86 kCondLT, 87 kCondLE, 88 kCondGT, 89 kCondGE, 90}; 91 92class HInstructionList : public ValueObject { 93 public: 94 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 95 96 void AddInstruction(HInstruction* instruction); 97 void RemoveInstruction(HInstruction* instruction); 98 99 // Insert `instruction` before/after an existing instruction `cursor`. 100 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 101 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 102 103 // Return true if this list contains `instruction`. 104 bool Contains(HInstruction* instruction) const; 105 106 // Return true if `instruction1` is found before `instruction2` in 107 // this instruction list and false otherwise. Abort if none 108 // of these instructions is found. 109 bool FoundBefore(const HInstruction* instruction1, 110 const HInstruction* instruction2) const; 111 112 bool IsEmpty() const { return first_instruction_ == nullptr; } 113 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 114 115 // Update the block of all instructions to be `block`. 116 void SetBlockOfInstructions(HBasicBlock* block) const; 117 118 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 119 void Add(const HInstructionList& instruction_list); 120 121 // Return the number of instructions in the list. This is an expensive operation. 
122 size_t CountSize() const; 123 124 private: 125 HInstruction* first_instruction_; 126 HInstruction* last_instruction_; 127 128 friend class HBasicBlock; 129 friend class HGraph; 130 friend class HInstruction; 131 friend class HInstructionIterator; 132 friend class HBackwardInstructionIterator; 133 134 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 135}; 136 137// Control-flow graph of a method. Contains a list of basic blocks. 138class HGraph : public ArenaObject<kArenaAllocGraph> { 139 public: 140 HGraph(ArenaAllocator* arena, 141 const DexFile& dex_file, 142 uint32_t method_idx, 143 bool should_generate_constructor_barrier, 144 InstructionSet instruction_set, 145 InvokeType invoke_type = kInvalidInvokeType, 146 bool debuggable = false, 147 int start_instruction_id = 0) 148 : arena_(arena), 149 blocks_(arena->Adapter(kArenaAllocBlockList)), 150 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 151 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 152 entry_block_(nullptr), 153 exit_block_(nullptr), 154 maximum_number_of_out_vregs_(0), 155 number_of_vregs_(0), 156 number_of_in_vregs_(0), 157 temporaries_vreg_slots_(0), 158 has_bounds_checks_(false), 159 has_try_catch_(false), 160 debuggable_(debuggable), 161 current_instruction_id_(start_instruction_id), 162 dex_file_(dex_file), 163 method_idx_(method_idx), 164 invoke_type_(invoke_type), 165 in_ssa_form_(false), 166 should_generate_constructor_barrier_(should_generate_constructor_barrier), 167 instruction_set_(instruction_set), 168 cached_null_constant_(nullptr), 169 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 170 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 171 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 172 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 173 cached_current_method_(nullptr) { 174 
blocks_.reserve(kDefaultNumberOfBlocks); 175 } 176 177 ArenaAllocator* GetArena() const { return arena_; } 178 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 179 180 bool IsInSsaForm() const { return in_ssa_form_; } 181 182 HBasicBlock* GetEntryBlock() const { return entry_block_; } 183 HBasicBlock* GetExitBlock() const { return exit_block_; } 184 bool HasExitBlock() const { return exit_block_ != nullptr; } 185 186 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 187 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 188 189 void AddBlock(HBasicBlock* block); 190 191 // Try building the SSA form of this graph, with dominance computation and loop 192 // recognition. Returns whether it was successful in doing all these steps. 193 bool TryBuildingSsa() { 194 BuildDominatorTree(); 195 // The SSA builder requires loops to all be natural. Specifically, the dead phi 196 // elimination phase checks the consistency of the graph when doing a post-order 197 // visit for eliminating dead phis: a dead phi can only have loop header phi 198 // users remaining when being visited. 199 if (!AnalyzeNaturalLoops()) return false; 200 // Precompute per-block try membership before entering the SSA builder, 201 // which needs the information to build catch block phis from values of 202 // locals at throwing instructions inside try blocks. 203 ComputeTryBlockInformation(); 204 TransformToSsa(); 205 in_ssa_form_ = true; 206 return true; 207 } 208 209 void ComputeDominanceInformation(); 210 void ClearDominanceInformation(); 211 212 void BuildDominatorTree(); 213 void TransformToSsa(); 214 void SimplifyCFG(); 215 void SimplifyCatchBlocks(); 216 217 // Analyze all natural loops in this graph. Returns false if one 218 // loop is not natural, that is the header does not dominate the 219 // back edge. 220 bool AnalyzeNaturalLoops() const; 221 222 // Iterate over blocks to compute try block membership. 
Needs reverse post 223 // order and loop information. 224 void ComputeTryBlockInformation(); 225 226 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 227 // Returns the instruction used to replace the invoke expression or null if the 228 // invoke is for a void method. 229 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 230 231 // Need to add a couple of blocks to test if the loop body is entered and 232 // put deoptimization instructions, etc. 233 void TransformLoopHeaderForBCE(HBasicBlock* header); 234 235 // Removes `block` from the graph. 236 void DeleteDeadBlock(HBasicBlock* block); 237 238 // Splits the edge between `block` and `successor` while preserving the 239 // indices in the predecessor/successor lists. If there are multiple edges 240 // between the blocks, the lowest indices are used. 241 // Returns the new block which is empty and has the same dex pc as `successor`. 242 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 243 244 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 245 void SimplifyLoop(HBasicBlock* header); 246 247 int32_t GetNextInstructionId() { 248 DCHECK_NE(current_instruction_id_, INT32_MAX); 249 return current_instruction_id_++; 250 } 251 252 int32_t GetCurrentInstructionId() const { 253 return current_instruction_id_; 254 } 255 256 void SetCurrentInstructionId(int32_t id) { 257 current_instruction_id_ = id; 258 } 259 260 uint16_t GetMaximumNumberOfOutVRegs() const { 261 return maximum_number_of_out_vregs_; 262 } 263 264 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 265 maximum_number_of_out_vregs_ = new_value; 266 } 267 268 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 269 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 270 } 271 272 void UpdateTemporariesVRegSlots(size_t slots) { 273 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 274 } 275 276 size_t 
GetTemporariesVRegSlots() const { 277 DCHECK(!in_ssa_form_); 278 return temporaries_vreg_slots_; 279 } 280 281 void SetNumberOfVRegs(uint16_t number_of_vregs) { 282 number_of_vregs_ = number_of_vregs; 283 } 284 285 uint16_t GetNumberOfVRegs() const { 286 return number_of_vregs_; 287 } 288 289 void SetNumberOfInVRegs(uint16_t value) { 290 number_of_in_vregs_ = value; 291 } 292 293 uint16_t GetNumberOfLocalVRegs() const { 294 DCHECK(!in_ssa_form_); 295 return number_of_vregs_ - number_of_in_vregs_; 296 } 297 298 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 299 return reverse_post_order_; 300 } 301 302 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 303 return linear_order_; 304 } 305 306 bool HasBoundsChecks() const { 307 return has_bounds_checks_; 308 } 309 310 void SetHasBoundsChecks(bool value) { 311 has_bounds_checks_ = value; 312 } 313 314 bool ShouldGenerateConstructorBarrier() const { 315 return should_generate_constructor_barrier_; 316 } 317 318 bool IsDebuggable() const { return debuggable_; } 319 320 // Returns a constant of the given type and value. If it does not exist 321 // already, it is created and inserted into the graph. This method is only for 322 // integral types. 323 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 324 325 // TODO: This is problematic for the consistency of reference type propagation 326 // because it can be created anytime after the pass and thus it will be left 327 // with an invalid type. 
328 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 329 330 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 331 return CreateConstant(value, &cached_int_constants_, dex_pc); 332 } 333 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 334 return CreateConstant(value, &cached_long_constants_, dex_pc); 335 } 336 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 337 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 338 } 339 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 340 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 341 } 342 343 HCurrentMethod* GetCurrentMethod(); 344 345 HBasicBlock* FindCommonDominator(HBasicBlock* first, HBasicBlock* second) const; 346 347 const DexFile& GetDexFile() const { 348 return dex_file_; 349 } 350 351 uint32_t GetMethodIdx() const { 352 return method_idx_; 353 } 354 355 InvokeType GetInvokeType() const { 356 return invoke_type_; 357 } 358 359 InstructionSet GetInstructionSet() const { 360 return instruction_set_; 361 } 362 363 bool HasTryCatch() const { return has_try_catch_; } 364 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 365 366 private: 367 void FindBackEdges(ArenaBitVector* visited); 368 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 369 void RemoveDeadBlocks(const ArenaBitVector& visited); 370 371 template <class InstructionType, typename ValueType> 372 InstructionType* CreateConstant(ValueType value, 373 ArenaSafeMap<ValueType, InstructionType*>* cache, 374 uint32_t dex_pc = kNoDexPc) { 375 // Try to find an existing constant of the given value. 
376 InstructionType* constant = nullptr; 377 auto cached_constant = cache->find(value); 378 if (cached_constant != cache->end()) { 379 constant = cached_constant->second; 380 } 381 382 // If not found or previously deleted, create and cache a new instruction. 383 // Don't bother reviving a previously deleted instruction, for simplicity. 384 if (constant == nullptr || constant->GetBlock() == nullptr) { 385 constant = new (arena_) InstructionType(value, dex_pc); 386 cache->Overwrite(value, constant); 387 InsertConstant(constant); 388 } 389 return constant; 390 } 391 392 void InsertConstant(HConstant* instruction); 393 394 // Cache a float constant into the graph. This method should only be 395 // called by the SsaBuilder when creating "equivalent" instructions. 396 void CacheFloatConstant(HFloatConstant* constant); 397 398 // See CacheFloatConstant comment. 399 void CacheDoubleConstant(HDoubleConstant* constant); 400 401 ArenaAllocator* const arena_; 402 403 // List of blocks in insertion order. 404 ArenaVector<HBasicBlock*> blocks_; 405 406 // List of blocks to perform a reverse post order tree traversal. 407 ArenaVector<HBasicBlock*> reverse_post_order_; 408 409 // List of blocks to perform a linear order tree traversal. 410 ArenaVector<HBasicBlock*> linear_order_; 411 412 HBasicBlock* entry_block_; 413 HBasicBlock* exit_block_; 414 415 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 416 uint16_t maximum_number_of_out_vregs_; 417 418 // The number of virtual registers in this method. Contains the parameters. 419 uint16_t number_of_vregs_; 420 421 // The number of virtual registers used by parameters of this method. 422 uint16_t number_of_in_vregs_; 423 424 // Number of vreg size slots that the temporaries use (used in baseline compiler). 425 size_t temporaries_vreg_slots_; 426 427 // Has bounds checks. We can totally skip BCE if it's false. 
428 bool has_bounds_checks_; 429 430 // Flag whether there are any try/catch blocks in the graph. We will skip 431 // try/catch-related passes if false. 432 bool has_try_catch_; 433 434 // Indicates whether the graph should be compiled in a way that 435 // ensures full debuggability. If false, we can apply more 436 // aggressive optimizations that may limit the level of debugging. 437 const bool debuggable_; 438 439 // The current id to assign to a newly added instruction. See HInstruction.id_. 440 int32_t current_instruction_id_; 441 442 // The dex file from which the method is from. 443 const DexFile& dex_file_; 444 445 // The method index in the dex file. 446 const uint32_t method_idx_; 447 448 // If inlined, this encodes how the callee is being invoked. 449 const InvokeType invoke_type_; 450 451 // Whether the graph has been transformed to SSA form. Only used 452 // in debug mode to ensure we are not using properties only valid 453 // for non-SSA form (like the number of temporaries). 454 bool in_ssa_form_; 455 456 const bool should_generate_constructor_barrier_; 457 458 const InstructionSet instruction_set_; 459 460 // Cached constants. 461 HNullConstant* cached_null_constant_; 462 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 463 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 464 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 465 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 466 467 HCurrentMethod* cached_current_method_; 468 469 friend class SsaBuilder; // For caching constants. 470 friend class SsaLivenessAnalysis; // For the linear order. 
471 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 472 DISALLOW_COPY_AND_ASSIGN(HGraph); 473}; 474 475class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 476 public: 477 HLoopInformation(HBasicBlock* header, HGraph* graph) 478 : header_(header), 479 suspend_check_(nullptr), 480 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 481 // Make bit vector growable, as the number of blocks may change. 482 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 483 back_edges_.reserve(kDefaultNumberOfBackEdges); 484 } 485 486 HBasicBlock* GetHeader() const { 487 return header_; 488 } 489 490 void SetHeader(HBasicBlock* block) { 491 header_ = block; 492 } 493 494 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 495 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 496 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 497 498 void AddBackEdge(HBasicBlock* back_edge) { 499 back_edges_.push_back(back_edge); 500 } 501 502 void RemoveBackEdge(HBasicBlock* back_edge) { 503 RemoveElement(back_edges_, back_edge); 504 } 505 506 bool IsBackEdge(const HBasicBlock& block) const { 507 return ContainsElement(back_edges_, &block); 508 } 509 510 size_t NumberOfBackEdges() const { 511 return back_edges_.size(); 512 } 513 514 HBasicBlock* GetPreHeader() const; 515 516 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 517 return back_edges_; 518 } 519 520 // Returns the lifetime position of the back edge that has the 521 // greatest lifetime position. 522 size_t GetLifetimeEnd() const; 523 524 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 525 ReplaceElement(back_edges_, existing, new_back_edge); 526 } 527 528 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 529 // that is the header dominates the back edge. 
530 bool Populate(); 531 532 // Reanalyzes the loop by removing loop info from its blocks and re-running 533 // Populate(). If there are no back edges left, the loop info is completely 534 // removed as well as its SuspendCheck instruction. It must be run on nested 535 // inner loops first. 536 void Update(); 537 538 // Returns whether this loop information contains `block`. 539 // Note that this loop information *must* be populated before entering this function. 540 bool Contains(const HBasicBlock& block) const; 541 542 // Returns whether this loop information is an inner loop of `other`. 543 // Note that `other` *must* be populated before entering this function. 544 bool IsIn(const HLoopInformation& other) const; 545 546 const ArenaBitVector& GetBlocks() const { return blocks_; } 547 548 void Add(HBasicBlock* block); 549 void Remove(HBasicBlock* block); 550 551 private: 552 // Internal recursive implementation of `Populate`. 553 void PopulateRecursive(HBasicBlock* block); 554 555 HBasicBlock* header_; 556 HSuspendCheck* suspend_check_; 557 ArenaVector<HBasicBlock*> back_edges_; 558 ArenaBitVector blocks_; 559 560 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 561}; 562 563// Stores try/catch information for basic blocks. 564// Note that HGraph is constructed so that catch blocks cannot simultaneously 565// be try blocks. 566class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 567 public: 568 // Try block information constructor. 569 explicit TryCatchInformation(const HTryBoundary& try_entry) 570 : try_entry_(&try_entry), 571 catch_dex_file_(nullptr), 572 catch_type_index_(DexFile::kDexNoIndex16) { 573 DCHECK(try_entry_ != nullptr); 574 } 575 576 // Catch block information constructor. 
577 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 578 : try_entry_(nullptr), 579 catch_dex_file_(&dex_file), 580 catch_type_index_(catch_type_index) {} 581 582 bool IsTryBlock() const { return try_entry_ != nullptr; } 583 584 const HTryBoundary& GetTryEntry() const { 585 DCHECK(IsTryBlock()); 586 return *try_entry_; 587 } 588 589 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 590 591 bool IsCatchAllTypeIndex() const { 592 DCHECK(IsCatchBlock()); 593 return catch_type_index_ == DexFile::kDexNoIndex16; 594 } 595 596 uint16_t GetCatchTypeIndex() const { 597 DCHECK(IsCatchBlock()); 598 return catch_type_index_; 599 } 600 601 const DexFile& GetCatchDexFile() const { 602 DCHECK(IsCatchBlock()); 603 return *catch_dex_file_; 604 } 605 606 private: 607 // One of possibly several TryBoundary instructions entering the block's try. 608 // Only set for try blocks. 609 const HTryBoundary* try_entry_; 610 611 // Exception type information. Only set for catch blocks. 612 const DexFile* catch_dex_file_; 613 const uint16_t catch_type_index_; 614}; 615 616static constexpr size_t kNoLifetime = -1; 617static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 618 619// A block in a method. Contains the list of instructions represented 620// as a double linked list. Each block knows its predecessors and 621// successors. 
622 623class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 624 public: 625 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 626 : graph_(graph), 627 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 628 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 629 loop_information_(nullptr), 630 dominator_(nullptr), 631 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 632 block_id_(kInvalidBlockId), 633 dex_pc_(dex_pc), 634 lifetime_start_(kNoLifetime), 635 lifetime_end_(kNoLifetime), 636 try_catch_information_(nullptr) { 637 predecessors_.reserve(kDefaultNumberOfPredecessors); 638 successors_.reserve(kDefaultNumberOfSuccessors); 639 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 640 } 641 642 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 643 return predecessors_; 644 } 645 646 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 647 return successors_; 648 } 649 650 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 651 return ContainsElement(successors_, block, start_from); 652 } 653 654 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 655 return dominated_blocks_; 656 } 657 658 bool IsEntryBlock() const { 659 return graph_->GetEntryBlock() == this; 660 } 661 662 bool IsExitBlock() const { 663 return graph_->GetExitBlock() == this; 664 } 665 666 bool IsSingleGoto() const; 667 bool IsSingleTryBoundary() const; 668 669 // Returns true if this block emits nothing but a jump. 670 bool IsSingleJump() const { 671 HLoopInformation* loop_info = GetLoopInformation(); 672 return (IsSingleGoto() || IsSingleTryBoundary()) 673 // Back edges generate a suspend check. 
674 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 675 } 676 677 void AddBackEdge(HBasicBlock* back_edge) { 678 if (loop_information_ == nullptr) { 679 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 680 } 681 DCHECK_EQ(loop_information_->GetHeader(), this); 682 loop_information_->AddBackEdge(back_edge); 683 } 684 685 HGraph* GetGraph() const { return graph_; } 686 void SetGraph(HGraph* graph) { graph_ = graph; } 687 688 uint32_t GetBlockId() const { return block_id_; } 689 void SetBlockId(int id) { block_id_ = id; } 690 uint32_t GetDexPc() const { return dex_pc_; } 691 692 HBasicBlock* GetDominator() const { return dominator_; } 693 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 694 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 695 696 void RemoveDominatedBlock(HBasicBlock* block) { 697 RemoveElement(dominated_blocks_, block); 698 } 699 700 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 701 ReplaceElement(dominated_blocks_, existing, new_block); 702 } 703 704 void ClearDominanceInformation(); 705 706 int NumberOfBackEdges() const { 707 return IsLoopHeader() ? 
loop_information_->NumberOfBackEdges() : 0; 708 } 709 710 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 711 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 712 const HInstructionList& GetInstructions() const { return instructions_; } 713 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 714 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 715 const HInstructionList& GetPhis() const { return phis_; } 716 717 void AddSuccessor(HBasicBlock* block) { 718 successors_.push_back(block); 719 block->predecessors_.push_back(this); 720 } 721 722 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 723 size_t successor_index = GetSuccessorIndexOf(existing); 724 existing->RemovePredecessor(this); 725 new_block->predecessors_.push_back(this); 726 successors_[successor_index] = new_block; 727 } 728 729 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 730 size_t predecessor_index = GetPredecessorIndexOf(existing); 731 existing->RemoveSuccessor(this); 732 new_block->successors_.push_back(this); 733 predecessors_[predecessor_index] = new_block; 734 } 735 736 // Insert `this` between `predecessor` and `successor. This method 737 // preserves the indicies, and will update the first edge found between 738 // `predecessor` and `successor`. 
739 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 740 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 741 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 742 successor->predecessors_[predecessor_index] = this; 743 predecessor->successors_[successor_index] = this; 744 successors_.push_back(successor); 745 predecessors_.push_back(predecessor); 746 } 747 748 void RemovePredecessor(HBasicBlock* block) { 749 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 750 } 751 752 void RemoveSuccessor(HBasicBlock* block) { 753 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 754 } 755 756 void ClearAllPredecessors() { 757 predecessors_.clear(); 758 } 759 760 void AddPredecessor(HBasicBlock* block) { 761 predecessors_.push_back(block); 762 block->successors_.push_back(this); 763 } 764 765 void SwapPredecessors() { 766 DCHECK_EQ(predecessors_.size(), 2u); 767 std::swap(predecessors_[0], predecessors_[1]); 768 } 769 770 void SwapSuccessors() { 771 DCHECK_EQ(successors_.size(), 2u); 772 std::swap(successors_[0], successors_[1]); 773 } 774 775 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 776 return IndexOfElement(predecessors_, predecessor); 777 } 778 779 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 780 return IndexOfElement(successors_, successor); 781 } 782 783 HBasicBlock* GetSinglePredecessor() const { 784 DCHECK_EQ(GetPredecessors().size(), 1u); 785 return GetPredecessors()[0]; 786 } 787 788 HBasicBlock* GetSingleSuccessor() const { 789 DCHECK_EQ(GetSuccessors().size(), 1u); 790 return GetSuccessors()[0]; 791 } 792 793 // Returns whether the first occurrence of `predecessor` in the list of 794 // predecessors is at index `idx`. 
795 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const { 796 DCHECK_EQ(GetPredecessors()[idx], predecessor); 797 return GetPredecessorIndexOf(predecessor) == idx; 798 } 799 800 // Returns the number of non-exceptional successors. SsaChecker ensures that 801 // these are stored at the beginning of the successor list. 802 size_t NumberOfNormalSuccessors() const { 803 return EndsWithTryBoundary() ? 1 : GetSuccessors().size(); 804 } 805 806 // Create a new block between this block and its predecessors. The new block 807 // is added to the graph, all predecessor edges are relinked to it and an edge 808 // is created to `this`. Returns the new empty block. Reverse post order or 809 // loop and try/catch information are not updated. 810 HBasicBlock* CreateImmediateDominator(); 811 812 // Split the block into two blocks just before `cursor`. Returns the newly 813 // created, latter block. Note that this method will add the block to the 814 // graph, create a Goto at the end of the former block and will create an edge 815 // between the blocks. It will not, however, update the reverse post order or 816 // loop and try/catch information. 817 HBasicBlock* SplitBefore(HInstruction* cursor); 818 819 // Split the block into two blocks just after `cursor`. Returns the newly 820 // created block. Note that this method just updates raw block information, 821 // like predecessors, successors, dominators, and instruction list. It does not 822 // update the graph, reverse post order, loop information, nor make sure the 823 // blocks are consistent (for example ending with a control flow instruction). 824 HBasicBlock* SplitAfter(HInstruction* cursor); 825 826 // Merge `other` at the end of `this`. Successors and dominated blocks of 827 // `other` are changed to be successors and dominated blocks of `this`. 
Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // A block is a loop header iff it is the header of the loop information it
  // points to (non-header blocks in a loop also point to their innermost
  // loop's information).
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Returns whether the loop's pre-header is the first predecessor of this
  // (loop header) block.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Returns the innermost loop information of this block, or nullptr if the
  // block is not inside a loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  // Returns whether this block has try/catch information marking it as a try block.
  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  // Returns whether this block has try/catch information marking it as a catch block.
  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions of this block. NOTE(review): presumably positions in
  // the linear order used by liveness analysis — confirm with the register
  // allocator code that calls the setters below.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop containing this block, or nullptr if not in a loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  // Moves to the enclosing loop by following the loop information of the
  // current loop's pre-header (nullptr when there is no outer loop).
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// Concrete instructions shared by all backends. `M` is applied to each
// (instruction, base class) pair.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(Condition, BinaryOperation)                                         \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Concrete instructions plus the abstract intermediate classes.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate every concrete instruction implements: kind, debug name,
// checked downcasts, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKind() const OVERRIDE { return k##type; }          \
  const char* DebugName() const OVERRIDE { return #type; }              \
  const H##type* As##type() const OVERRIDE { return this; }             \
  H##type* As##type() OVERRIDE { return this; }                         \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

template <typename T> class HUseList;

// Node of the doubly-linked use list (HUseList). `T` is the user type:
// HInstruction* for input uses, HEnvironment* for environment uses.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Only HUseList may create nodes.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Doubly-linked list of the uses of an instruction; nodes are arena-allocated.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list. The node itself is not freed
  // (it is arena-allocated).
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear scan: O(n) in the number of uses.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n): walks the whole list to count elements, hence "Slow".
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over a HUseList.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Completes `old_record` (which must not have a use node yet) with the
  // use-list node `use_node`.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
    : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // All 'change' and all 'depend on' bits set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are modeled conservatively as all writes and reads.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Removes `other`'s bits from this.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // Returns true if every bit set in `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if this may read something written by other.
  bool MayDependOn(SideEffects other) const {
    // Shift the 'depend on' bits down so they line up with the 'change' bits.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // One bit per non-void Primitive::Type (9 types, see the DFJISCBZL legend).
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Copies `to_copy`'s metadata (size, dex file, method index, dex pc, invoke
  // type) but not its vreg values; use CopyFrom for those.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a copy of `parent` (and its own parent chain) at the end of this
  // environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw setter: does not update the use list of `instruction`.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers recorded in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Static type information about a reference value: a class handle plus
// whether the runtime type is exactly that class or possibly a subclass.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() &&
!GetTypeHandle()->IsPrimitiveArray(); 1668 } 1669 1670 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1671 DCHECK(IsValid()); 1672 if (!IsExact()) return false; 1673 if (!IsArrayClass()) return false; 1674 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1675 } 1676 1677 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1678 DCHECK(IsValid()); 1679 if (!IsExact()) return false; 1680 if (!IsArrayClass()) return false; 1681 if (!rti.IsArrayClass()) return false; 1682 return GetTypeHandle()->GetComponentType()->IsAssignableFrom( 1683 rti.GetTypeHandle()->GetComponentType()); 1684 } 1685 1686 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 1687 1688 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1689 DCHECK(IsValid()); 1690 DCHECK(rti.IsValid()); 1691 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1692 } 1693 1694 // Returns true if the type information provide the same amount of details. 1695 // Note that it does not mean that the instructions have the same actual type 1696 // (because the type can be the result of a merge). 1697 bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) { 1698 if (!IsValid() && !rti.IsValid()) { 1699 // Invalid types are equal. 1700 return true; 1701 } 1702 if (!IsValid() || !rti.IsValid()) { 1703 // One is valid, the other not. 1704 return false; 1705 } 1706 return IsExact() == rti.IsExact() 1707 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 1708 } 1709 1710 private: 1711 ReferenceTypeInfo(); 1712 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 1713 1714 // The class of the object. 1715 TypeHandle type_handle_; 1716 // Whether or not the type is exact or a superclass of the actual type. 1717 // Whether or not we have any information about this type. 
1718 bool is_exact_; 1719}; 1720 1721std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 1722 1723class HInstruction : public ArenaObject<kArenaAllocInstruction> { 1724 public: 1725 HInstruction(SideEffects side_effects, uint32_t dex_pc) 1726 : previous_(nullptr), 1727 next_(nullptr), 1728 block_(nullptr), 1729 dex_pc_(dex_pc), 1730 id_(-1), 1731 ssa_index_(-1), 1732 environment_(nullptr), 1733 locations_(nullptr), 1734 live_interval_(nullptr), 1735 lifetime_position_(kNoLifetime), 1736 side_effects_(side_effects), 1737 reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {} 1738 1739 virtual ~HInstruction() {} 1740 1741#define DECLARE_KIND(type, super) k##type, 1742 enum InstructionKind { 1743 FOR_EACH_INSTRUCTION(DECLARE_KIND) 1744 }; 1745#undef DECLARE_KIND 1746 1747 HInstruction* GetNext() const { return next_; } 1748 HInstruction* GetPrevious() const { return previous_; } 1749 1750 HInstruction* GetNextDisregardingMoves() const; 1751 HInstruction* GetPreviousDisregardingMoves() const; 1752 1753 HBasicBlock* GetBlock() const { return block_; } 1754 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); } 1755 void SetBlock(HBasicBlock* block) { block_ = block; } 1756 bool IsInBlock() const { return block_ != nullptr; } 1757 bool IsInLoop() const { return block_->IsInLoop(); } 1758 bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); } 1759 1760 virtual size_t InputCount() const = 0; 1761 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); } 1762 1763 virtual void Accept(HGraphVisitor* visitor) = 0; 1764 virtual const char* DebugName() const = 0; 1765 1766 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1767 void SetRawInputAt(size_t index, HInstruction* input) { 1768 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1769 } 1770 1771 virtual bool NeedsEnvironment() const { return false; } 1772 1773 uint32_t GetDexPc() const { return 
dex_pc_; } 1774 1775 virtual bool IsControlFlow() const { return false; } 1776 1777 virtual bool CanThrow() const { return false; } 1778 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); } 1779 1780 bool HasSideEffects() const { return side_effects_.HasSideEffects(); } 1781 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); } 1782 1783 // Does not apply for all instructions, but having this at top level greatly 1784 // simplifies the null check elimination. 1785 // TODO: Consider merging can_be_null into ReferenceTypeInfo. 1786 virtual bool CanBeNull() const { 1787 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types"; 1788 return true; 1789 } 1790 1791 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj) const { 1792 UNUSED(obj); 1793 return false; 1794 } 1795 1796 void SetReferenceTypeInfo(ReferenceTypeInfo rti); 1797 1798 ReferenceTypeInfo GetReferenceTypeInfo() const { 1799 DCHECK_EQ(GetType(), Primitive::kPrimNot); 1800 return reference_type_info_; 1801 } 1802 1803 void AddUseAt(HInstruction* user, size_t index) { 1804 DCHECK(user != nullptr); 1805 HUseListNode<HInstruction*>* use = 1806 uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1807 user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use)); 1808 } 1809 1810 void AddEnvUseAt(HEnvironment* user, size_t index) { 1811 DCHECK(user != nullptr); 1812 HUseListNode<HEnvironment*>* env_use = 1813 env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1814 user->RecordEnvUse(env_use); 1815 } 1816 1817 void RemoveAsUserOfInput(size_t input) { 1818 HUserRecord<HInstruction*> input_use = InputRecordAt(input); 1819 input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode()); 1820 } 1821 1822 const HUseList<HInstruction*>& GetUses() const { return uses_; } 1823 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; } 1824 1825 bool HasUses() const { 
return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Same as CopyEnvironmentFrom, but for loop phis of `block` copies the
  // phi's first input instead (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other) const {
    UNUSED(other);
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other) const {
    UNUSED(other);
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
1925 bool Equals(HInstruction* other) const; 1926 1927 virtual InstructionKind GetKind() const = 0; 1928 1929 virtual size_t ComputeHashCode() const { 1930 size_t result = GetKind(); 1931 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1932 result = (result * 31) + InputAt(i)->GetId(); 1933 } 1934 return result; 1935 } 1936 1937 SideEffects GetSideEffects() const { return side_effects_; } 1938 1939 size_t GetLifetimePosition() const { return lifetime_position_; } 1940 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1941 LiveInterval* GetLiveInterval() const { return live_interval_; } 1942 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1943 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1944 1945 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 1946 1947 // Returns whether the code generation of the instruction will require to have access 1948 // to the current method. Such instructions are: 1949 // (1): Instructions that require an environment, as calling the runtime requires 1950 // to walk the stack and have the current method stored at a specific stack address. 1951 // (2): Object literals like classes and strings, that are loaded from the dex cache 1952 // fields of the current method. 1953 bool NeedsCurrentMethod() const { 1954 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 1955 } 1956 1957 virtual bool NeedsDexCache() const { return false; } 1958 1959 // Does this instruction have any use in an environment before 1960 // control flow hits 'other'? 1961 bool HasAnyEnvironmentUseBefore(HInstruction* other); 1962 1963 // Remove all references to environment uses of this instruction. 1964 // The caller must ensure that this is safe to do. 
1965 void RemoveEnvironmentUsers(); 1966 1967 protected: 1968 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 1969 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 1970 1971 private: 1972 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 1973 1974 HInstruction* previous_; 1975 HInstruction* next_; 1976 HBasicBlock* block_; 1977 const uint32_t dex_pc_; 1978 1979 // An instruction gets an id when it is added to the graph. 1980 // It reflects creation order. A negative id means the instruction 1981 // has not been added to the graph. 1982 int id_; 1983 1984 // When doing liveness analysis, instructions that have uses get an SSA index. 1985 int ssa_index_; 1986 1987 // List of instructions that have this instruction as input. 1988 HUseList<HInstruction*> uses_; 1989 1990 // List of environments that contain this instruction. 1991 HUseList<HEnvironment*> env_uses_; 1992 1993 // The environment associated with this instruction. Not null if the instruction 1994 // might jump out of the method. 1995 HEnvironment* environment_; 1996 1997 // Set by the code generator. 1998 LocationSummary* locations_; 1999 2000 // Set by the liveness analysis. 2001 LiveInterval* live_interval_; 2002 2003 // Set by the liveness analysis, this is the position in a linear 2004 // order of blocks where this instruction's live interval start. 2005 size_t lifetime_position_; 2006 2007 const SideEffects side_effects_; 2008 2009 // TODO: for primitive types this should be marked as invalid. 
2010 ReferenceTypeInfo reference_type_info_; 2011 2012 friend class GraphChecker; 2013 friend class HBasicBlock; 2014 friend class HEnvironment; 2015 friend class HGraph; 2016 friend class HInstructionList; 2017 2018 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2019}; 2020std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2021 2022class HInputIterator : public ValueObject { 2023 public: 2024 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2025 2026 bool Done() const { return index_ == instruction_->InputCount(); } 2027 HInstruction* Current() const { return instruction_->InputAt(index_); } 2028 void Advance() { index_++; } 2029 2030 private: 2031 HInstruction* instruction_; 2032 size_t index_; 2033 2034 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2035}; 2036 2037class HInstructionIterator : public ValueObject { 2038 public: 2039 explicit HInstructionIterator(const HInstructionList& instructions) 2040 : instruction_(instructions.first_instruction_) { 2041 next_ = Done() ? nullptr : instruction_->GetNext(); 2042 } 2043 2044 bool Done() const { return instruction_ == nullptr; } 2045 HInstruction* Current() const { return instruction_; } 2046 void Advance() { 2047 instruction_ = next_; 2048 next_ = Done() ? nullptr : instruction_->GetNext(); 2049 } 2050 2051 private: 2052 HInstruction* instruction_; 2053 HInstruction* next_; 2054 2055 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2056}; 2057 2058class HBackwardInstructionIterator : public ValueObject { 2059 public: 2060 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2061 : instruction_(instructions.last_instruction_) { 2062 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2063 } 2064 2065 bool Done() const { return instruction_ == nullptr; } 2066 HInstruction* Current() const { return instruction_; } 2067 void Advance() { 2068 instruction_ = next_; 2069 next_ = Done() ? 
nullptr : instruction_->GetPrevious(); 2070 } 2071 2072 private: 2073 HInstruction* instruction_; 2074 HInstruction* next_; 2075 2076 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2077}; 2078 2079template<size_t N> 2080class HTemplateInstruction: public HInstruction { 2081 public: 2082 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc) 2083 : HInstruction(side_effects, dex_pc), inputs_() {} 2084 virtual ~HTemplateInstruction() {} 2085 2086 size_t InputCount() const OVERRIDE { return N; } 2087 2088 protected: 2089 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2090 DCHECK_LT(i, N); 2091 return inputs_[i]; 2092 } 2093 2094 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2095 DCHECK_LT(i, N); 2096 inputs_[i] = input; 2097 } 2098 2099 private: 2100 std::array<HUserRecord<HInstruction*>, N> inputs_; 2101 2102 friend class SsaBuilder; 2103}; 2104 2105// HTemplateInstruction specialization for N=0. 2106template<> 2107class HTemplateInstruction<0>: public HInstruction { 2108 public: 2109 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc) 2110 : HInstruction(side_effects, dex_pc) {} 2111 2112 virtual ~HTemplateInstruction() {} 2113 2114 size_t InputCount() const OVERRIDE { return 0; } 2115 2116 protected: 2117 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2118 LOG(FATAL) << "Unreachable"; 2119 UNREACHABLE(); 2120 } 2121 2122 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2123 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2124 LOG(FATAL) << "Unreachable"; 2125 UNREACHABLE(); 2126 } 2127 2128 private: 2129 friend class SsaBuilder; 2130}; 2131 2132template<intptr_t N> 2133class HExpression : public HTemplateInstruction<N> { 2134 public: 2135 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc) 2136 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 
2137 virtual ~HExpression() {} 2138 2139 Primitive::Type GetType() const OVERRIDE { return type_; } 2140 2141 protected: 2142 Primitive::Type type_; 2143}; 2144 2145// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2146// instruction that branches to the exit block. 2147class HReturnVoid : public HTemplateInstruction<0> { 2148 public: 2149 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2150 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2151 2152 bool IsControlFlow() const OVERRIDE { return true; } 2153 2154 DECLARE_INSTRUCTION(ReturnVoid); 2155 2156 private: 2157 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2158}; 2159 2160// Represents dex's RETURN opcodes. A HReturn is a control flow 2161// instruction that branches to the exit block. 2162class HReturn : public HTemplateInstruction<1> { 2163 public: 2164 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2165 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2166 SetRawInputAt(0, value); 2167 } 2168 2169 bool IsControlFlow() const OVERRIDE { return true; } 2170 2171 DECLARE_INSTRUCTION(Return); 2172 2173 private: 2174 DISALLOW_COPY_AND_ASSIGN(HReturn); 2175}; 2176 2177// The exit instruction is the only instruction of the exit block. 2178// Instructions aborting the method (HThrow and HReturn) must branch to the 2179// exit block. 2180class HExit : public HTemplateInstruction<0> { 2181 public: 2182 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2183 2184 bool IsControlFlow() const OVERRIDE { return true; } 2185 2186 DECLARE_INSTRUCTION(Exit); 2187 2188 private: 2189 DISALLOW_COPY_AND_ASSIGN(HExit); 2190}; 2191 2192// Jumps from one block to another. 
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The single successor the goto jumps to; the block must have exactly one.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};

// Base class for all constant instructions. Constants have no side effects
// and can always be moved.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  virtual bool IsMinusOne() const { return false; }
  virtual bool IsZero() const { return false; }
  virtual bool IsOne() const { return false; }

  // The constant's raw bit pattern, widened to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

// The reference constant `null`. Constructed only by HGraph (note the
// private constructor and the HGraph friendship), so a graph holds a
// single canonical instance.
class HNullConstant : public HConstant {
 public:
  // All null constants are interchangeable.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }

  size_t ComputeHashCode() const OVERRIDE { return 0; }

  DECLARE_INSTRUCTION(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
};

// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend the 32-bit value; the double cast avoids sign extension.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructors are private: int constants are created (and de-duplicated)
  // through HGraph.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Private: long constants are created through HGraph.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the input evaluates to true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the input evaluates to false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Iterator over exception handlers of a given HTryBoundary, i.e. over
// exceptional successors of its basic block.
class HExceptionHandlerIterator : public ValueObject {
 public:
  explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary)
    : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {}

  bool Done() const { return index_ == block_.GetSuccessors().size(); }
  HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; }
  size_t CurrentSuccessorIndex() const { return index_; }
  void Advance() { ++index_; }

 private:
  const HBasicBlock& block_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  // Transfers to the runtime, so an environment is required and the
  // instruction counts as throwing.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value of the first switch table entry.
  int32_t GetStartValue() const { return start_value_; }

  // Number of switch table entries (excluding the default successor).
  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Base class for operations with one input and a typed result.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Unary operations carry no extra data; equality is decided by kind/inputs.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;

  DECLARE_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Base class for operations with two inputs and a typed result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if inputs are identical or already canonically ordered.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Binary operations carry no extra data by default; subclasses with state
  // (e.g. a comparison bias) override this.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // Mixed-width evaluations default to "not foldable" (shifts override them).
  virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED,
                              HLongConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (int, long) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
    return nullptr;
  }

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
};

// Base class for boolean-valued comparisons between two inputs.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
        needs_materialization_(true),
        bias_(ComparisonBias::kNoBias) {}

  bool NeedsMaterialization() const { return needs_materialization_; }
  void ClearNeedsMaterialization() { needs_materialization_ = false; }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }

  void SetBias(ComparisonBias bias) { bias_ = bias; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return bias_ == other->AsCondition()->bias_;
  }

  // Whether this FP condition evaluates to true when either input is NaN,
  // given the condition kind and the bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE);
  }

  // Whether this FP condition evaluates to false when either input is NaN.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ);
  }

 private:
  // For register allocation purposes, returns whether this instruction needs to be
  // materialized (that is, not just be in the processor flags).
  bool needs_materialization_;

  // Needed if we merge a HCompare into a HCondition.
  ComparisonBias bias_;

  DISALLOW_COPY_AND_ASSIGN(HCondition);
};

// Instruction to check if two inputs are equal to each other.
class HEqual : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> bool Compute(T x, T y) const { return x == y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HEqual);
};

// Instruction to check if two inputs are not equal to each other.
class HNotEqual : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> bool Compute(T x, T y) const { return x != y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

// Instruction to check if the first input is less than the second.
class HLessThan : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  template <typename T> bool Compute(T x, T y) const { return x < y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

// Instruction to check if the first input is less than or equal to the second.
class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

// Instruction to check if the first input is greater than the second.
class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  template <typename T> bool Compute(T x, T y) const { return x > y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

// Instruction to check if the first input is greater than or equal to the second.
class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  template <typename T> bool Compute(T x, T y) const { return x >= y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition()
const OVERRIDE { 2827 return kCondLT; 2828 } 2829 2830 private: 2831 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2832}; 2833 2834 2835// Instruction to check how two inputs compare to each other. 2836// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 2837class HCompare : public HBinaryOperation { 2838 public: 2839 HCompare(Primitive::Type type, 2840 HInstruction* first, 2841 HInstruction* second, 2842 ComparisonBias bias, 2843 uint32_t dex_pc) 2844 : HBinaryOperation(Primitive::kPrimInt, 2845 first, 2846 second, 2847 SideEffectsForArchRuntimeCalls(type), 2848 dex_pc), 2849 bias_(bias) { 2850 DCHECK_EQ(type, first->GetType()); 2851 DCHECK_EQ(type, second->GetType()); 2852 } 2853 2854 template <typename T> 2855 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 2856 2857 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2858 return GetBlock()->GetGraph()->GetIntConstant( 2859 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2860 } 2861 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2862 return GetBlock()->GetGraph()->GetIntConstant( 2863 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2864 } 2865 2866 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2867 return bias_ == other->AsCompare()->bias_; 2868 } 2869 2870 ComparisonBias GetBias() const { return bias_; } 2871 2872 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 2873 2874 2875 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 2876 // MIPS64 uses a runtime call for FP comparisons. 2877 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 2878 } 2879 2880 DECLARE_INSTRUCTION(Compare); 2881 2882 private: 2883 const ComparisonBias bias_; 2884 2885 DISALLOW_COPY_AND_ASSIGN(HCompare); 2886}; 2887 2888// A local in the graph. Corresponds to a Dex register. 
2889class HLocal : public HTemplateInstruction<0> { 2890 public: 2891 explicit HLocal(uint16_t reg_number) 2892 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 2893 2894 DECLARE_INSTRUCTION(Local); 2895 2896 uint16_t GetRegNumber() const { return reg_number_; } 2897 2898 private: 2899 // The Dex register number. 2900 const uint16_t reg_number_; 2901 2902 DISALLOW_COPY_AND_ASSIGN(HLocal); 2903}; 2904 2905// Load a given local. The local is an input of this instruction. 2906class HLoadLocal : public HExpression<1> { 2907 public: 2908 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2909 : HExpression(type, SideEffects::None(), dex_pc) { 2910 SetRawInputAt(0, local); 2911 } 2912 2913 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 2914 2915 DECLARE_INSTRUCTION(LoadLocal); 2916 2917 private: 2918 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 2919}; 2920 2921// Store a value in a given local. This instruction has two inputs: the value 2922// and the local. 
2923class HStoreLocal : public HTemplateInstruction<2> { 2924 public: 2925 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 2926 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2927 SetRawInputAt(0, local); 2928 SetRawInputAt(1, value); 2929 } 2930 2931 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 2932 2933 DECLARE_INSTRUCTION(StoreLocal); 2934 2935 private: 2936 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 2937}; 2938 2939class HFloatConstant : public HConstant { 2940 public: 2941 float GetValue() const { return value_; } 2942 2943 uint64_t GetValueAsUint64() const OVERRIDE { 2944 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 2945 } 2946 2947 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2948 DCHECK(other->IsFloatConstant()); 2949 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 2950 } 2951 2952 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2953 2954 bool IsMinusOne() const OVERRIDE { 2955 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 2956 } 2957 bool IsZero() const OVERRIDE { 2958 return value_ == 0.0f; 2959 } 2960 bool IsOne() const OVERRIDE { 2961 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 2962 } 2963 bool IsNaN() const { 2964 return std::isnan(value_); 2965 } 2966 2967 DECLARE_INSTRUCTION(FloatConstant); 2968 2969 private: 2970 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 2971 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 2972 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2973 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 2974 2975 const float value_; 2976 2977 // Only the SsaBuilder and HGraph can create floating-point constants. 
2978 friend class SsaBuilder; 2979 friend class HGraph; 2980 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 2981}; 2982 2983class HDoubleConstant : public HConstant { 2984 public: 2985 double GetValue() const { return value_; } 2986 2987 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 2988 2989 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2990 DCHECK(other->IsDoubleConstant()); 2991 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 2992 } 2993 2994 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2995 2996 bool IsMinusOne() const OVERRIDE { 2997 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 2998 } 2999 bool IsZero() const OVERRIDE { 3000 return value_ == 0.0; 3001 } 3002 bool IsOne() const OVERRIDE { 3003 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3004 } 3005 bool IsNaN() const { 3006 return std::isnan(value_); 3007 } 3008 3009 DECLARE_INSTRUCTION(DoubleConstant); 3010 3011 private: 3012 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3013 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3014 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3015 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3016 3017 const double value_; 3018 3019 // Only the SsaBuilder and HGraph can create floating-point constants. 
3020 friend class SsaBuilder; 3021 friend class HGraph; 3022 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3023}; 3024 3025enum class Intrinsics { 3026#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name, 3027#include "intrinsics_list.h" 3028 kNone, 3029 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3030#undef INTRINSICS_LIST 3031#undef OPTIMIZING_INTRINSICS 3032}; 3033std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3034 3035enum IntrinsicNeedsEnvironmentOrCache { 3036 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3037 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3038}; 3039 3040class HInvoke : public HInstruction { 3041 public: 3042 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3043 3044 bool NeedsEnvironment() const OVERRIDE; 3045 3046 void SetArgumentAt(size_t index, HInstruction* argument) { 3047 SetRawInputAt(index, argument); 3048 } 3049 3050 // Return the number of arguments. This number can be lower than 3051 // the number of inputs returned by InputCount(), as some invoke 3052 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3053 // inputs at the end of their list of inputs. 
3054 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3055 3056 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3057 3058 3059 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3060 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3061 3062 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3063 3064 Intrinsics GetIntrinsic() const { 3065 return intrinsic_; 3066 } 3067 3068 void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache); 3069 3070 bool IsFromInlinedInvoke() const { 3071 return GetEnvironment()->GetParent() != nullptr; 3072 } 3073 3074 bool CanThrow() const OVERRIDE { return true; } 3075 3076 uint32_t* GetIntrinsicOptimizations() { 3077 return &intrinsic_optimizations_; 3078 } 3079 3080 const uint32_t* GetIntrinsicOptimizations() const { 3081 return &intrinsic_optimizations_; 3082 } 3083 3084 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3085 3086 DECLARE_INSTRUCTION(Invoke); 3087 3088 protected: 3089 HInvoke(ArenaAllocator* arena, 3090 uint32_t number_of_arguments, 3091 uint32_t number_of_other_inputs, 3092 Primitive::Type return_type, 3093 uint32_t dex_pc, 3094 uint32_t dex_method_index, 3095 InvokeType original_invoke_type) 3096 : HInstruction( 3097 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 
3098 number_of_arguments_(number_of_arguments), 3099 inputs_(number_of_arguments + number_of_other_inputs, 3100 arena->Adapter(kArenaAllocInvokeInputs)), 3101 return_type_(return_type), 3102 dex_method_index_(dex_method_index), 3103 original_invoke_type_(original_invoke_type), 3104 intrinsic_(Intrinsics::kNone), 3105 intrinsic_optimizations_(0) { 3106 } 3107 3108 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3109 return inputs_[index]; 3110 } 3111 3112 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3113 inputs_[index] = input; 3114 } 3115 3116 uint32_t number_of_arguments_; 3117 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3118 const Primitive::Type return_type_; 3119 const uint32_t dex_method_index_; 3120 const InvokeType original_invoke_type_; 3121 Intrinsics intrinsic_; 3122 3123 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3124 uint32_t intrinsic_optimizations_; 3125 3126 private: 3127 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3128}; 3129 3130class HInvokeUnresolved : public HInvoke { 3131 public: 3132 HInvokeUnresolved(ArenaAllocator* arena, 3133 uint32_t number_of_arguments, 3134 Primitive::Type return_type, 3135 uint32_t dex_pc, 3136 uint32_t dex_method_index, 3137 InvokeType invoke_type) 3138 : HInvoke(arena, 3139 number_of_arguments, 3140 0u /* number_of_other_inputs */, 3141 return_type, 3142 dex_pc, 3143 dex_method_index, 3144 invoke_type) { 3145 } 3146 3147 DECLARE_INSTRUCTION(InvokeUnresolved); 3148 3149 private: 3150 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3151}; 3152 3153class HInvokeStaticOrDirect : public HInvoke { 3154 public: 3155 // Requirements of this method call regarding the class 3156 // initialization (clinit) check of its declaring class. 3157 enum class ClinitCheckRequirement { 3158 kNone, // Class already initialized. 3159 kExplicit, // Static call having explicit clinit check as last input. 
    kImplicit,   // Static call implicitly requiring a clinit check.
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Use ArtMethod* at an address that will be known at link time, embed the direct
    // address in the code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kDirectAddressWithFixup,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use PC-relative call instruction patched at link time.
    // Used for calls within an oat file, boot->boot or app->app.
    kCallPCRelative,

    // Call to a known target address, embed the direct address in code.
    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
    kCallDirect,

    // Call to a target address that will be known at link time, embed the direct
    // address in code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kCallDirectWithFixup,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    const MethodLoadKind method_load_kind;
    const CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    const uint64_t method_load_data;
    const uint64_t direct_code_ptr;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and one other
                // if the method is a string factory.
                1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u)
                   + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        invoke_type_(invoke_type),
        clinit_check_requirement_(clinit_check_requirement),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    UNUSED(obj);
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return return_type_ == Primitive::kPrimNot && !IsStringInit();
  }

  InvokeType GetInvokeType() const { return invoke_type_; }
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCache() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  // The HCurrentMethod input is placed right after the declared arguments.
  uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelDexCache() const { return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }

  // Note: implicitly narrows the uint64_t method_load_data to int32_t;
  // presumably entrypoint offsets always fit — confirm at call sites.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  // Note: implicitly narrows the uint64_t method_load_data to uint32_t;
  // presumably dex cache array offsets always fit — confirm at call sites.
  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  // Remove the art::HLoadClass instruction set as last input by
  // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of
  // the initial art::HClinitCheck instruction (only relevant for
  // static calls with explicit clinit check).
  void RemoveLoadClassAsLastInput() {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    // The check is now implicit: downgrade the requirement accordingly.
    clinit_check_requirement_ = ClinitCheckRequirement::kImplicit;
    DCHECK(IsStaticWithImplicitClinitCheck());
  }

  // Returns whether this string-init invoke still carries `str` as its
  // trailing HFakeString input.
  bool IsStringFactoryFor(HFakeString* str) const {
    if (!IsStringInit()) return false;
    // +1 for the current method.
    if (InputCount() == (number_of_arguments_ + 1)) return false;
    return InputAt(InputCount() - 1)->AsFakeString() == str;
  }

  void RemoveFakeStringArgumentAsLastInput() {
    DCHECK(IsStringInit());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsFakeString()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }

 private:
  const InvokeType invoke_type_;
  ClinitCheckRequirement clinit_check_requirement_;
  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};

// Invoke dispatched through the receiver's vtable.
class HInvokeVirtual : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 uint32_t vtable_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
        vtable_index_(vtable_index) {}

  // The receiver (input 0) is dereferenced by the dispatch itself, so a null
  // check on it can be folded into the call — except for intrinsified calls.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};

// Invoke dispatched through the interface method table (IMT).
class HInvokeInterface : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   uint32_t imt_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  // NOTE(review): hides HInvoke::GetDexMethodIndex; both read the same
  // base-class field, so behavior is identical — presumably intentional,
  // confirm before removing either declaration.
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};

// Allocates an object of the given type; input 0 is the HCurrentMethod used
// by the runtime entrypoint to resolve the class.
class HNewInstance : public HExpression<1> {
 public:
  HNewInstance(HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  // It may throw when called on:
  //   - interfaces
  //   - abstract/inaccessible/unknown classes
  // TODO: optimize when possible.
  bool CanThrow() const OVERRIDE { return true; }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewInstance);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};

// Arithmetic negation of its single input.
class HNeg : public HUnaryOperation {
 public:
  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  template <typename T> T Compute(T x) const { return -x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Neg);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNeg);
};

// Allocates an array; input 0 is the length, input 1 the HCurrentMethod used
// by the runtime entrypoint to resolve the array class.
class HNewArray : public HExpression<2> {
 public:
  HNewArray(HInstruction* length,
            HCurrentMethod* current_method,
            uint32_t dex_pc,
            uint16_t type_index,
            const DexFile& dex_file,
            QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, length);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};

// Two's-complement addition of its two inputs.
class HAdd : public HBinaryOperation {
 public:
  HAdd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x + y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Add);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAdd);
};

// Two's-complement subtraction of its two inputs.
class HSub : public HBinaryOperation {
 public:
  HSub(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T> T Compute(T x, T y) const { return x - y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Two's-complement multiplication of its two inputs.
class HMul : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Division; a zero divisor is excluded by a preceding HDivZeroCheck, and the
// code generator may fall back to a runtime call (hence CanTriggerGC).
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Remainder; same divisor/runtime-call considerations as HDiv.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Throws ArithmeticException at runtime if its input (the divisor) is zero;
// otherwise passes the value through (GetType mirrors the input's type).
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Left shift; the shift amount is masked with max_shift_value per Java
// semantics (only the low 5 bits for int, 6 bits for long, are significant).
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const
      OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Arithmetic (sign-extending) right shift; shift amount masked as in HShl.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Logical (zero-extending) right shift: the value is reinterpreted as
// unsigned before shifting; shift amount masked as in HShl.
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND; mixed int/long operand widths are folded to a long result.
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR.
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction*
right, 3860 uint32_t dex_pc = kNoDexPc) 3861 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3862 3863 bool IsCommutative() const OVERRIDE { return true; } 3864 3865 template <typename T, typename U> 3866 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 3867 3868 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3869 return GetBlock()->GetGraph()->GetIntConstant( 3870 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3871 } 3872 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 3873 return GetBlock()->GetGraph()->GetLongConstant( 3874 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3875 } 3876 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3877 return GetBlock()->GetGraph()->GetLongConstant( 3878 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3879 } 3880 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3881 return GetBlock()->GetGraph()->GetLongConstant( 3882 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3883 } 3884 3885 DECLARE_INSTRUCTION(Or); 3886 3887 private: 3888 DISALLOW_COPY_AND_ASSIGN(HOr); 3889}; 3890 3891class HXor : public HBinaryOperation { 3892 public: 3893 HXor(Primitive::Type result_type, 3894 HInstruction* left, 3895 HInstruction* right, 3896 uint32_t dex_pc = kNoDexPc) 3897 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3898 3899 bool IsCommutative() const OVERRIDE { return true; } 3900 3901 template <typename T, typename U> 3902 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 3903 3904 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3905 return GetBlock()->GetGraph()->GetIntConstant( 3906 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3907 } 3908 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 3909 return GetBlock()->GetGraph()->GetLongConstant( 3910 Compute(x->GetValue(), y->GetValue()), 
GetDexPc()); 3911 } 3912 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3913 return GetBlock()->GetGraph()->GetLongConstant( 3914 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3915 } 3916 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3917 return GetBlock()->GetGraph()->GetLongConstant( 3918 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3919 } 3920 3921 DECLARE_INSTRUCTION(Xor); 3922 3923 private: 3924 DISALLOW_COPY_AND_ASSIGN(HXor); 3925}; 3926 3927// The value of a parameter in this method. Its location depends on 3928// the calling convention. 3929class HParameterValue : public HExpression<0> { 3930 public: 3931 HParameterValue(uint8_t index, 3932 Primitive::Type parameter_type, 3933 bool is_this = false) 3934 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 3935 index_(index), 3936 is_this_(is_this), 3937 can_be_null_(!is_this) {} 3938 3939 uint8_t GetIndex() const { return index_; } 3940 3941 bool CanBeNull() const OVERRIDE { return can_be_null_; } 3942 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 3943 3944 bool IsThis() const { return is_this_; } 3945 3946 DECLARE_INSTRUCTION(ParameterValue); 3947 3948 private: 3949 // The index of this parameter in the parameters list. Must be less 3950 // than HGraph::number_of_in_vregs_. 3951 const uint8_t index_; 3952 3953 // Whether or not the parameter value corresponds to 'this' argument. 
  const bool is_this_;

  // Mutable: starts as `!is_this` and can be refined through SetCanBeNull.
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement (`~`) of an int or long value.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Boolean negation. The input must be 0 or 1 (enforced by the DCHECK in
// Compute); not defined for long values.
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  template <typename T> bool Compute(T x) const {
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

// Primitive type conversion (e.g. int-to-float). The input and result
// types must differ (see the DCHECK in the constructor).
class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86 and ARM code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

// Sentinel register number for instructions not tied to a dex register.
static constexpr uint32_t kNoRegNumber = -1;

// Phi instruction: joins values for the same dex register coming from
// different predecessors. Inputs are stored in an arena-allocated vector.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(type),
        is_live_(false),
        can_be_null_(true) {
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type type) { type_ = type; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return is_live_; }

  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Runtime null check on its single input; can throw, so it needs an
// environment. The instruction itself is known non-null.
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Immutable description of a field access: offset, type, volatility, and
// the dex file / dex cache identifying the field.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Read of an instance field. Movable only when the field is non-volatile.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(
            field_type,
            SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only offsets within the first page can fault on a null base object.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Write of an instance field. Inputs: (0) object, (1) value.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(
            SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Read of an array element. Inputs: (0) array, (1) index.
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    UNUSED(obj);
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  void SetType(Primitive::Type type) { type_ = type; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

// Write of an array element. Inputs: (0) array, (1) index, (2) value.
// May need a type check (and thus can throw) when storing a reference.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())), dex_pc),
        expected_component_type_(expected_component_type),
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    UNUSED(obj);
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    needs_type_check_ = false;
  }

  void ClearValueCanBeNull() {
    value_can_be_null_ = false;
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    static_type_of_array_is_object_array_ = true;
  }

  bool GetValueCanBeNull() const { return value_can_be_null_; }
  bool NeedsTypeCheck() const { return needs_type_check_; }
  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : expected_component_type_;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  const Primitive::Type expected_component_type_;
  bool needs_type_check_;
  bool value_can_be_null_;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  bool static_type_of_array_is_object_array_;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

// Length of an array (always an int, with no side-effect dependencies).
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};

// Bounds check of an int index against an array length; can throw, so it
// needs an environment. Inputs: (0) index, (1) length.
class HBoundsCheck : public HExpression<2> {
 public:
  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
      : HExpression(index->GetType(), SideEffects::None(), dex_pc) {
    DCHECK(index->GetType() == Primitive::kPrimInt);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};

/**
 * Some DEX instructions are folded into multiple HInstructions that need
 * to stay live until the last HInstruction. This class
 * is used as a marker for the baseline compiler to ensure its preceding
 * HInstruction stays live. `index` represents the stack location index of the
 * instruction (the actual offset is computed as index * vreg_size).
 */
class HTemporary : public HTemplateInstruction<0> {
 public:
  explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {}

  size_t GetIndex() const { return index_; }

  Primitive::Type GetType() const OVERRIDE {
    // The previous instruction is the one that will be stored in the temporary location.
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Suspend point. Needs an environment, and its side effects model a
// possible GC (SideEffects::CanTriggerGC).
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    // Will call runtime and load the class if the class is not loaded yet.
    // TODO: finer grain decision.
    return !is_referrers_class_;
  }

  bool MustGenerateClinitCheck() const {
    return generate_clinit_check_;
  }
  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    generate_clinit_check_ = generate_clinit_check;
  }

  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_;
  }

  bool NeedsAccessCheck() const {
    return needs_access_check_;
  }

  bool CanThrow() const OVERRIDE {
    // May call runtime and therefore can throw.
    // TODO: finer grain decision.
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCache() const OVERRIDE { return !is_referrers_class_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool is_referrers_class_;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  bool generate_clinit_check_;
  bool needs_access_check_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};

// Instruction to load a String object from the dex cache.
class HLoadString : public HExpression<1> {
 public:
  HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        string_index_(string_index) {
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return other->AsLoadString()->string_index_ == string_index_;
  }

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  uint32_t GetStringIndex() const { return string_index_; }

  // TODO: Can we deopt or debug when we resolve a string?
  bool NeedsEnvironment() const OVERRIDE { return false; }
  bool NeedsDexCache() const OVERRIDE { return true; }
  bool CanBeNull() const OVERRIDE { return false; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  const uint32_t string_index_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    UNUSED(other);
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Read of a static field; input 0 is the class. Movable only when the
// field is non-volatile.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(
            field_type,
            SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache) {
    SetRawInputAt(0, cls);
  }

  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 4721}; 4722 4723class HStaticFieldSet : public HTemplateInstruction<2> { 4724 public: 4725 HStaticFieldSet(HInstruction* cls, 4726 HInstruction* value, 4727 Primitive::Type field_type, 4728 MemberOffset field_offset, 4729 bool is_volatile, 4730 uint32_t field_idx, 4731 const DexFile& dex_file, 4732 Handle<mirror::DexCache> dex_cache, 4733 uint32_t dex_pc) 4734 : HTemplateInstruction( 4735 SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc), 4736 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache), 4737 value_can_be_null_(true) { 4738 SetRawInputAt(0, cls); 4739 SetRawInputAt(1, value); 4740 } 4741 4742 const FieldInfo& GetFieldInfo() const { return field_info_; } 4743 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4744 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4745 bool IsVolatile() const { return field_info_.IsVolatile(); } 4746 4747 HInstruction* GetValue() const { return InputAt(1); } 4748 bool GetValueCanBeNull() const { return value_can_be_null_; } 4749 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4750 4751 DECLARE_INSTRUCTION(StaticFieldSet); 4752 4753 private: 4754 const FieldInfo field_info_; 4755 bool value_can_be_null_; 4756 4757 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 4758}; 4759 4760class HUnresolvedInstanceFieldGet : public HExpression<1> { 4761 public: 4762 HUnresolvedInstanceFieldGet(HInstruction* obj, 4763 Primitive::Type field_type, 4764 uint32_t field_index, 4765 uint32_t dex_pc) 4766 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 4767 field_index_(field_index) { 4768 SetRawInputAt(0, obj); 4769 } 4770 4771 bool NeedsEnvironment() const OVERRIDE { return true; } 4772 bool CanThrow() const OVERRIDE { return true; } 4773 4774 Primitive::Type GetFieldType() const { return GetType(); } 4775 uint32_t GetFieldIndex() const { return field_index_; } 4776 4777 
  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};

// Write of an instance field whose field reference was not resolved at
// compile time; needs an environment and can throw.
class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
};

// Read of a static field whose field reference was not resolved at
// compile time; needs an environment and can throw.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};

// Write of a static field whose field reference was not resolved at
// compile time; needs an environment and can throw.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};

// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};

// Throws the exception object given as input 0; ends control flow.
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};

/**
 * Implementation strategies for the code generator of a HInstanceOf
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck             // No optimization yet when checking against a generic array.
};

// Implements instance-of: produces a boolean telling whether the object in
// input 0 is an instance of the class loaded by input 1.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return false;
  }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return (check_kind == TypeCheckKind::kExactCheck)
        ? SideEffects::None()
        // Mips currently does runtime calls for any other checks.
        : SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

class HBoundType : public HExpression<1> {
 public:
  // Constructs an HBoundType with the given upper_bound.
  // Ensures that the upper_bound is valid.
  HBoundType(HInstruction* input,
             ReferenceTypeInfo upper_bound,
             bool upper_can_be_null,
             uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(upper_bound),
        upper_can_be_null_(upper_can_be_null),
        can_be_null_(upper_can_be_null) {
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
    SetReferenceTypeInfo(upper_bound_);
  }

  // GetUpper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return upper_can_be_null_; }

  void SetCanBeNull(bool can_be_null) {
    // Nullability can only be narrowed; it must never exceed the upper constraint.
    DCHECK(upper_can_be_null_ || !can_be_null);
    can_be_null_ = can_be_null;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  // if (x instanceof ClassX) {
  //   // upper_bound_ will be ClassX
  // }
  const ReferenceTypeInfo upper_bound_;
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then can_be_null_ cannot be true).
  const bool upper_can_be_null_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// Implements check-cast: throws if the object in input 0 is not an instance
// of the class loaded by input 1.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

// Emits a memory barrier of the given kind.
class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
        barrier_kind_(barrier_kind) {}

  MemBarrierKind GetBarrierKind() { return barrier_kind_; }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  const MemBarrierKind barrier_kind_;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};

// Implements monitor-enter / monitor-exit on the object in input 0.
class HMonitorOperation : public HTemplateInstruction<1> {
 public:
  enum OperationKind {
    kEnter,
    kExit,
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
      : HTemplateInstruction(
            SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
        kind_(kind) {
    SetRawInputAt(0, object);
  }

  // Instruction may throw a Java exception, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return CanThrow(); }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }

  bool IsEnter() const { return kind_ == kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 private:
  const OperationKind kind_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
};

/**
 * A HInstruction used as a marker for the replacement of new + <init>
 * of a String to a call to a StringFactory. Only baseline will see
 * the node at code generation, where it will be treated as null.
 * When compiling non-baseline, `HFakeString` instructions are being removed
 * in the instruction simplifier.
 */
class HFakeString : public HTemplateInstruction<0> {
 public:
  explicit HFakeString(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; }

  DECLARE_INSTRUCTION(FakeString);

 private:
  DISALLOW_COPY_AND_ASSIGN(HFakeString);
};

// A single source -> destination move; element of an HParallelMove.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return destination_.IsInvalid() && !source_.IsInvalid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must appear to happen in parallel. Destinations of the
// moves must not overlap (checked by AddMove in debug builds).
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base visitor over the instructions of an HGraph. Subclasses override the
// generated Visit##name methods for the instruction kinds they care about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction) { UNUSED(instruction); }
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super) \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};

class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};

// Iterates over the blocks of a graph in insertion order.
class HInsertionOrderIterator : public ValueObject {
 public:
  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}

  bool Done() const { return index_ == graph_.GetBlocks().size(); }
  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
};

// Iterates over the blocks of a graph in reverse post-order.
class HReversePostOrderIterator : public ValueObject {
 public:
  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
};

// Iterates over the blocks of a graph in post-order, i.e. walks the
// reverse-post-order list backwards.
class HPostOrderIterator : public ValueObject {
 public:
  explicit HPostOrderIterator(const HGraph& graph)
      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == 0; }
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
  void Advance() { --index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
};

// Iterates over the linear order of a graph, backwards.
class HLinearPostOrderIterator : public ValueObject {
 public:
  explicit HLinearPostOrderIterator(const HGraph& graph)
      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}

  bool Done() const { return index_ == 0; }

  HBasicBlock* Current() const { return order_[index_ - 1u]; }

  void Advance() {
    --index_;
    // NOTE(review): index_ is an unsigned size_t, so this check can never fail;
    // it documents intent only.
    DCHECK_GE(index_, 0U);
  }

 private:
  const ArenaVector<HBasicBlock*>& order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
};

// Iterates over the linear order of a graph, forwards.
class HLinearOrderIterator : public ValueObject {
 public:
  explicit HLinearOrderIterator(const HGraph& graph)
      : order_(graph.GetLinearOrder()), index_(0) {}

  bool Done() const { return index_ == order_.size(); }
  HBasicBlock* Current() const { return order_[index_]; }
  void Advance() { ++index_; }

 private:
  const ArenaVector<HBasicBlock*>& order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip forward to the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip forward to the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Returns the 64-bit value of an int or long constant (ints are sign-extended
// by the implicit int32 -> int64 conversion).
inline int64_t Int64FromConstant(HConstant* constant) {
  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
  return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                   : constant->AsLongConstant()->GetValue();
}

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_NODES_H_