nodes.h revision 0f7dca4ca0be8d2f8776794d35edf8b51b5bc997
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_bit_vector.h" 25#include "base/arena_containers.h" 26#include "base/arena_object.h" 27#include "base/stl_util.h" 28#include "dex/compiler_enums.h" 29#include "entrypoints/quick/quick_entrypoints_enum.h" 30#include "handle.h" 31#include "handle_scope.h" 32#include "invoke_type.h" 33#include "locations.h" 34#include "method_reference.h" 35#include "mirror/class.h" 36#include "offsets.h" 37#include "primitive.h" 38 39namespace art { 40 41class GraphChecker; 42class HBasicBlock; 43class HCurrentMethod; 44class HDoubleConstant; 45class HEnvironment; 46class HFakeString; 47class HFloatConstant; 48class HGraphBuilder; 49class HGraphVisitor; 50class HInstruction; 51class HIntConstant; 52class HInvoke; 53class HLongConstant; 54class HNullConstant; 55class HPhi; 56class HSuspendCheck; 57class HTryBoundary; 58class LiveInterval; 59class LocationSummary; 60class SlowPathCode; 61class SsaBuilder; 62 63namespace mirror { 64class DexCache; 65} // namespace mirror 66 67static const int kDefaultNumberOfBlocks = 8; 68static const int kDefaultNumberOfSuccessors = 2; 69static const int kDefaultNumberOfPredecessors = 2; 70static const int kDefaultNumberOfExceptionalPredecessors = 0; 71static const int kDefaultNumberOfDominatedBlocks = 1; 72static const int kDefaultNumberOfBackEdges = 1; 73 74static constexpr uint32_t kMaxIntShiftValue = 0x1f; 75static constexpr uint64_t kMaxLongShiftValue = 0x3f; 76 77static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 78static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1); 79 80static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 81 82static constexpr uint32_t kNoDexPc = -1; 83 84enum IfCondition { 85 // All types. 86 kCondEQ, // == 87 kCondNE, // != 88 // Signed integers and floating-point numbers. 89 kCondLT, // < 90 kCondLE, // <= 91 kCondGT, // > 92 kCondGE, // >= 93 // Unsigned integers. 94 kCondB, // < 95 kCondBE, // <= 96 kCondA, // > 97 kCondAE, // >= 98}; 99 100class HInstructionList : public ValueObject { 101 public: 102 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 103 104 void AddInstruction(HInstruction* instruction); 105 void RemoveInstruction(HInstruction* instruction); 106 107 // Insert `instruction` before/after an existing instruction `cursor`. 108 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 109 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 110 111 // Return true if this list contains `instruction`. 112 bool Contains(HInstruction* instruction) const; 113 114 // Return true if `instruction1` is found before `instruction2` in 115 // this instruction list and false otherwise. Abort if none 116 // of these instructions is found. 117 bool FoundBefore(const HInstruction* instruction1, 118 const HInstruction* instruction2) const; 119 120 bool IsEmpty() const { return first_instruction_ == nullptr; } 121 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 122 123 // Update the block of all instructions to be `block`. 124 void SetBlockOfInstructions(HBasicBlock* block) const; 125 126 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 127 void Add(const HInstructionList& instruction_list); 128 129 // Return the number of instructions in the list. This is an expensive operation. 130 size_t CountSize() const; 131 132 private: 133 HInstruction* first_instruction_; 134 HInstruction* last_instruction_; 135 136 friend class HBasicBlock; 137 friend class HGraph; 138 friend class HInstruction; 139 friend class HInstructionIterator; 140 friend class HBackwardInstructionIterator; 141 142 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 143}; 144 145// Control-flow graph of a method. Contains a list of basic blocks. 146class HGraph : public ArenaObject<kArenaAllocGraph> { 147 public: 148 HGraph(ArenaAllocator* arena, 149 const DexFile& dex_file, 150 uint32_t method_idx, 151 bool should_generate_constructor_barrier, 152 InstructionSet instruction_set, 153 InvokeType invoke_type = kInvalidInvokeType, 154 bool debuggable = false, 155 int start_instruction_id = 0) 156 : arena_(arena), 157 blocks_(arena->Adapter(kArenaAllocBlockList)), 158 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 159 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 160 entry_block_(nullptr), 161 exit_block_(nullptr), 162 maximum_number_of_out_vregs_(0), 163 number_of_vregs_(0), 164 number_of_in_vregs_(0), 165 temporaries_vreg_slots_(0), 166 has_bounds_checks_(false), 167 has_try_catch_(false), 168 debuggable_(debuggable), 169 current_instruction_id_(start_instruction_id), 170 dex_file_(dex_file), 171 method_idx_(method_idx), 172 invoke_type_(invoke_type), 173 in_ssa_form_(false), 174 should_generate_constructor_barrier_(should_generate_constructor_barrier), 175 instruction_set_(instruction_set), 176 cached_null_constant_(nullptr), 177 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 178 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 179 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 180 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 181 cached_current_method_(nullptr) { 182 blocks_.reserve(kDefaultNumberOfBlocks); 183 } 184 185 ArenaAllocator* GetArena() const { return arena_; } 186 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 187 188 bool IsInSsaForm() const { return in_ssa_form_; } 189 190 HBasicBlock* GetEntryBlock() const { return entry_block_; } 191 HBasicBlock* GetExitBlock() const { return exit_block_; } 192 bool HasExitBlock() const { return exit_block_ != nullptr; } 193 194 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 195 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 196 197 void AddBlock(HBasicBlock* block); 198 199 // Try building the SSA form of this graph, with dominance computation and loop 200 // recognition. Returns whether it was successful in doing all these steps. 201 bool TryBuildingSsa() { 202 BuildDominatorTree(); 203 // The SSA builder requires loops to all be natural. Specifically, the dead phi 204 // elimination phase checks the consistency of the graph when doing a post-order 205 // visit for eliminating dead phis: a dead phi can only have loop header phi 206 // users remaining when being visited. 207 if (!AnalyzeNaturalLoops()) return false; 208 // Precompute per-block try membership before entering the SSA builder, 209 // which needs the information to build catch block phis from values of 210 // locals at throwing instructions inside try blocks. 211 ComputeTryBlockInformation(); 212 TransformToSsa(); 213 in_ssa_form_ = true; 214 return true; 215 } 216 217 void ComputeDominanceInformation(); 218 void ClearDominanceInformation(); 219 220 void BuildDominatorTree(); 221 void TransformToSsa(); 222 void SimplifyCFG(); 223 void SimplifyCatchBlocks(); 224 225 // Analyze all natural loops in this graph. Returns false if one 226 // loop is not natural, that is the header does not dominate the 227 // back edge. 228 bool AnalyzeNaturalLoops() const; 229 230 // Iterate over blocks to compute try block membership. Needs reverse post 231 // order and loop information. 232 void ComputeTryBlockInformation(); 233 234 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 235 // Returns the instruction used to replace the invoke expression or null if the 236 // invoke is for a void method. 237 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 238 239 // Need to add a couple of blocks to test if the loop body is entered and 240 // put deoptimization instructions, etc. 241 void TransformLoopHeaderForBCE(HBasicBlock* header); 242 243 // Removes `block` from the graph. 244 void DeleteDeadBlock(HBasicBlock* block); 245 246 // Splits the edge between `block` and `successor` while preserving the 247 // indices in the predecessor/successor lists. If there are multiple edges 248 // between the blocks, the lowest indices are used. 249 // Returns the new block which is empty and has the same dex pc as `successor`. 250 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 251 252 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 253 void SimplifyLoop(HBasicBlock* header); 254 255 int32_t GetNextInstructionId() { 256 DCHECK_NE(current_instruction_id_, INT32_MAX); 257 return current_instruction_id_++; 258 } 259 260 int32_t GetCurrentInstructionId() const { 261 return current_instruction_id_; 262 } 263 264 void SetCurrentInstructionId(int32_t id) { 265 current_instruction_id_ = id; 266 } 267 268 uint16_t GetMaximumNumberOfOutVRegs() const { 269 return maximum_number_of_out_vregs_; 270 } 271 272 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 273 maximum_number_of_out_vregs_ = new_value; 274 } 275 276 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 277 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 278 } 279 280 void UpdateTemporariesVRegSlots(size_t slots) { 281 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 282 } 283 284 size_t GetTemporariesVRegSlots() const { 285 DCHECK(!in_ssa_form_); 286 return temporaries_vreg_slots_; 287 } 288 289 void SetNumberOfVRegs(uint16_t number_of_vregs) { 290 number_of_vregs_ = number_of_vregs; 291 } 292 293 uint16_t GetNumberOfVRegs() const { 294 return number_of_vregs_; 295 } 296 297 void SetNumberOfInVRegs(uint16_t value) { 298 number_of_in_vregs_ = value; 299 } 300 301 uint16_t GetNumberOfLocalVRegs() const { 302 DCHECK(!in_ssa_form_); 303 return number_of_vregs_ - number_of_in_vregs_; 304 } 305 306 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 307 return reverse_post_order_; 308 } 309 310 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 311 return linear_order_; 312 } 313 314 bool HasBoundsChecks() const { 315 return has_bounds_checks_; 316 } 317 318 void SetHasBoundsChecks(bool value) { 319 has_bounds_checks_ = value; 320 } 321 322 bool ShouldGenerateConstructorBarrier() const { 323 return should_generate_constructor_barrier_; 324 } 325 326 bool IsDebuggable() const { return debuggable_; } 327 328 // Returns a constant of the given type and value. If it does not exist 329 // already, it is created and inserted into the graph. This method is only for 330 // integral types. 331 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 332 333 // TODO: This is problematic for the consistency of reference type propagation 334 // because it can be created anytime after the pass and thus it will be left 335 // with an invalid type. 336 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 337 338 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 339 return CreateConstant(value, &cached_int_constants_, dex_pc); 340 } 341 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 342 return CreateConstant(value, &cached_long_constants_, dex_pc); 343 } 344 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 345 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 346 } 347 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 348 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 349 } 350 351 HCurrentMethod* GetCurrentMethod(); 352 353 const DexFile& GetDexFile() const { 354 return dex_file_; 355 } 356 357 uint32_t GetMethodIdx() const { 358 return method_idx_; 359 } 360 361 InvokeType GetInvokeType() const { 362 return invoke_type_; 363 } 364 365 InstructionSet GetInstructionSet() const { 366 return instruction_set_; 367 } 368 369 bool HasTryCatch() const { return has_try_catch_; } 370 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 371 372 private: 373 void FindBackEdges(ArenaBitVector* visited); 374 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 375 void RemoveDeadBlocks(const ArenaBitVector& visited); 376 377 template <class InstructionType, typename ValueType> 378 InstructionType* CreateConstant(ValueType value, 379 ArenaSafeMap<ValueType, InstructionType*>* cache, 380 uint32_t dex_pc = kNoDexPc) { 381 // Try to find an existing constant of the given value. 382 InstructionType* constant = nullptr; 383 auto cached_constant = cache->find(value); 384 if (cached_constant != cache->end()) { 385 constant = cached_constant->second; 386 } 387 388 // If not found or previously deleted, create and cache a new instruction. 389 // Don't bother reviving a previously deleted instruction, for simplicity. 390 if (constant == nullptr || constant->GetBlock() == nullptr) { 391 constant = new (arena_) InstructionType(value, dex_pc); 392 cache->Overwrite(value, constant); 393 InsertConstant(constant); 394 } 395 return constant; 396 } 397 398 void InsertConstant(HConstant* instruction); 399 400 // Cache a float constant into the graph. This method should only be 401 // called by the SsaBuilder when creating "equivalent" instructions. 402 void CacheFloatConstant(HFloatConstant* constant); 403 404 // See CacheFloatConstant comment. 405 void CacheDoubleConstant(HDoubleConstant* constant); 406 407 ArenaAllocator* const arena_; 408 409 // List of blocks in insertion order. 410 ArenaVector<HBasicBlock*> blocks_; 411 412 // List of blocks to perform a reverse post order tree traversal. 413 ArenaVector<HBasicBlock*> reverse_post_order_; 414 415 // List of blocks to perform a linear order tree traversal. 416 ArenaVector<HBasicBlock*> linear_order_; 417 418 HBasicBlock* entry_block_; 419 HBasicBlock* exit_block_; 420 421 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 422 uint16_t maximum_number_of_out_vregs_; 423 424 // The number of virtual registers in this method. Contains the parameters. 425 uint16_t number_of_vregs_; 426 427 // The number of virtual registers used by parameters of this method. 428 uint16_t number_of_in_vregs_; 429 430 // Number of vreg size slots that the temporaries use (used in baseline compiler). 431 size_t temporaries_vreg_slots_; 432 433 // Has bounds checks. We can totally skip BCE if it's false. 434 bool has_bounds_checks_; 435 436 // Flag whether there are any try/catch blocks in the graph. We will skip 437 // try/catch-related passes if false. 438 bool has_try_catch_; 439 440 // Indicates whether the graph should be compiled in a way that 441 // ensures full debuggability. If false, we can apply more 442 // aggressive optimizations that may limit the level of debugging. 443 const bool debuggable_; 444 445 // The current id to assign to a newly added instruction. See HInstruction.id_. 446 int32_t current_instruction_id_; 447 448 // The dex file from which the method is from. 449 const DexFile& dex_file_; 450 451 // The method index in the dex file. 452 const uint32_t method_idx_; 453 454 // If inlined, this encodes how the callee is being invoked. 455 const InvokeType invoke_type_; 456 457 // Whether the graph has been transformed to SSA form. Only used 458 // in debug mode to ensure we are not using properties only valid 459 // for non-SSA form (like the number of temporaries). 460 bool in_ssa_form_; 461 462 const bool should_generate_constructor_barrier_; 463 464 const InstructionSet instruction_set_; 465 466 // Cached constants. 467 HNullConstant* cached_null_constant_; 468 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 469 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 470 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 471 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 472 473 HCurrentMethod* cached_current_method_; 474 475 friend class SsaBuilder; // For caching constants. 476 friend class SsaLivenessAnalysis; // For the linear order. 477 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 478 DISALLOW_COPY_AND_ASSIGN(HGraph); 479}; 480 481class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 482 public: 483 HLoopInformation(HBasicBlock* header, HGraph* graph) 484 : header_(header), 485 suspend_check_(nullptr), 486 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 487 // Make bit vector growable, as the number of blocks may change. 488 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 489 back_edges_.reserve(kDefaultNumberOfBackEdges); 490 } 491 492 HBasicBlock* GetHeader() const { 493 return header_; 494 } 495 496 void SetHeader(HBasicBlock* block) { 497 header_ = block; 498 } 499 500 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 501 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 502 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 503 504 void AddBackEdge(HBasicBlock* back_edge) { 505 back_edges_.push_back(back_edge); 506 } 507 508 void RemoveBackEdge(HBasicBlock* back_edge) { 509 RemoveElement(back_edges_, back_edge); 510 } 511 512 bool IsBackEdge(const HBasicBlock& block) const { 513 return ContainsElement(back_edges_, &block); 514 } 515 516 size_t NumberOfBackEdges() const { 517 return back_edges_.size(); 518 } 519 520 HBasicBlock* GetPreHeader() const; 521 522 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 523 return back_edges_; 524 } 525 526 // Returns the lifetime position of the back edge that has the 527 // greatest lifetime position. 528 size_t GetLifetimeEnd() const; 529 530 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 531 ReplaceElement(back_edges_, existing, new_back_edge); 532 } 533 534 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 535 // that is the header dominates the back edge. 536 bool Populate(); 537 538 // Reanalyzes the loop by removing loop info from its blocks and re-running 539 // Populate(). If there are no back edges left, the loop info is completely 540 // removed as well as its SuspendCheck instruction. It must be run on nested 541 // inner loops first. 542 void Update(); 543 544 // Returns whether this loop information contains `block`. 545 // Note that this loop information *must* be populated before entering this function. 546 bool Contains(const HBasicBlock& block) const; 547 548 // Returns whether this loop information is an inner loop of `other`. 549 // Note that `other` *must* be populated before entering this function. 550 bool IsIn(const HLoopInformation& other) const; 551 552 // Returns true if instruction is not defined within this loop or any loop nested inside 553 // this loop. If must_dominate is set, only definitions that actually dominate the loop 554 // header can be invariant. Otherwise, any definition outside the loop, including 555 // definitions that appear after the loop, is invariant. 556 bool IsLoopInvariant(HInstruction* instruction, bool must_dominate) const; 557 558 const ArenaBitVector& GetBlocks() const { return blocks_; } 559 560 void Add(HBasicBlock* block); 561 void Remove(HBasicBlock* block); 562 563 private: 564 // Internal recursive implementation of `Populate`. 565 void PopulateRecursive(HBasicBlock* block); 566 567 HBasicBlock* header_; 568 HSuspendCheck* suspend_check_; 569 ArenaVector<HBasicBlock*> back_edges_; 570 ArenaBitVector blocks_; 571 572 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 573}; 574 575// Stores try/catch information for basic blocks. 576// Note that HGraph is constructed so that catch blocks cannot simultaneously 577// be try blocks. 578class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 579 public: 580 // Try block information constructor. 581 explicit TryCatchInformation(const HTryBoundary& try_entry) 582 : try_entry_(&try_entry), 583 catch_dex_file_(nullptr), 584 catch_type_index_(DexFile::kDexNoIndex16) { 585 DCHECK(try_entry_ != nullptr); 586 } 587 588 // Catch block information constructor. 589 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 590 : try_entry_(nullptr), 591 catch_dex_file_(&dex_file), 592 catch_type_index_(catch_type_index) {} 593 594 bool IsTryBlock() const { return try_entry_ != nullptr; } 595 596 const HTryBoundary& GetTryEntry() const { 597 DCHECK(IsTryBlock()); 598 return *try_entry_; 599 } 600 601 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 602 603 bool IsCatchAllTypeIndex() const { 604 DCHECK(IsCatchBlock()); 605 return catch_type_index_ == DexFile::kDexNoIndex16; 606 } 607 608 uint16_t GetCatchTypeIndex() const { 609 DCHECK(IsCatchBlock()); 610 return catch_type_index_; 611 } 612 613 const DexFile& GetCatchDexFile() const { 614 DCHECK(IsCatchBlock()); 615 return *catch_dex_file_; 616 } 617 618 private: 619 // One of possibly several TryBoundary instructions entering the block's try. 620 // Only set for try blocks. 621 const HTryBoundary* try_entry_; 622 623 // Exception type information. Only set for catch blocks. 624 const DexFile* catch_dex_file_; 625 const uint16_t catch_type_index_; 626}; 627 628static constexpr size_t kNoLifetime = -1; 629static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 630 631// A block in a method. Contains the list of instructions represented 632// as a double linked list. Each block knows its predecessors and 633// successors. 634 635class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 636 public: 637 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 638 : graph_(graph), 639 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 640 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 641 loop_information_(nullptr), 642 dominator_(nullptr), 643 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 644 block_id_(kInvalidBlockId), 645 dex_pc_(dex_pc), 646 lifetime_start_(kNoLifetime), 647 lifetime_end_(kNoLifetime), 648 try_catch_information_(nullptr) { 649 predecessors_.reserve(kDefaultNumberOfPredecessors); 650 successors_.reserve(kDefaultNumberOfSuccessors); 651 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 652 } 653 654 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 655 return predecessors_; 656 } 657 658 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 659 return successors_; 660 } 661 662 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 663 return ContainsElement(successors_, block, start_from); 664 } 665 666 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 667 return dominated_blocks_; 668 } 669 670 bool IsEntryBlock() const { 671 return graph_->GetEntryBlock() == this; 672 } 673 674 bool IsExitBlock() const { 675 return graph_->GetExitBlock() == this; 676 } 677 678 bool IsSingleGoto() const; 679 bool IsSingleTryBoundary() const; 680 681 // Returns true if this block emits nothing but a jump. 682 bool IsSingleJump() const { 683 HLoopInformation* loop_info = GetLoopInformation(); 684 return (IsSingleGoto() || IsSingleTryBoundary()) 685 // Back edges generate a suspend check. 686 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 687 } 688 689 void AddBackEdge(HBasicBlock* back_edge) { 690 if (loop_information_ == nullptr) { 691 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 692 } 693 DCHECK_EQ(loop_information_->GetHeader(), this); 694 loop_information_->AddBackEdge(back_edge); 695 } 696 697 HGraph* GetGraph() const { return graph_; } 698 void SetGraph(HGraph* graph) { graph_ = graph; } 699 700 uint32_t GetBlockId() const { return block_id_; } 701 void SetBlockId(int id) { block_id_ = id; } 702 uint32_t GetDexPc() const { return dex_pc_; } 703 704 HBasicBlock* GetDominator() const { return dominator_; } 705 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 706 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 707 708 void RemoveDominatedBlock(HBasicBlock* block) { 709 RemoveElement(dominated_blocks_, block); 710 } 711 712 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 713 ReplaceElement(dominated_blocks_, existing, new_block); 714 } 715 716 void ClearDominanceInformation(); 717 718 int NumberOfBackEdges() const { 719 return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0; 720 } 721 722 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 723 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 724 const HInstructionList& GetInstructions() const { return instructions_; } 725 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 726 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 727 const HInstructionList& GetPhis() const { return phis_; } 728 729 void AddSuccessor(HBasicBlock* block) { 730 successors_.push_back(block); 731 block->predecessors_.push_back(this); 732 } 733 734 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 735 size_t successor_index = GetSuccessorIndexOf(existing); 736 existing->RemovePredecessor(this); 737 new_block->predecessors_.push_back(this); 738 successors_[successor_index] = new_block; 739 } 740 741 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 742 size_t predecessor_index = GetPredecessorIndexOf(existing); 743 existing->RemoveSuccessor(this); 744 new_block->successors_.push_back(this); 745 predecessors_[predecessor_index] = new_block; 746 } 747 748 // Insert `this` between `predecessor` and `successor. This method 749 // preserves the indicies, and will update the first edge found between 750 // `predecessor` and `successor`. 751 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 752 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 753 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 754 successor->predecessors_[predecessor_index] = this; 755 predecessor->successors_[successor_index] = this; 756 successors_.push_back(successor); 757 predecessors_.push_back(predecessor); 758 } 759 760 void RemovePredecessor(HBasicBlock* block) { 761 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 762 } 763 764 void RemoveSuccessor(HBasicBlock* block) { 765 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 766 } 767 768 void ClearAllPredecessors() { 769 predecessors_.clear(); 770 } 771 772 void AddPredecessor(HBasicBlock* block) { 773 predecessors_.push_back(block); 774 block->successors_.push_back(this); 775 } 776 777 void SwapPredecessors() { 778 DCHECK_EQ(predecessors_.size(), 2u); 779 std::swap(predecessors_[0], predecessors_[1]); 780 } 781 782 void SwapSuccessors() { 783 DCHECK_EQ(successors_.size(), 2u); 784 std::swap(successors_[0], successors_[1]); 785 } 786 787 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 788 return IndexOfElement(predecessors_, predecessor); 789 } 790 791 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 792 return IndexOfElement(successors_, successor); 793 } 794 795 HBasicBlock* GetSinglePredecessor() const { 796 DCHECK_EQ(GetPredecessors().size(), 1u); 797 return GetPredecessors()[0]; 798 } 799 800 HBasicBlock* GetSingleSuccessor() const { 801 DCHECK_EQ(GetSuccessors().size(), 1u); 802 return GetSuccessors()[0]; 803 } 804 805 // Returns whether the first occurrence of `predecessor` in the list of 806 // predecessors is at index `idx`. 807 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const { 808 DCHECK_EQ(GetPredecessors()[idx], predecessor); 809 return GetPredecessorIndexOf(predecessor) == idx; 810 } 811 812 // Returns the number of non-exceptional successors. SsaChecker ensures that 813 // these are stored at the beginning of the successor list. 814 size_t NumberOfNormalSuccessors() const { 815 return EndsWithTryBoundary() ? 1 : GetSuccessors().size(); 816 } 817 818 // Create a new block between this block and its predecessors. The new block 819 // is added to the graph, all predecessor edges are relinked to it and an edge 820 // is created to `this`. Returns the new empty block. Reverse post order or 821 // loop and try/catch information are not updated. 822 HBasicBlock* CreateImmediateDominator(); 823 824 // Split the block into two blocks just before `cursor`. Returns the newly 825 // created, latter block. Note that this method will add the block to the 826 // graph, create a Goto at the end of the former block and will create an edge 827 // between the blocks. It will not, however, update the reverse post order or 828 // loop and try/catch information. 829 HBasicBlock* SplitBefore(HInstruction* cursor); 830 831 // Split the block into two blocks just after `cursor`. Returns the newly 832 // created block. Note that this method just updates raw block information, 833 // like predecessors, successors, dominators, and instruction list. It does not 834 // update the graph, reverse post order, loop information, nor make sure the 835 // blocks are consistent (for example ending with a control flow instruction). 836 HBasicBlock* SplitAfter(HInstruction* cursor); 837 838 // Split catch block into two blocks after the original move-exception bytecode 839 // instruction, or at the beginning if not present. Returns the newly created, 840 // latter block, or nullptr if such block could not be created (must be dead 841 // in that case). Note that this method just updates raw block information, 842 // like predecessors, successors, dominators, and instruction list. It does not 843 // update the graph, reverse post order, loop information, nor make sure the 844 // blocks are consistent (for example ending with a control flow instruction). 845 HBasicBlock* SplitCatchBlockAfterMoveException(); 846 847 // Merge `other` at the end of `this`. Successors and dominated blocks of 848 // `other` are changed to be successors and dominated blocks of `this`. Note 849 // that this method does not update the graph, reverse post order, loop 850 // information, nor make sure the blocks are consistent (for example ending 851 // with a control flow instruction). 852 void MergeWithInlined(HBasicBlock* other); 853 854 // Replace `this` with `other`. Predecessors, successors, and dominated blocks 855 // of `this` are moved to `other`. 856 // Note that this method does not update the graph, reverse post order, loop 857 // information, nor make sure the blocks are consistent (for example ending 858 // with a control flow instruction). 859 void ReplaceWith(HBasicBlock* other); 860 861 // Merge `other` at the end of `this`. This method updates loops, reverse post 862 // order, links to predecessors, successors, dominators and deletes the block 863 // from the graph. The two blocks must be successive, i.e. `this` the only 864 // predecessor of `other` and vice versa. 865 void MergeWith(HBasicBlock* other); 866 867 // Disconnects `this` from all its predecessors, successors and dominator, 868 // removes it from all loops it is included in and eventually from the graph. 869 // The block must not dominate any other block. Predecessors and successors 870 // are safely updated. 871 void DisconnectAndDelete(); 872 873 void AddInstruction(HInstruction* instruction); 874 // Insert `instruction` before/after an existing instruction `cursor`. 875 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 876 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 877 // Replace instruction `initial` with `replacement` within this block. 878 void ReplaceAndRemoveInstructionWith(HInstruction* initial, 879 HInstruction* replacement); 880 void AddPhi(HPhi* phi); 881 void InsertPhiAfter(HPhi* instruction, HPhi* cursor); 882 // RemoveInstruction and RemovePhi delete a given instruction from the respective 883 // instruction list. With 'ensure_safety' set to true, it verifies that the 884 // instruction is not in use and removes it from the use lists of its inputs. 885 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true); 886 void RemovePhi(HPhi* phi, bool ensure_safety = true); 887 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true); 888 889 bool IsLoopHeader() const { 890 return IsInLoop() && (loop_information_->GetHeader() == this); 891 } 892 893 bool IsLoopPreHeaderFirstPredecessor() const { 894 DCHECK(IsLoopHeader()); 895 return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader(); 896 } 897 898 HLoopInformation* GetLoopInformation() const { 899 return loop_information_; 900 } 901 902 // Set the loop_information_ on this block. Overrides the current 903 // loop_information if it is an outer loop of the passed loop information. 904 // Note that this method is called while creating the loop information. 905 void SetInLoop(HLoopInformation* info) { 906 if (IsLoopHeader()) { 907 // Nothing to do. This just means `info` is an outer loop. 908 } else if (!IsInLoop()) { 909 loop_information_ = info; 910 } else if (loop_information_->Contains(*info->GetHeader())) { 911 // Block is currently part of an outer loop. Make it part of this inner loop. 912 // Note that a non loop header having a loop information means this loop information 913 // has already been populated 914 loop_information_ = info; 915 } else { 916 // Block is part of an inner loop. Do not update the loop information. 917 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()` 918 // at this point, because this method is being called while populating `info`. 919 } 920 } 921 922 // Raw update of the loop information. 923 void SetLoopInformation(HLoopInformation* info) { 924 loop_information_ = info; 925 } 926 927 bool IsInLoop() const { return loop_information_ != nullptr; } 928 929 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; } 930 931 void SetTryCatchInformation(TryCatchInformation* try_catch_information) { 932 try_catch_information_ = try_catch_information; 933 } 934 935 bool IsTryBlock() const { 936 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock(); 937 } 938 939 bool IsCatchBlock() const { 940 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock(); 941 } 942 943 // Returns the try entry that this block's successors should have. They will 944 // be in the same try, unless the block ends in a try boundary. In that case, 945 // the appropriate try entry will be returned. 946 const HTryBoundary* ComputeTryEntryOfSuccessors() const; 947 948 bool HasThrowingInstructions() const; 949 950 // Returns whether this block dominates the blocked passed as parameter. 951 bool Dominates(HBasicBlock* block) const; 952 953 size_t GetLifetimeStart() const { return lifetime_start_; } 954 size_t GetLifetimeEnd() const { return lifetime_end_; } 955 956 void SetLifetimeStart(size_t start) { lifetime_start_ = start; } 957 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; } 958 959 bool EndsWithControlFlowInstruction() const; 960 bool EndsWithIf() const; 961 bool EndsWithTryBoundary() const; 962 bool HasSinglePhi() const; 963 964 private: 965 HGraph* graph_; 966 ArenaVector<HBasicBlock*> predecessors_; 967 ArenaVector<HBasicBlock*> successors_; 968 HInstructionList instructions_; 969 HInstructionList phis_; 970 HLoopInformation* loop_information_; 971 HBasicBlock* dominator_; 972 ArenaVector<HBasicBlock*> dominated_blocks_; 973 uint32_t block_id_; 974 // The dex program counter of the first instruction of this block. 975 const uint32_t dex_pc_; 976 size_t lifetime_start_; 977 size_t lifetime_end_; 978 TryCatchInformation* try_catch_information_; 979 980 friend class HGraph; 981 friend class HInstruction; 982 983 DISALLOW_COPY_AND_ASSIGN(HBasicBlock); 984}; 985 986// Iterates over the LoopInformation of all loops which contain 'block' 987// from the innermost to the outermost. 988class HLoopInformationOutwardIterator : public ValueObject { 989 public: 990 explicit HLoopInformationOutwardIterator(const HBasicBlock& block) 991 : current_(block.GetLoopInformation()) {} 992 993 bool Done() const { return current_ == nullptr; } 994 995 void Advance() { 996 DCHECK(!Done()); 997 current_ = current_->GetPreHeader()->GetLoopInformation(); 998 } 999 1000 HLoopInformation* Current() const { 1001 DCHECK(!Done()); 1002 return current_; 1003 } 1004 1005 private: 1006 HLoopInformation* current_; 1007 1008 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator); 1009}; 1010 1011#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1012 M(Above, Condition) \ 1013 M(AboveOrEqual, Condition) \ 1014 M(Add, BinaryOperation) \ 1015 M(And, BinaryOperation) \ 1016 M(ArrayGet, Instruction) \ 1017 M(ArrayLength, Instruction) \ 1018 M(ArraySet, Instruction) \ 1019 M(Below, Condition) \ 1020 M(BelowOrEqual, Condition) \ 1021 M(BooleanNot, UnaryOperation) \ 1022 M(BoundsCheck, Instruction) \ 1023 M(BoundType, Instruction) \ 1024 M(CheckCast, Instruction) \ 1025 M(ClearException, Instruction) \ 1026 M(ClinitCheck, Instruction) \ 1027 M(Compare, BinaryOperation) \ 1028 M(Condition, BinaryOperation) \ 1029 M(CurrentMethod, Instruction) \ 1030 M(Deoptimize, Instruction) \ 1031 M(Div, BinaryOperation) \ 1032 M(DivZeroCheck, Instruction) \ 1033 M(DoubleConstant, Constant) \ 1034 M(Equal, Condition) \ 1035 M(Exit, Instruction) \ 1036 M(FakeString, Instruction) \ 1037 M(FloatConstant, Constant) \ 1038 M(Goto, Instruction) \ 1039 M(GreaterThan, Condition) \ 1040 M(GreaterThanOrEqual, Condition) \ 1041 M(If, Instruction) \ 1042 M(InstanceFieldGet, Instruction) \ 1043 M(InstanceFieldSet, Instruction) \ 1044 M(InstanceOf, Instruction) \ 1045 M(IntConstant, Constant) \ 1046 M(InvokeUnresolved, Invoke) \ 1047 M(InvokeInterface, Invoke) \ 1048 M(InvokeStaticOrDirect, Invoke) \ 1049 M(InvokeVirtual, Invoke) \ 1050 M(LessThan, Condition) \ 1051 M(LessThanOrEqual, Condition) \ 1052 M(LoadClass, Instruction) \ 1053 M(LoadException, Instruction) \ 1054 M(LoadLocal, Instruction) \ 1055 M(LoadString, Instruction) \ 1056 M(Local, Instruction) \ 1057 M(LongConstant, Constant) \ 1058 M(MemoryBarrier, Instruction) \ 1059 M(MonitorOperation, Instruction) \ 1060 M(Mul, BinaryOperation) \ 1061 M(Neg, UnaryOperation) \ 1062 M(NewArray, Instruction) \ 1063 M(NewInstance, Instruction) \ 1064 M(Not, UnaryOperation) \ 1065 M(NotEqual, Condition) \ 1066 M(NullConstant, Instruction) \ 1067 M(NullCheck, Instruction) \ 1068 M(Or, BinaryOperation) \ 1069 M(PackedSwitch, Instruction) \ 1070 M(ParallelMove, Instruction) \ 1071 M(ParameterValue, Instruction) \ 1072 M(Phi, Instruction) \ 1073 M(Rem, BinaryOperation) \ 1074 M(Return, Instruction) \ 1075 M(ReturnVoid, Instruction) \ 1076 M(Shl, BinaryOperation) \ 1077 M(Shr, BinaryOperation) \ 1078 M(StaticFieldGet, Instruction) \ 1079 M(StaticFieldSet, Instruction) \ 1080 M(UnresolvedInstanceFieldGet, Instruction) \ 1081 M(UnresolvedInstanceFieldSet, Instruction) \ 1082 M(UnresolvedStaticFieldGet, Instruction) \ 1083 M(UnresolvedStaticFieldSet, Instruction) \ 1084 M(StoreLocal, Instruction) \ 1085 M(Sub, BinaryOperation) \ 1086 M(SuspendCheck, Instruction) \ 1087 M(Temporary, Instruction) \ 1088 M(Throw, Instruction) \ 1089 M(TryBoundary, Instruction) \ 1090 M(TypeConversion, Instruction) \ 1091 M(UShr, BinaryOperation) \ 1092 M(Xor, BinaryOperation) \ 1093 1094#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) 1095 1096#ifndef ART_ENABLE_CODEGEN_arm64 1097#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) 1098#else 1099#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1100 M(Arm64IntermediateAddress, Instruction) 1101#endif 1102 1103#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) 1104 1105#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) 1106 1107#ifndef ART_ENABLE_CODEGEN_x86 1108#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) 1109#else 1110#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1111 M(X86ComputeBaseMethodAddress, Instruction) \ 1112 M(X86LoadFromConstantTable, Instruction) \ 1113 M(X86PackedSwitch, Instruction) 1114#endif 1115 1116#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1117 1118#define FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1119 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1120 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \ 1121 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1122 FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \ 1123 FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \ 1124 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1125 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1126 1127#define FOR_EACH_INSTRUCTION(M) \ 1128 FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1129 M(Constant, Instruction) \ 1130 M(UnaryOperation, Instruction) \ 1131 M(BinaryOperation, Instruction) \ 1132 M(Invoke, Instruction) 1133 1134#define FORWARD_DECLARATION(type, super) class H##type; 1135FOR_EACH_INSTRUCTION(FORWARD_DECLARATION) 1136#undef FORWARD_DECLARATION 1137 1138#define DECLARE_INSTRUCTION(type) \ 1139 InstructionKind GetKind() const OVERRIDE { return k##type; } \ 1140 const char* DebugName() const OVERRIDE { return #type; } \ 1141 const H##type* As##type() const OVERRIDE { return this; } \ 1142 H##type* As##type() OVERRIDE { return this; } \ 1143 bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \ 1144 return other->Is##type(); \ 1145 } \ 1146 void Accept(HGraphVisitor* visitor) OVERRIDE 1147 1148template <typename T> class HUseList; 1149 1150template <typename T> 1151class HUseListNode : public ArenaObject<kArenaAllocUseListNode> { 1152 public: 1153 HUseListNode* GetPrevious() const { return prev_; } 1154 HUseListNode* GetNext() const { return next_; } 1155 T GetUser() const { return user_; } 1156 size_t GetIndex() const { return index_; } 1157 void SetIndex(size_t index) { index_ = index; } 1158 1159 private: 1160 HUseListNode(T user, size_t index) 1161 : user_(user), index_(index), prev_(nullptr), next_(nullptr) {} 1162 1163 T const user_; 1164 size_t index_; 1165 HUseListNode<T>* prev_; 1166 HUseListNode<T>* next_; 1167 1168 friend class HUseList<T>; 1169 1170 DISALLOW_COPY_AND_ASSIGN(HUseListNode); 1171}; 1172 1173template <typename T> 1174class HUseList : public ValueObject { 1175 public: 1176 HUseList() : first_(nullptr) {} 1177 1178 void Clear() { 1179 first_ = nullptr; 1180 } 1181 1182 // Adds a new entry at the beginning of the use list and returns 1183 // the newly created node. 1184 HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) { 1185 HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index); 1186 if (IsEmpty()) { 1187 first_ = new_node; 1188 } else { 1189 first_->prev_ = new_node; 1190 new_node->next_ = first_; 1191 first_ = new_node; 1192 } 1193 return new_node; 1194 } 1195 1196 HUseListNode<T>* GetFirst() const { 1197 return first_; 1198 } 1199 1200 void Remove(HUseListNode<T>* node) { 1201 DCHECK(node != nullptr); 1202 DCHECK(Contains(node)); 1203 1204 if (node->prev_ != nullptr) { 1205 node->prev_->next_ = node->next_; 1206 } 1207 if (node->next_ != nullptr) { 1208 node->next_->prev_ = node->prev_; 1209 } 1210 if (node == first_) { 1211 first_ = node->next_; 1212 } 1213 } 1214 1215 bool Contains(const HUseListNode<T>* node) const { 1216 if (node == nullptr) { 1217 return false; 1218 } 1219 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1220 if (current == node) { 1221 return true; 1222 } 1223 } 1224 return false; 1225 } 1226 1227 bool IsEmpty() const { 1228 return first_ == nullptr; 1229 } 1230 1231 bool HasOnlyOneUse() const { 1232 return first_ != nullptr && first_->next_ == nullptr; 1233 } 1234 1235 size_t SizeSlow() const { 1236 size_t count = 0; 1237 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1238 ++count; 1239 } 1240 return count; 1241 } 1242 1243 private: 1244 HUseListNode<T>* first_; 1245}; 1246 1247template<typename T> 1248class HUseIterator : public ValueObject { 1249 public: 1250 explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {} 1251 1252 bool Done() const { return current_ == nullptr; } 1253 1254 void Advance() { 1255 DCHECK(!Done()); 1256 current_ = current_->GetNext(); 1257 } 1258 1259 HUseListNode<T>* Current() const { 1260 DCHECK(!Done()); 1261 return current_; 1262 } 1263 1264 private: 1265 HUseListNode<T>* current_; 1266 1267 friend class HValue; 1268}; 1269 1270// This class is used by HEnvironment and HInstruction classes to record the 1271// instructions they use and pointers to the corresponding HUseListNodes kept 1272// by the used instructions. 1273template <typename T> 1274class HUserRecord : public ValueObject { 1275 public: 1276 HUserRecord() : instruction_(nullptr), use_node_(nullptr) {} 1277 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {} 1278 1279 HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node) 1280 : instruction_(old_record.instruction_), use_node_(use_node) { 1281 DCHECK(instruction_ != nullptr); 1282 DCHECK(use_node_ != nullptr); 1283 DCHECK(old_record.use_node_ == nullptr); 1284 } 1285 1286 HInstruction* GetInstruction() const { return instruction_; } 1287 HUseListNode<T>* GetUseNode() const { return use_node_; } 1288 1289 private: 1290 // Instruction used by the user. 1291 HInstruction* instruction_; 1292 1293 // Corresponding entry in the use list kept by 'instruction_'. 1294 HUseListNode<T>* use_node_; 1295}; 1296 1297/** 1298 * Side-effects representation. 1299 * 1300 * For write/read dependences on fields/arrays, the dependence analysis uses 1301 * type disambiguation (e.g. a float field write cannot modify the value of an 1302 * integer field read) and the access type (e.g. a reference array write cannot 1303 * modify the value of a reference field read [although it may modify the 1304 * reference fetch prior to reading the field, which is represented by its own 1305 * write/read dependence]). The analysis makes conservative points-to 1306 * assumptions on reference types (e.g. two same typed arrays are assumed to be 1307 * the same, and any reference read depends on any reference read without 1308 * further regard of its type). 1309 * 1310 * The internal representation uses 38-bit and is described in the table below. 1311 * The first line indicates the side effect, and for field/array accesses the 1312 * second line indicates the type of the access (in the order of the 1313 * Primitive::Type enum). 1314 * The two numbered lines below indicate the bit position in the bitfield (read 1315 * vertically). 1316 * 1317 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W | 1318 * +-------------+---------+---------+--------------+---------+---------+ 1319 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL| 1320 * | 3 |333333322|222222221| 1 |111111110|000000000| 1321 * | 7 |654321098|765432109| 8 |765432109|876543210| 1322 * 1323 * Note that, to ease the implementation, 'changes' bits are least significant 1324 * bits, while 'dependency' bits are most significant bits. 1325 */ 1326class SideEffects : public ValueObject { 1327 public: 1328 SideEffects() : flags_(0) {} 1329 1330 static SideEffects None() { 1331 return SideEffects(0); 1332 } 1333 1334 static SideEffects All() { 1335 return SideEffects(kAllChangeBits | kAllDependOnBits); 1336 } 1337 1338 static SideEffects AllChanges() { 1339 return SideEffects(kAllChangeBits); 1340 } 1341 1342 static SideEffects AllDependencies() { 1343 return SideEffects(kAllDependOnBits); 1344 } 1345 1346 static SideEffects AllExceptGCDependency() { 1347 return AllWritesAndReads().Union(SideEffects::CanTriggerGC()); 1348 } 1349 1350 static SideEffects AllWritesAndReads() { 1351 return SideEffects(kAllWrites | kAllReads); 1352 } 1353 1354 static SideEffects AllWrites() { 1355 return SideEffects(kAllWrites); 1356 } 1357 1358 static SideEffects AllReads() { 1359 return SideEffects(kAllReads); 1360 } 1361 1362 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) { 1363 return is_volatile 1364 ? AllWritesAndReads() 1365 : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset)); 1366 } 1367 1368 static SideEffects ArrayWriteOfType(Primitive::Type type) { 1369 return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset)); 1370 } 1371 1372 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) { 1373 return is_volatile 1374 ? AllWritesAndReads() 1375 : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset)); 1376 } 1377 1378 static SideEffects ArrayReadOfType(Primitive::Type type) { 1379 return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset)); 1380 } 1381 1382 static SideEffects CanTriggerGC() { 1383 return SideEffects(1ULL << kCanTriggerGCBit); 1384 } 1385 1386 static SideEffects DependsOnGC() { 1387 return SideEffects(1ULL << kDependsOnGCBit); 1388 } 1389 1390 // Combines the side-effects of this and the other. 1391 SideEffects Union(SideEffects other) const { 1392 return SideEffects(flags_ | other.flags_); 1393 } 1394 1395 SideEffects Exclusion(SideEffects other) const { 1396 return SideEffects(flags_ & ~other.flags_); 1397 } 1398 1399 void Add(SideEffects other) { 1400 flags_ |= other.flags_; 1401 } 1402 1403 bool Includes(SideEffects other) const { 1404 return (other.flags_ & flags_) == other.flags_; 1405 } 1406 1407 bool HasSideEffects() const { 1408 return (flags_ & kAllChangeBits); 1409 } 1410 1411 bool HasDependencies() const { 1412 return (flags_ & kAllDependOnBits); 1413 } 1414 1415 // Returns true if there are no side effects or dependencies. 1416 bool DoesNothing() const { 1417 return flags_ == 0; 1418 } 1419 1420 // Returns true if something is written. 1421 bool DoesAnyWrite() const { 1422 return (flags_ & kAllWrites); 1423 } 1424 1425 // Returns true if something is read. 1426 bool DoesAnyRead() const { 1427 return (flags_ & kAllReads); 1428 } 1429 1430 // Returns true if potentially everything is written and read 1431 // (every type and every kind of access). 1432 bool DoesAllReadWrite() const { 1433 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads); 1434 } 1435 1436 bool DoesAll() const { 1437 return flags_ == (kAllChangeBits | kAllDependOnBits); 1438 } 1439 1440 // Returns true if this may read something written by other. 1441 bool MayDependOn(SideEffects other) const { 1442 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits; 1443 return (other.flags_ & depends_on_flags); 1444 } 1445 1446 // Returns string representation of flags (for debugging only). 1447 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL| 1448 std::string ToString() const { 1449 std::string flags = "|"; 1450 for (int s = kLastBit; s >= 0; s--) { 1451 bool current_bit_is_set = ((flags_ >> s) & 1) != 0; 1452 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) { 1453 // This is a bit for the GC side effect. 1454 if (current_bit_is_set) { 1455 flags += "GC"; 1456 } 1457 flags += "|"; 1458 } else { 1459 // This is a bit for the array/field analysis. 1460 // The underscore character stands for the 'can trigger GC' bit. 1461 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD"; 1462 if (current_bit_is_set) { 1463 flags += kDebug[s]; 1464 } 1465 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) || 1466 (s == kFieldReadOffset) || (s == kArrayReadOffset)) { 1467 flags += "|"; 1468 } 1469 } 1470 } 1471 return flags; 1472 } 1473 1474 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; } 1475 1476 private: 1477 static constexpr int kFieldArrayAnalysisBits = 9; 1478 1479 static constexpr int kFieldWriteOffset = 0; 1480 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits; 1481 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1; 1482 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1; 1483 1484 static constexpr int kChangeBits = kCanTriggerGCBit + 1; 1485 1486 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1; 1487 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits; 1488 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1; 1489 static constexpr int kDependsOnGCBit = kLastBitForReads + 1; 1490 1491 static constexpr int kLastBit = kDependsOnGCBit; 1492 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits; 1493 1494 // Aliases. 1495 1496 static_assert(kChangeBits == kDependOnBits, 1497 "the 'change' bits should match the 'depend on' bits."); 1498 1499 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1); 1500 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits; 1501 static constexpr uint64_t kAllWrites = 1502 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset; 1503 static constexpr uint64_t kAllReads = 1504 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset; 1505 1506 // Work around the fact that HIR aliases I/F and J/D. 1507 // TODO: remove this interceptor once HIR types are clean 1508 static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) { 1509 switch (type) { 1510 case Primitive::kPrimInt: 1511 case Primitive::kPrimFloat: 1512 return TypeFlag(Primitive::kPrimInt, offset) | 1513 TypeFlag(Primitive::kPrimFloat, offset); 1514 case Primitive::kPrimLong: 1515 case Primitive::kPrimDouble: 1516 return TypeFlag(Primitive::kPrimLong, offset) | 1517 TypeFlag(Primitive::kPrimDouble, offset); 1518 default: 1519 return TypeFlag(type, offset); 1520 } 1521 } 1522 1523 // Translates type to bit flag. 1524 static uint64_t TypeFlag(Primitive::Type type, int offset) { 1525 CHECK_NE(type, Primitive::kPrimVoid); 1526 const uint64_t one = 1; 1527 const int shift = type; // 0-based consecutive enum 1528 DCHECK_LE(kFieldWriteOffset, shift); 1529 DCHECK_LT(shift, kArrayWriteOffset); 1530 return one << (type + offset); 1531 } 1532 1533 // Private constructor on direct flags value. 1534 explicit SideEffects(uint64_t flags) : flags_(flags) {} 1535 1536 uint64_t flags_; 1537}; 1538 1539// A HEnvironment object contains the values of virtual registers at a given location. 1540class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1541 public: 1542 HEnvironment(ArenaAllocator* arena, 1543 size_t number_of_vregs, 1544 const DexFile& dex_file, 1545 uint32_t method_idx, 1546 uint32_t dex_pc, 1547 InvokeType invoke_type, 1548 HInstruction* holder) 1549 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1550 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1551 parent_(nullptr), 1552 dex_file_(dex_file), 1553 method_idx_(method_idx), 1554 dex_pc_(dex_pc), 1555 invoke_type_(invoke_type), 1556 holder_(holder) { 1557 } 1558 1559 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1560 : HEnvironment(arena, 1561 to_copy.Size(), 1562 to_copy.GetDexFile(), 1563 to_copy.GetMethodIdx(), 1564 to_copy.GetDexPc(), 1565 to_copy.GetInvokeType(), 1566 holder) {} 1567 1568 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1569 if (parent_ != nullptr) { 1570 parent_->SetAndCopyParentChain(allocator, parent); 1571 } else { 1572 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1573 parent_->CopyFrom(parent); 1574 if (parent->GetParent() != nullptr) { 1575 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1576 } 1577 } 1578 } 1579 1580 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1581 void CopyFrom(HEnvironment* environment); 1582 1583 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1584 // input to the loop phi instead. This is for inserting instructions that 1585 // require an environment (like HDeoptimization) in the loop pre-header. 1586 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1587 1588 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1589 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1590 } 1591 1592 HInstruction* GetInstructionAt(size_t index) const { 1593 return vregs_[index].GetInstruction(); 1594 } 1595 1596 void RemoveAsUserOfInput(size_t index) const; 1597 1598 size_t Size() const { return vregs_.size(); } 1599 1600 HEnvironment* GetParent() const { return parent_; } 1601 1602 void SetLocationAt(size_t index, Location location) { 1603 locations_[index] = location; 1604 } 1605 1606 Location GetLocationAt(size_t index) const { 1607 return locations_[index]; 1608 } 1609 1610 uint32_t GetDexPc() const { 1611 return dex_pc_; 1612 } 1613 1614 uint32_t GetMethodIdx() const { 1615 return method_idx_; 1616 } 1617 1618 InvokeType GetInvokeType() const { 1619 return invoke_type_; 1620 } 1621 1622 const DexFile& GetDexFile() const { 1623 return dex_file_; 1624 } 1625 1626 HInstruction* GetHolder() const { 1627 return holder_; 1628 } 1629 1630 private: 1631 // Record instructions' use entries of this environment for constant-time removal. 1632 // It should only be called by HInstruction when a new environment use is added. 1633 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1634 DCHECK(env_use->GetUser() == this); 1635 size_t index = env_use->GetIndex(); 1636 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1637 } 1638 1639 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1640 ArenaVector<Location> locations_; 1641 HEnvironment* parent_; 1642 const DexFile& dex_file_; 1643 const uint32_t method_idx_; 1644 const uint32_t dex_pc_; 1645 const InvokeType invoke_type_; 1646 1647 // The instruction that holds this environment. 1648 HInstruction* const holder_; 1649 1650 friend class HInstruction; 1651 1652 DISALLOW_COPY_AND_ASSIGN(HEnvironment); 1653}; 1654 1655class ReferenceTypeInfo : ValueObject { 1656 public: 1657 typedef Handle<mirror::Class> TypeHandle; 1658 1659 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) { 1660 // The constructor will check that the type_handle is valid. 1661 return ReferenceTypeInfo(type_handle, is_exact); 1662 } 1663 1664 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); } 1665 1666 static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) { 1667 return handle.GetReference() != nullptr; 1668 } 1669 1670 bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) { 1671 return IsValidHandle(type_handle_); 1672 } 1673 1674 bool IsExact() const { return is_exact_; } 1675 1676 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1677 DCHECK(IsValid()); 1678 return GetTypeHandle()->IsObjectClass(); 1679 } 1680 1681 bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1682 DCHECK(IsValid()); 1683 return GetTypeHandle()->IsStringClass(); 1684 } 1685 1686 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) { 1687 DCHECK(IsValid()); 1688 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass(); 1689 } 1690 1691 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) { 1692 DCHECK(IsValid()); 1693 return GetTypeHandle()->IsInterface(); 1694 } 1695 1696 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1697 DCHECK(IsValid()); 1698 return GetTypeHandle()->IsArrayClass(); 1699 } 1700 1701 bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1702 DCHECK(IsValid()); 1703 return GetTypeHandle()->IsPrimitiveArray(); 1704 } 1705 1706 bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1707 DCHECK(IsValid()); 1708 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray(); 1709 } 1710 1711 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1712 DCHECK(IsValid()); 1713 if (!IsExact()) return false; 1714 if (!IsArrayClass()) return false; 1715 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1716 } 1717 1718 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1719 DCHECK(IsValid()); 1720 if (!IsExact()) return false; 1721 if (!IsArrayClass()) return false; 1722 if (!rti.IsArrayClass()) return false; 1723 return GetTypeHandle()->GetComponentType()->IsAssignableFrom( 1724 rti.GetTypeHandle()->GetComponentType()); 1725 } 1726 1727 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 1728 1729 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1730 DCHECK(IsValid()); 1731 DCHECK(rti.IsValid()); 1732 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1733 } 1734 1735 // Returns true if the type information provide the same amount of details. 1736 // Note that it does not mean that the instructions have the same actual type 1737 // (because the type can be the result of a merge). 1738 bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) { 1739 if (!IsValid() && !rti.IsValid()) { 1740 // Invalid types are equal. 1741 return true; 1742 } 1743 if (!IsValid() || !rti.IsValid()) { 1744 // One is valid, the other not. 1745 return false; 1746 } 1747 return IsExact() == rti.IsExact() 1748 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 1749 } 1750 1751 private: 1752 ReferenceTypeInfo(); 1753 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 1754 1755 // The class of the object. 1756 TypeHandle type_handle_; 1757 // Whether or not the type is exact or a superclass of the actual type. 1758 // Whether or not we have any information about this type. 1759 bool is_exact_; 1760}; 1761 1762std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 1763 1764class HInstruction : public ArenaObject<kArenaAllocInstruction> { 1765 public: 1766 HInstruction(SideEffects side_effects, uint32_t dex_pc) 1767 : previous_(nullptr), 1768 next_(nullptr), 1769 block_(nullptr), 1770 dex_pc_(dex_pc), 1771 id_(-1), 1772 ssa_index_(-1), 1773 environment_(nullptr), 1774 locations_(nullptr), 1775 live_interval_(nullptr), 1776 lifetime_position_(kNoLifetime), 1777 side_effects_(side_effects), 1778 reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {} 1779 1780 virtual ~HInstruction() {} 1781 1782#define DECLARE_KIND(type, super) k##type, 1783 enum InstructionKind { 1784 FOR_EACH_INSTRUCTION(DECLARE_KIND) 1785 }; 1786#undef DECLARE_KIND 1787 1788 HInstruction* GetNext() const { return next_; } 1789 HInstruction* GetPrevious() const { return previous_; } 1790 1791 HInstruction* GetNextDisregardingMoves() const; 1792 HInstruction* GetPreviousDisregardingMoves() const; 1793 1794 HBasicBlock* GetBlock() const { return block_; } 1795 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); } 1796 void SetBlock(HBasicBlock* block) { block_ = block; } 1797 bool IsInBlock() const { return block_ != nullptr; } 1798 bool IsInLoop() const { return block_->IsInLoop(); } 1799 bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); } 1800 1801 virtual size_t InputCount() const = 0; 1802 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); } 1803 1804 virtual void Accept(HGraphVisitor* visitor) = 0; 1805 virtual const char* DebugName() const = 0; 1806 1807 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1808 void SetRawInputAt(size_t index, HInstruction* input) { 1809 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1810 } 1811 1812 virtual bool NeedsEnvironment() const { return false; } 1813 1814 uint32_t GetDexPc() const { return dex_pc_; } 1815 1816 virtual bool IsControlFlow() const { return false; } 1817 1818 virtual bool CanThrow() const { return false; } 1819 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); } 1820 1821 bool HasSideEffects() const { return side_effects_.HasSideEffects(); } 1822 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); } 1823 1824 // Does not apply for all instructions, but having this at top level greatly 1825 // simplifies the null check elimination. 1826 // TODO: Consider merging can_be_null into ReferenceTypeInfo. 1827 virtual bool CanBeNull() const { 1828 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types"; 1829 return true; 1830 } 1831 1832 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const { 1833 return false; 1834 } 1835 1836 void SetReferenceTypeInfo(ReferenceTypeInfo rti); 1837 1838 ReferenceTypeInfo GetReferenceTypeInfo() const { 1839 DCHECK_EQ(GetType(), Primitive::kPrimNot); 1840 return reference_type_info_; 1841 } 1842 1843 void AddUseAt(HInstruction* user, size_t index) { 1844 DCHECK(user != nullptr); 1845 HUseListNode<HInstruction*>* use = 1846 uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1847 user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use)); 1848 } 1849 1850 void AddEnvUseAt(HEnvironment* user, size_t index) { 1851 DCHECK(user != nullptr); 1852 HUseListNode<HEnvironment*>* env_use = 1853 env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1854 user->RecordEnvUse(env_use); 1855 } 1856 1857 void RemoveAsUserOfInput(size_t input) { 1858 HUserRecord<HInstruction*> input_use = InputRecordAt(input); 1859 input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode()); 1860 } 1861 1862 const HUseList<HInstruction*>& GetUses() const { return uses_; } 1863 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; } 1864 1865 bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); } 1866 bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); } 1867 bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); } 1868 bool HasOnlyOneNonEnvironmentUse() const { 1869 return !HasEnvironmentUses() && GetUses().HasOnlyOneUse(); 1870 } 1871 1872 // Does this instruction strictly dominate `other_instruction`? 1873 // Returns false if this instruction and `other_instruction` are the same. 1874 // Aborts if this instruction and `other_instruction` are both phis. 1875 bool StrictlyDominates(HInstruction* other_instruction) const; 1876 1877 int GetId() const { return id_; } 1878 void SetId(int id) { id_ = id; } 1879 1880 int GetSsaIndex() const { return ssa_index_; } 1881 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; } 1882 bool HasSsaIndex() const { return ssa_index_ != -1; } 1883 1884 bool HasEnvironment() const { return environment_ != nullptr; } 1885 HEnvironment* GetEnvironment() const { return environment_; } 1886 // Set the `environment_` field. Raw because this method does not 1887 // update the uses lists. 1888 void SetRawEnvironment(HEnvironment* environment) { 1889 DCHECK(environment_ == nullptr); 1890 DCHECK_EQ(environment->GetHolder(), this); 1891 environment_ = environment; 1892 } 1893 1894 // Set the environment of this instruction, copying it from `environment`. While 1895 // copying, the uses lists are being updated. 1896 void CopyEnvironmentFrom(HEnvironment* environment) { 1897 DCHECK(environment_ == nullptr); 1898 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1899 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1900 environment_->CopyFrom(environment); 1901 if (environment->GetParent() != nullptr) { 1902 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1903 } 1904 } 1905 1906 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment, 1907 HBasicBlock* block) { 1908 DCHECK(environment_ == nullptr); 1909 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1910 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1911 environment_->CopyFromWithLoopPhiAdjustment(environment, block); 1912 if (environment->GetParent() != nullptr) { 1913 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1914 } 1915 } 1916 1917 // Returns the number of entries in the environment. Typically, that is the 1918 // number of dex registers in a method. It could be more in case of inlining. 1919 size_t EnvironmentSize() const; 1920 1921 LocationSummary* GetLocations() const { return locations_; } 1922 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1923 1924 void ReplaceWith(HInstruction* instruction); 1925 void ReplaceInput(HInstruction* replacement, size_t index); 1926 1927 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1928 // uses of this instruction by `other` are *not* updated. 1929 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1930 ReplaceWith(other); 1931 other->ReplaceInput(this, use_index); 1932 } 1933 1934 // Move `this` instruction before `cursor`. 1935 void MoveBefore(HInstruction* cursor); 1936 1937#define INSTRUCTION_TYPE_CHECK(type, super) \ 1938 bool Is##type() const { return (As##type() != nullptr); } \ 1939 virtual const H##type* As##type() const { return nullptr; } \ 1940 virtual H##type* As##type() { return nullptr; } 1941 1942 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1943#undef INSTRUCTION_TYPE_CHECK 1944 1945 // Returns whether the instruction can be moved within the graph. 1946 virtual bool CanBeMoved() const { return false; } 1947 1948 // Returns whether the two instructions are of the same kind. 1949 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1950 return false; 1951 } 1952 1953 // Returns whether any data encoded in the two instructions is equal. 1954 // This method does not look at the inputs. Both instructions must be 1955 // of the same type, otherwise the method has undefined behavior. 1956 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1957 return false; 1958 } 1959 1960 // Returns whether two instructions are equal, that is: 1961 // 1) They have the same type and contain the same data (InstructionDataEquals). 1962 // 2) Their inputs are identical. 1963 bool Equals(HInstruction* other) const; 1964 1965 virtual InstructionKind GetKind() const = 0; 1966 1967 virtual size_t ComputeHashCode() const { 1968 size_t result = GetKind(); 1969 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1970 result = (result * 31) + InputAt(i)->GetId(); 1971 } 1972 return result; 1973 } 1974 1975 SideEffects GetSideEffects() const { return side_effects_; } 1976 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 1977 1978 size_t GetLifetimePosition() const { return lifetime_position_; } 1979 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1980 LiveInterval* GetLiveInterval() const { return live_interval_; } 1981 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1982 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1983 1984 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 1985 1986 // Returns whether the code generation of the instruction will require to have access 1987 // to the current method. Such instructions are: 1988 // (1): Instructions that require an environment, as calling the runtime requires 1989 // to walk the stack and have the current method stored at a specific stack address. 1990 // (2): Object literals like classes and strings, that are loaded from the dex cache 1991 // fields of the current method. 1992 bool NeedsCurrentMethod() const { 1993 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 1994 } 1995 1996 // Returns whether the code generation of the instruction will require to have access 1997 // to the dex cache of the current method's declaring class via the current method. 1998 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 1999 2000 // Does this instruction have any use in an environment before 2001 // control flow hits 'other'? 2002 bool HasAnyEnvironmentUseBefore(HInstruction* other); 2003 2004 // Remove all references to environment uses of this instruction. 2005 // The caller must ensure that this is safe to do. 2006 void RemoveEnvironmentUsers(); 2007 2008 protected: 2009 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2010 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2011 2012 private: 2013 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2014 2015 HInstruction* previous_; 2016 HInstruction* next_; 2017 HBasicBlock* block_; 2018 const uint32_t dex_pc_; 2019 2020 // An instruction gets an id when it is added to the graph. 2021 // It reflects creation order. A negative id means the instruction 2022 // has not been added to the graph. 2023 int id_; 2024 2025 // When doing liveness analysis, instructions that have uses get an SSA index. 2026 int ssa_index_; 2027 2028 // List of instructions that have this instruction as input. 2029 HUseList<HInstruction*> uses_; 2030 2031 // List of environments that contain this instruction. 2032 HUseList<HEnvironment*> env_uses_; 2033 2034 // The environment associated with this instruction. Not null if the instruction 2035 // might jump out of the method. 2036 HEnvironment* environment_; 2037 2038 // Set by the code generator. 2039 LocationSummary* locations_; 2040 2041 // Set by the liveness analysis. 2042 LiveInterval* live_interval_; 2043 2044 // Set by the liveness analysis, this is the position in a linear 2045 // order of blocks where this instruction's live interval start. 2046 size_t lifetime_position_; 2047 2048 SideEffects side_effects_; 2049 2050 // TODO: for primitive types this should be marked as invalid. 2051 ReferenceTypeInfo reference_type_info_; 2052 2053 friend class GraphChecker; 2054 friend class HBasicBlock; 2055 friend class HEnvironment; 2056 friend class HGraph; 2057 friend class HInstructionList; 2058 2059 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2060}; 2061std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2062 2063class HInputIterator : public ValueObject { 2064 public: 2065 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2066 2067 bool Done() const { return index_ == instruction_->InputCount(); } 2068 HInstruction* Current() const { return instruction_->InputAt(index_); } 2069 void Advance() { index_++; } 2070 2071 private: 2072 HInstruction* instruction_; 2073 size_t index_; 2074 2075 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2076}; 2077 2078class HInstructionIterator : public ValueObject { 2079 public: 2080 explicit HInstructionIterator(const HInstructionList& instructions) 2081 : instruction_(instructions.first_instruction_) { 2082 next_ = Done() ? nullptr : instruction_->GetNext(); 2083 } 2084 2085 bool Done() const { return instruction_ == nullptr; } 2086 HInstruction* Current() const { return instruction_; } 2087 void Advance() { 2088 instruction_ = next_; 2089 next_ = Done() ? nullptr : instruction_->GetNext(); 2090 } 2091 2092 private: 2093 HInstruction* instruction_; 2094 HInstruction* next_; 2095 2096 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2097}; 2098 2099class HBackwardInstructionIterator : public ValueObject { 2100 public: 2101 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2102 : instruction_(instructions.last_instruction_) { 2103 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2104 } 2105 2106 bool Done() const { return instruction_ == nullptr; } 2107 HInstruction* Current() const { return instruction_; } 2108 void Advance() { 2109 instruction_ = next_; 2110 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2111 } 2112 2113 private: 2114 HInstruction* instruction_; 2115 HInstruction* next_; 2116 2117 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2118}; 2119 2120template<size_t N> 2121class HTemplateInstruction: public HInstruction { 2122 public: 2123 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc) 2124 : HInstruction(side_effects, dex_pc), inputs_() {} 2125 virtual ~HTemplateInstruction() {} 2126 2127 size_t InputCount() const OVERRIDE { return N; } 2128 2129 protected: 2130 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2131 DCHECK_LT(i, N); 2132 return inputs_[i]; 2133 } 2134 2135 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2136 DCHECK_LT(i, N); 2137 inputs_[i] = input; 2138 } 2139 2140 private: 2141 std::array<HUserRecord<HInstruction*>, N> inputs_; 2142 2143 friend class SsaBuilder; 2144}; 2145 2146// HTemplateInstruction specialization for N=0. 2147template<> 2148class HTemplateInstruction<0>: public HInstruction { 2149 public: 2150 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc) 2151 : HInstruction(side_effects, dex_pc) {} 2152 2153 virtual ~HTemplateInstruction() {} 2154 2155 size_t InputCount() const OVERRIDE { return 0; } 2156 2157 protected: 2158 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2159 LOG(FATAL) << "Unreachable"; 2160 UNREACHABLE(); 2161 } 2162 2163 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2164 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2165 LOG(FATAL) << "Unreachable"; 2166 UNREACHABLE(); 2167 } 2168 2169 private: 2170 friend class SsaBuilder; 2171}; 2172 2173template<intptr_t N> 2174class HExpression : public HTemplateInstruction<N> { 2175 public: 2176 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc) 2177 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 2178 virtual ~HExpression() {} 2179 2180 Primitive::Type GetType() const OVERRIDE { return type_; } 2181 2182 protected: 2183 Primitive::Type type_; 2184}; 2185 2186// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2187// instruction that branches to the exit block. 2188class HReturnVoid : public HTemplateInstruction<0> { 2189 public: 2190 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2191 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2192 2193 bool IsControlFlow() const OVERRIDE { return true; } 2194 2195 DECLARE_INSTRUCTION(ReturnVoid); 2196 2197 private: 2198 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2199}; 2200 2201// Represents dex's RETURN opcodes. A HReturn is a control flow 2202// instruction that branches to the exit block. 2203class HReturn : public HTemplateInstruction<1> { 2204 public: 2205 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2206 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2207 SetRawInputAt(0, value); 2208 } 2209 2210 bool IsControlFlow() const OVERRIDE { return true; } 2211 2212 DECLARE_INSTRUCTION(Return); 2213 2214 private: 2215 DISALLOW_COPY_AND_ASSIGN(HReturn); 2216}; 2217 2218// The exit instruction is the only instruction of the exit block. 2219// Instructions aborting the method (HThrow and HReturn) must branch to the 2220// exit block. 2221class HExit : public HTemplateInstruction<0> { 2222 public: 2223 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2224 2225 bool IsControlFlow() const OVERRIDE { return true; } 2226 2227 DECLARE_INSTRUCTION(Exit); 2228 2229 private: 2230 DISALLOW_COPY_AND_ASSIGN(HExit); 2231}; 2232 2233// Jumps from one block to another. 2234class HGoto : public HTemplateInstruction<0> { 2235 public: 2236 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2237 2238 bool IsControlFlow() const OVERRIDE { return true; } 2239 2240 HBasicBlock* GetSuccessor() const { 2241 return GetBlock()->GetSingleSuccessor(); 2242 } 2243 2244 DECLARE_INSTRUCTION(Goto); 2245 2246 private: 2247 DISALLOW_COPY_AND_ASSIGN(HGoto); 2248}; 2249 2250class HConstant : public HExpression<0> { 2251 public: 2252 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2253 : HExpression(type, SideEffects::None(), dex_pc) {} 2254 2255 bool CanBeMoved() const OVERRIDE { return true; } 2256 2257 virtual bool IsMinusOne() const { return false; } 2258 virtual bool IsZero() const { return false; } 2259 virtual bool IsOne() const { return false; } 2260 2261 virtual uint64_t GetValueAsUint64() const = 0; 2262 2263 DECLARE_INSTRUCTION(Constant); 2264 2265 private: 2266 DISALLOW_COPY_AND_ASSIGN(HConstant); 2267}; 2268 2269class HNullConstant : public HConstant { 2270 public: 2271 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2272 return true; 2273 } 2274 2275 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2276 2277 size_t ComputeHashCode() const OVERRIDE { return 0; } 2278 2279 DECLARE_INSTRUCTION(NullConstant); 2280 2281 private: 2282 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2283 2284 friend class HGraph; 2285 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2286}; 2287 2288// Constants of the type int. Those can be from Dex instructions, or 2289// synthesized (for example with the if-eqz instruction). 2290class HIntConstant : public HConstant { 2291 public: 2292 int32_t GetValue() const { return value_; } 2293 2294 uint64_t GetValueAsUint64() const OVERRIDE { 2295 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2296 } 2297 2298 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2299 DCHECK(other->IsIntConstant()); 2300 return other->AsIntConstant()->value_ == value_; 2301 } 2302 2303 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2304 2305 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2306 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2307 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2308 2309 DECLARE_INSTRUCTION(IntConstant); 2310 2311 private: 2312 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2313 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2314 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2315 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {} 2316 2317 const int32_t value_; 2318 2319 friend class HGraph; 2320 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2321 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2322 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2323}; 2324 2325class HLongConstant : public HConstant { 2326 public: 2327 int64_t GetValue() const { return value_; } 2328 2329 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2330 2331 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2332 DCHECK(other->IsLongConstant()); 2333 return other->AsLongConstant()->value_ == value_; 2334 } 2335 2336 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2337 2338 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2339 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2340 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2341 2342 DECLARE_INSTRUCTION(LongConstant); 2343 2344 private: 2345 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2346 : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {} 2347 2348 const int64_t value_; 2349 2350 friend class HGraph; 2351 DISALLOW_COPY_AND_ASSIGN(HLongConstant); 2352}; 2353 2354// Conditional branch. A block ending with an HIf instruction must have 2355// two successors. 2356class HIf : public HTemplateInstruction<1> { 2357 public: 2358 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc) 2359 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2360 SetRawInputAt(0, input); 2361 } 2362 2363 bool IsControlFlow() const OVERRIDE { return true; } 2364 2365 HBasicBlock* IfTrueSuccessor() const { 2366 return GetBlock()->GetSuccessors()[0]; 2367 } 2368 2369 HBasicBlock* IfFalseSuccessor() const { 2370 return GetBlock()->GetSuccessors()[1]; 2371 } 2372 2373 DECLARE_INSTRUCTION(If); 2374 2375 private: 2376 DISALLOW_COPY_AND_ASSIGN(HIf); 2377}; 2378 2379 2380// Abstract instruction which marks the beginning and/or end of a try block and 2381// links it to the respective exception handlers. Behaves the same as a Goto in 2382// non-exceptional control flow. 2383// Normal-flow successor is stored at index zero, exception handlers under 2384// higher indices in no particular order. 2385class HTryBoundary : public HTemplateInstruction<0> { 2386 public: 2387 enum BoundaryKind { 2388 kEntry, 2389 kExit, 2390 }; 2391 2392 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc) 2393 : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {} 2394 2395 bool IsControlFlow() const OVERRIDE { return true; } 2396 2397 // Returns the block's non-exceptional successor (index zero). 2398 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; } 2399 2400 // Returns whether `handler` is among its exception handlers (non-zero index 2401 // successors). 2402 bool HasExceptionHandler(const HBasicBlock& handler) const { 2403 DCHECK(handler.IsCatchBlock()); 2404 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2405 } 2406 2407 // If not present already, adds `handler` to its block's list of exception 2408 // handlers. 2409 void AddExceptionHandler(HBasicBlock* handler) { 2410 if (!HasExceptionHandler(*handler)) { 2411 GetBlock()->AddSuccessor(handler); 2412 } 2413 } 2414 2415 bool IsEntry() const { return kind_ == BoundaryKind::kEntry; } 2416 2417 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2418 2419 DECLARE_INSTRUCTION(TryBoundary); 2420 2421 private: 2422 const BoundaryKind kind_; 2423 2424 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2425}; 2426 2427// Iterator over exception handlers of a given HTryBoundary, i.e. over 2428// exceptional successors of its basic block. 2429class HExceptionHandlerIterator : public ValueObject { 2430 public: 2431 explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary) 2432 : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {} 2433 2434 bool Done() const { return index_ == block_.GetSuccessors().size(); } 2435 HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; } 2436 size_t CurrentSuccessorIndex() const { return index_; } 2437 void Advance() { ++index_; } 2438 2439 private: 2440 const HBasicBlock& block_; 2441 size_t index_; 2442 2443 DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator); 2444}; 2445 2446// Deoptimize to interpreter, upon checking a condition. 2447class HDeoptimize : public HTemplateInstruction<1> { 2448 public: 2449 explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc) 2450 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2451 SetRawInputAt(0, cond); 2452 } 2453 2454 bool NeedsEnvironment() const OVERRIDE { return true; } 2455 bool CanThrow() const OVERRIDE { return true; } 2456 2457 DECLARE_INSTRUCTION(Deoptimize); 2458 2459 private: 2460 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 2461}; 2462 2463// Represents the ArtMethod that was passed as a first argument to 2464// the method. It is used by instructions that depend on it, like 2465// instructions that work with the dex cache. 2466class HCurrentMethod : public HExpression<0> { 2467 public: 2468 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2469 : HExpression(type, SideEffects::None(), dex_pc) {} 2470 2471 DECLARE_INSTRUCTION(CurrentMethod); 2472 2473 private: 2474 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 2475}; 2476 2477// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will 2478// have one successor for each entry in the switch table, and the final successor 2479// will be the block containing the next Dex opcode. 2480class HPackedSwitch : public HTemplateInstruction<1> { 2481 public: 2482 HPackedSwitch(int32_t start_value, 2483 uint32_t num_entries, 2484 HInstruction* input, 2485 uint32_t dex_pc = kNoDexPc) 2486 : HTemplateInstruction(SideEffects::None(), dex_pc), 2487 start_value_(start_value), 2488 num_entries_(num_entries) { 2489 SetRawInputAt(0, input); 2490 } 2491 2492 bool IsControlFlow() const OVERRIDE { return true; } 2493 2494 int32_t GetStartValue() const { return start_value_; } 2495 2496 uint32_t GetNumEntries() const { return num_entries_; } 2497 2498 HBasicBlock* GetDefaultBlock() const { 2499 // Last entry is the default block. 2500 return GetBlock()->GetSuccessors()[num_entries_]; 2501 } 2502 DECLARE_INSTRUCTION(PackedSwitch); 2503 2504 private: 2505 const int32_t start_value_; 2506 const uint32_t num_entries_; 2507 2508 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2509}; 2510 2511class HUnaryOperation : public HExpression<1> { 2512 public: 2513 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2514 : HExpression(result_type, SideEffects::None(), dex_pc) { 2515 SetRawInputAt(0, input); 2516 } 2517 2518 HInstruction* GetInput() const { return InputAt(0); } 2519 Primitive::Type GetResultType() const { return GetType(); } 2520 2521 bool CanBeMoved() const OVERRIDE { return true; } 2522 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2523 return true; 2524 } 2525 2526 // Try to statically evaluate `operation` and return a HConstant 2527 // containing the result of this evaluation. If `operation` cannot 2528 // be evaluated as a constant, return null. 2529 HConstant* TryStaticEvaluation() const; 2530 2531 // Apply this operation to `x`. 2532 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2533 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2534 2535 DECLARE_INSTRUCTION(UnaryOperation); 2536 2537 private: 2538 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2539}; 2540 2541class HBinaryOperation : public HExpression<2> { 2542 public: 2543 HBinaryOperation(Primitive::Type result_type, 2544 HInstruction* left, 2545 HInstruction* right, 2546 SideEffects side_effects = SideEffects::None(), 2547 uint32_t dex_pc = kNoDexPc) 2548 : HExpression(result_type, side_effects, dex_pc) { 2549 SetRawInputAt(0, left); 2550 SetRawInputAt(1, right); 2551 } 2552 2553 HInstruction* GetLeft() const { return InputAt(0); } 2554 HInstruction* GetRight() const { return InputAt(1); } 2555 Primitive::Type GetResultType() const { return GetType(); } 2556 2557 virtual bool IsCommutative() const { return false; } 2558 2559 // Put constant on the right. 2560 // Returns whether order is changed. 2561 bool OrderInputsWithConstantOnTheRight() { 2562 HInstruction* left = InputAt(0); 2563 HInstruction* right = InputAt(1); 2564 if (left->IsConstant() && !right->IsConstant()) { 2565 ReplaceInput(right, 0); 2566 ReplaceInput(left, 1); 2567 return true; 2568 } 2569 return false; 2570 } 2571 2572 // Order inputs by instruction id, but favor constant on the right side. 2573 // This helps GVN for commutative ops. 2574 void OrderInputs() { 2575 DCHECK(IsCommutative()); 2576 HInstruction* left = InputAt(0); 2577 HInstruction* right = InputAt(1); 2578 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2579 return; 2580 } 2581 if (OrderInputsWithConstantOnTheRight()) { 2582 return; 2583 } 2584 // Order according to instruction id. 2585 if (left->GetId() > right->GetId()) { 2586 ReplaceInput(right, 0); 2587 ReplaceInput(left, 1); 2588 } 2589 } 2590 2591 bool CanBeMoved() const OVERRIDE { return true; } 2592 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2593 return true; 2594 } 2595 2596 // Try to statically evaluate `operation` and return a HConstant 2597 // containing the result of this evaluation. If `operation` cannot 2598 // be evaluated as a constant, return null. 2599 HConstant* TryStaticEvaluation() const; 2600 2601 // Apply this operation to `x` and `y`. 2602 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2603 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2604 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2605 HLongConstant* y ATTRIBUTE_UNUSED) const { 2606 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2607 return nullptr; 2608 } 2609 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2610 HIntConstant* y ATTRIBUTE_UNUSED) const { 2611 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2612 return nullptr; 2613 } 2614 2615 // Returns an input that can legally be used as the right input and is 2616 // constant, or null. 2617 HConstant* GetConstantRight() const; 2618 2619 // If `GetConstantRight()` returns one of the input, this returns the other 2620 // one. Otherwise it returns null. 2621 HInstruction* GetLeastConstantLeft() const; 2622 2623 DECLARE_INSTRUCTION(BinaryOperation); 2624 2625 private: 2626 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2627}; 2628 2629// The comparison bias applies for floating point operations and indicates how NaN 2630// comparisons are treated: 2631enum class ComparisonBias { 2632 kNoBias, // bias is not applicable (i.e. for long operation) 2633 kGtBias, // return 1 for NaN comparisons 2634 kLtBias, // return -1 for NaN comparisons 2635}; 2636 2637class HCondition : public HBinaryOperation { 2638 public: 2639 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2640 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2641 needs_materialization_(true), 2642 bias_(ComparisonBias::kNoBias) {} 2643 2644 bool NeedsMaterialization() const { return needs_materialization_; } 2645 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2646 2647 // For code generation purposes, returns whether this instruction is just before 2648 // `instruction`, and disregard moves in between. 2649 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2650 2651 DECLARE_INSTRUCTION(Condition); 2652 2653 virtual IfCondition GetCondition() const = 0; 2654 2655 virtual IfCondition GetOppositeCondition() const = 0; 2656 2657 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2658 2659 void SetBias(ComparisonBias bias) { bias_ = bias; } 2660 2661 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2662 return bias_ == other->AsCondition()->bias_; 2663 } 2664 2665 bool IsFPConditionTrueIfNaN() const { 2666 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2667 IfCondition if_cond = GetCondition(); 2668 return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2669 } 2670 2671 bool IsFPConditionFalseIfNaN() const { 2672 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2673 IfCondition if_cond = GetCondition(); 2674 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2675 } 2676 2677 private: 2678 // For register allocation purposes, returns whether this instruction needs to be 2679 // materialized (that is, not just be in the processor flags). 2680 bool needs_materialization_; 2681 2682 // Needed if we merge a HCompare into a HCondition. 2683 ComparisonBias bias_; 2684 2685 DISALLOW_COPY_AND_ASSIGN(HCondition); 2686}; 2687 2688// Instruction to check if two inputs are equal to each other. 2689class HEqual : public HCondition { 2690 public: 2691 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2692 : HCondition(first, second, dex_pc) {} 2693 2694 bool IsCommutative() const OVERRIDE { return true; } 2695 2696 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2697 return GetBlock()->GetGraph()->GetIntConstant( 2698 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2699 } 2700 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2701 return GetBlock()->GetGraph()->GetIntConstant( 2702 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2703 } 2704 2705 DECLARE_INSTRUCTION(Equal); 2706 2707 IfCondition GetCondition() const OVERRIDE { 2708 return kCondEQ; 2709 } 2710 2711 IfCondition GetOppositeCondition() const OVERRIDE { 2712 return kCondNE; 2713 } 2714 2715 private: 2716 template <typename T> bool Compute(T x, T y) const { return x == y; } 2717 2718 DISALLOW_COPY_AND_ASSIGN(HEqual); 2719}; 2720 2721class HNotEqual : public HCondition { 2722 public: 2723 HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2724 : HCondition(first, second, dex_pc) {} 2725 2726 bool IsCommutative() const OVERRIDE { return true; } 2727 2728 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2729 return GetBlock()->GetGraph()->GetIntConstant( 2730 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2731 } 2732 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2733 return GetBlock()->GetGraph()->GetIntConstant( 2734 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2735 } 2736 2737 DECLARE_INSTRUCTION(NotEqual); 2738 2739 IfCondition GetCondition() const OVERRIDE { 2740 return kCondNE; 2741 } 2742 2743 IfCondition GetOppositeCondition() const OVERRIDE { 2744 return kCondEQ; 2745 } 2746 2747 private: 2748 template <typename T> bool Compute(T x, T y) const { return x != y; } 2749 2750 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2751}; 2752 2753class HLessThan : public HCondition { 2754 public: 2755 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2756 : HCondition(first, second, dex_pc) {} 2757 2758 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2759 return GetBlock()->GetGraph()->GetIntConstant( 2760 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2761 } 2762 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2763 return GetBlock()->GetGraph()->GetIntConstant( 2764 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2765 } 2766 2767 DECLARE_INSTRUCTION(LessThan); 2768 2769 IfCondition GetCondition() const OVERRIDE { 2770 return kCondLT; 2771 } 2772 2773 IfCondition GetOppositeCondition() const OVERRIDE { 2774 return kCondGE; 2775 } 2776 2777 private: 2778 template <typename T> bool Compute(T x, T y) const { return x < y; } 2779 2780 DISALLOW_COPY_AND_ASSIGN(HLessThan); 2781}; 2782 2783class HLessThanOrEqual : public HCondition { 2784 public: 2785 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2786 : HCondition(first, second, dex_pc) {} 2787 2788 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2789 return GetBlock()->GetGraph()->GetIntConstant( 2790 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2791 } 2792 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2793 return GetBlock()->GetGraph()->GetIntConstant( 2794 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2795 } 2796 2797 DECLARE_INSTRUCTION(LessThanOrEqual); 2798 2799 IfCondition GetCondition() const OVERRIDE { 2800 return kCondLE; 2801 } 2802 2803 IfCondition GetOppositeCondition() const OVERRIDE { 2804 return kCondGT; 2805 } 2806 2807 private: 2808 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2809 2810 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual); 2811}; 2812 2813class HGreaterThan : public HCondition { 2814 public: 2815 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2816 : HCondition(first, second, dex_pc) {} 2817 2818 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2819 return GetBlock()->GetGraph()->GetIntConstant( 2820 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2821 } 2822 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2823 return GetBlock()->GetGraph()->GetIntConstant( 2824 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2825 } 2826 2827 DECLARE_INSTRUCTION(GreaterThan); 2828 2829 IfCondition GetCondition() const OVERRIDE { 2830 return kCondGT; 2831 } 2832 2833 IfCondition GetOppositeCondition() const OVERRIDE { 2834 return kCondLE; 2835 } 2836 2837 private: 2838 template <typename T> bool Compute(T x, T y) const { return x > y; } 2839 2840 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 2841}; 2842 2843class HGreaterThanOrEqual : public HCondition { 2844 public: 2845 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2846 : HCondition(first, second, dex_pc) {} 2847 2848 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2849 return GetBlock()->GetGraph()->GetIntConstant( 2850 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2851 } 2852 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2853 return GetBlock()->GetGraph()->GetIntConstant( 2854 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2855 } 2856 2857 DECLARE_INSTRUCTION(GreaterThanOrEqual); 2858 2859 IfCondition GetCondition() const OVERRIDE { 2860 return kCondGE; 2861 } 2862 2863 IfCondition GetOppositeCondition() const OVERRIDE { 2864 return kCondLT; 2865 } 2866 2867 private: 2868 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2869 2870 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2871}; 2872 2873class HBelow : public HCondition { 2874 public: 2875 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2876 : HCondition(first, second, dex_pc) {} 2877 2878 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2879 return GetBlock()->GetGraph()->GetIntConstant( 2880 Compute(static_cast<uint32_t>(x->GetValue()), 2881 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2882 } 2883 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2884 return GetBlock()->GetGraph()->GetIntConstant( 2885 Compute(static_cast<uint64_t>(x->GetValue()), 2886 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2887 } 2888 2889 DECLARE_INSTRUCTION(Below); 2890 2891 IfCondition GetCondition() const OVERRIDE { 2892 return kCondB; 2893 } 2894 2895 IfCondition GetOppositeCondition() const OVERRIDE { 2896 return kCondAE; 2897 } 2898 2899 private: 2900 template <typename T> bool Compute(T x, T y) const { return x < y; } 2901 2902 DISALLOW_COPY_AND_ASSIGN(HBelow); 2903}; 2904 2905class HBelowOrEqual : public HCondition { 2906 public: 2907 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2908 : HCondition(first, second, dex_pc) {} 2909 2910 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2911 return GetBlock()->GetGraph()->GetIntConstant( 2912 Compute(static_cast<uint32_t>(x->GetValue()), 2913 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2914 } 2915 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2916 return GetBlock()->GetGraph()->GetIntConstant( 2917 Compute(static_cast<uint64_t>(x->GetValue()), 2918 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2919 } 2920 2921 DECLARE_INSTRUCTION(BelowOrEqual); 2922 2923 IfCondition GetCondition() const OVERRIDE { 2924 return kCondBE; 2925 } 2926 2927 IfCondition GetOppositeCondition() const OVERRIDE { 2928 return kCondA; 2929 } 2930 2931 private: 2932 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2933 2934 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2935}; 2936 2937class HAbove : public HCondition { 2938 public: 2939 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2940 : HCondition(first, second, dex_pc) {} 2941 2942 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2943 return GetBlock()->GetGraph()->GetIntConstant( 2944 Compute(static_cast<uint32_t>(x->GetValue()), 2945 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2946 } 2947 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2948 return GetBlock()->GetGraph()->GetIntConstant( 2949 Compute(static_cast<uint64_t>(x->GetValue()), 2950 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2951 } 2952 2953 DECLARE_INSTRUCTION(Above); 2954 2955 IfCondition GetCondition() const OVERRIDE { 2956 return kCondA; 2957 } 2958 2959 IfCondition GetOppositeCondition() const OVERRIDE { 2960 return kCondBE; 2961 } 2962 2963 private: 2964 template <typename T> bool Compute(T x, T y) const { return x > y; } 2965 2966 DISALLOW_COPY_AND_ASSIGN(HAbove); 2967}; 2968 2969class HAboveOrEqual : public HCondition { 2970 public: 2971 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2972 : HCondition(first, second, dex_pc) {} 2973 2974 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2975 return GetBlock()->GetGraph()->GetIntConstant( 2976 Compute(static_cast<uint32_t>(x->GetValue()), 2977 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2978 } 2979 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2980 return GetBlock()->GetGraph()->GetIntConstant( 2981 Compute(static_cast<uint64_t>(x->GetValue()), 2982 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2983 } 2984 2985 DECLARE_INSTRUCTION(AboveOrEqual); 2986 2987 IfCondition GetCondition() const OVERRIDE { 2988 return kCondAE; 2989 } 2990 2991 IfCondition GetOppositeCondition() const OVERRIDE { 2992 return kCondB; 2993 } 2994 2995 private: 2996 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2997 2998 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 2999}; 3000 3001// Instruction to check how two inputs compare to each other. 3002// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3003class HCompare : public HBinaryOperation { 3004 public: 3005 HCompare(Primitive::Type type, 3006 HInstruction* first, 3007 HInstruction* second, 3008 ComparisonBias bias, 3009 uint32_t dex_pc) 3010 : HBinaryOperation(Primitive::kPrimInt, 3011 first, 3012 second, 3013 SideEffectsForArchRuntimeCalls(type), 3014 dex_pc), 3015 bias_(bias) { 3016 DCHECK_EQ(type, first->GetType()); 3017 DCHECK_EQ(type, second->GetType()); 3018 } 3019 3020 template <typename T> 3021 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 3022 3023 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3024 return GetBlock()->GetGraph()->GetIntConstant( 3025 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3026 } 3027 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3028 return GetBlock()->GetGraph()->GetIntConstant( 3029 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3030 } 3031 3032 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3033 return bias_ == other->AsCompare()->bias_; 3034 } 3035 3036 ComparisonBias GetBias() const { return bias_; } 3037 3038 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3039 3040 3041 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3042 // MIPS64 uses a runtime call for FP comparisons. 3043 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3044 } 3045 3046 DECLARE_INSTRUCTION(Compare); 3047 3048 private: 3049 const ComparisonBias bias_; 3050 3051 DISALLOW_COPY_AND_ASSIGN(HCompare); 3052}; 3053 3054// A local in the graph. Corresponds to a Dex register. 3055class HLocal : public HTemplateInstruction<0> { 3056 public: 3057 explicit HLocal(uint16_t reg_number) 3058 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3059 3060 DECLARE_INSTRUCTION(Local); 3061 3062 uint16_t GetRegNumber() const { return reg_number_; } 3063 3064 private: 3065 // The Dex register number. 3066 const uint16_t reg_number_; 3067 3068 DISALLOW_COPY_AND_ASSIGN(HLocal); 3069}; 3070 3071// Load a given local. The local is an input of this instruction. 3072class HLoadLocal : public HExpression<1> { 3073 public: 3074 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3075 : HExpression(type, SideEffects::None(), dex_pc) { 3076 SetRawInputAt(0, local); 3077 } 3078 3079 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3080 3081 DECLARE_INSTRUCTION(LoadLocal); 3082 3083 private: 3084 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3085}; 3086 3087// Store a value in a given local. This instruction has two inputs: the value 3088// and the local. 3089class HStoreLocal : public HTemplateInstruction<2> { 3090 public: 3091 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3092 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3093 SetRawInputAt(0, local); 3094 SetRawInputAt(1, value); 3095 } 3096 3097 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3098 3099 DECLARE_INSTRUCTION(StoreLocal); 3100 3101 private: 3102 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3103}; 3104 3105class HFloatConstant : public HConstant { 3106 public: 3107 float GetValue() const { return value_; } 3108 3109 uint64_t GetValueAsUint64() const OVERRIDE { 3110 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3111 } 3112 3113 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3114 DCHECK(other->IsFloatConstant()); 3115 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3116 } 3117 3118 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3119 3120 bool IsMinusOne() const OVERRIDE { 3121 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3122 } 3123 bool IsZero() const OVERRIDE { 3124 return value_ == 0.0f; 3125 } 3126 bool IsOne() const OVERRIDE { 3127 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3128 } 3129 bool IsNaN() const { 3130 return std::isnan(value_); 3131 } 3132 3133 DECLARE_INSTRUCTION(FloatConstant); 3134 3135 private: 3136 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3137 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3138 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3139 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3140 3141 const float value_; 3142 3143 // Only the SsaBuilder and HGraph can create floating-point constants. 3144 friend class SsaBuilder; 3145 friend class HGraph; 3146 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3147}; 3148 3149class HDoubleConstant : public HConstant { 3150 public: 3151 double GetValue() const { return value_; } 3152 3153 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3154 3155 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3156 DCHECK(other->IsDoubleConstant()); 3157 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3158 } 3159 3160 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3161 3162 bool IsMinusOne() const OVERRIDE { 3163 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3164 } 3165 bool IsZero() const OVERRIDE { 3166 return value_ == 0.0; 3167 } 3168 bool IsOne() const OVERRIDE { 3169 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3170 } 3171 bool IsNaN() const { 3172 return std::isnan(value_); 3173 } 3174 3175 DECLARE_INSTRUCTION(DoubleConstant); 3176 3177 private: 3178 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3179 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3180 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3181 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3182 3183 const double value_; 3184 3185 // Only the SsaBuilder and HGraph can create floating-point constants. 3186 friend class SsaBuilder; 3187 friend class HGraph; 3188 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3189}; 3190 3191enum class Intrinsics { 3192#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name, 3193#include "intrinsics_list.h" 3194 kNone, 3195 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3196#undef INTRINSICS_LIST 3197#undef OPTIMIZING_INTRINSICS 3198}; 3199std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3200 3201enum IntrinsicNeedsEnvironmentOrCache { 3202 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3203 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3204}; 3205 3206class HInvoke : public HInstruction { 3207 public: 3208 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3209 3210 bool NeedsEnvironment() const OVERRIDE; 3211 3212 void SetArgumentAt(size_t index, HInstruction* argument) { 3213 SetRawInputAt(index, argument); 3214 } 3215 3216 // Return the number of arguments. This number can be lower than 3217 // the number of inputs returned by InputCount(), as some invoke 3218 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3219 // inputs at the end of their list of inputs. 3220 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3221 3222 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3223 3224 3225 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3226 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3227 3228 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3229 3230 Intrinsics GetIntrinsic() const { 3231 return intrinsic_; 3232 } 3233 3234 void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache); 3235 3236 bool IsFromInlinedInvoke() const { 3237 return GetEnvironment()->GetParent() != nullptr; 3238 } 3239 3240 bool CanThrow() const OVERRIDE { return true; } 3241 3242 uint32_t* GetIntrinsicOptimizations() { 3243 return &intrinsic_optimizations_; 3244 } 3245 3246 const uint32_t* GetIntrinsicOptimizations() const { 3247 return &intrinsic_optimizations_; 3248 } 3249 3250 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3251 3252 DECLARE_INSTRUCTION(Invoke); 3253 3254 protected: 3255 HInvoke(ArenaAllocator* arena, 3256 uint32_t number_of_arguments, 3257 uint32_t number_of_other_inputs, 3258 Primitive::Type return_type, 3259 uint32_t dex_pc, 3260 uint32_t dex_method_index, 3261 InvokeType original_invoke_type) 3262 : HInstruction( 3263 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 3264 number_of_arguments_(number_of_arguments), 3265 inputs_(number_of_arguments + number_of_other_inputs, 3266 arena->Adapter(kArenaAllocInvokeInputs)), 3267 return_type_(return_type), 3268 dex_method_index_(dex_method_index), 3269 original_invoke_type_(original_invoke_type), 3270 intrinsic_(Intrinsics::kNone), 3271 intrinsic_optimizations_(0) { 3272 } 3273 3274 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3275 return inputs_[index]; 3276 } 3277 3278 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3279 inputs_[index] = input; 3280 } 3281 3282 uint32_t number_of_arguments_; 3283 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3284 const Primitive::Type return_type_; 3285 const uint32_t dex_method_index_; 3286 const InvokeType original_invoke_type_; 3287 Intrinsics intrinsic_; 3288 3289 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3290 uint32_t intrinsic_optimizations_; 3291 3292 private: 3293 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3294}; 3295 3296class HInvokeUnresolved : public HInvoke { 3297 public: 3298 HInvokeUnresolved(ArenaAllocator* arena, 3299 uint32_t number_of_arguments, 3300 Primitive::Type return_type, 3301 uint32_t dex_pc, 3302 uint32_t dex_method_index, 3303 InvokeType invoke_type) 3304 : HInvoke(arena, 3305 number_of_arguments, 3306 0u /* number_of_other_inputs */, 3307 return_type, 3308 dex_pc, 3309 dex_method_index, 3310 invoke_type) { 3311 } 3312 3313 DECLARE_INSTRUCTION(InvokeUnresolved); 3314 3315 private: 3316 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3317}; 3318 3319class HInvokeStaticOrDirect : public HInvoke { 3320 public: 3321 // Requirements of this method call regarding the class 3322 // initialization (clinit) check of its declaring class. 3323 enum class ClinitCheckRequirement { 3324 kNone, // Class already initialized. 3325 kExplicit, // Static call having explicit clinit check as last input. 3326 kImplicit, // Static call implicitly requiring a clinit check. 3327 }; 3328 3329 // Determines how to load the target ArtMethod*. 3330 enum class MethodLoadKind { 3331 // Use a String init ArtMethod* loaded from Thread entrypoints. 3332 kStringInit, 3333 3334 // Use the method's own ArtMethod* loaded by the register allocator. 3335 kRecursive, 3336 3337 // Use ArtMethod* at a known address, embed the direct address in the code. 3338 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3339 kDirectAddress, 3340 3341 // Use ArtMethod* at an address that will be known at link time, embed the direct 3342 // address in the code. If the image is relocatable, emit .patch_oat entry. 3343 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3344 // the image relocatable or not. 3345 kDirectAddressWithFixup, 3346 3347 // Load from resoved methods array in the dex cache using a PC-relative load. 3348 // Used when we need to use the dex cache, for example for invoke-static that 3349 // may cause class initialization (the entry may point to a resolution method), 3350 // and we know that we can access the dex cache arrays using a PC-relative load. 3351 kDexCachePcRelative, 3352 3353 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3354 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3355 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3356 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3357 kDexCacheViaMethod, 3358 }; 3359 3360 // Determines the location of the code pointer. 3361 enum class CodePtrLocation { 3362 // Recursive call, use local PC-relative call instruction. 3363 kCallSelf, 3364 3365 // Use PC-relative call instruction patched at link time. 3366 // Used for calls within an oat file, boot->boot or app->app. 3367 kCallPCRelative, 3368 3369 // Call to a known target address, embed the direct address in code. 3370 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3371 kCallDirect, 3372 3373 // Call to a target address that will be known at link time, embed the direct 3374 // address in code. If the image is relocatable, emit .patch_oat entry. 3375 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3376 // the image relocatable or not. 3377 kCallDirectWithFixup, 3378 3379 // Use code pointer from the ArtMethod*. 3380 // Used when we don't know the target code. This is also the last-resort-kind used when 3381 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3382 kCallArtMethod, 3383 }; 3384 3385 struct DispatchInfo { 3386 MethodLoadKind method_load_kind; 3387 CodePtrLocation code_ptr_location; 3388 // The method load data holds 3389 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3390 // Note that there are multiple string init methods, each having its own offset. 3391 // - the method address for kDirectAddress 3392 // - the dex cache arrays offset for kDexCachePcRel. 3393 uint64_t method_load_data; 3394 uint64_t direct_code_ptr; 3395 }; 3396 3397 HInvokeStaticOrDirect(ArenaAllocator* arena, 3398 uint32_t number_of_arguments, 3399 Primitive::Type return_type, 3400 uint32_t dex_pc, 3401 uint32_t method_index, 3402 MethodReference target_method, 3403 DispatchInfo dispatch_info, 3404 InvokeType original_invoke_type, 3405 InvokeType invoke_type, 3406 ClinitCheckRequirement clinit_check_requirement) 3407 : HInvoke(arena, 3408 number_of_arguments, 3409 // There is potentially one extra argument for the HCurrentMethod node, and 3410 // potentially one other if the clinit check is explicit, and potentially 3411 // one other if the method is a string factory. 3412 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) + 3413 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) + 3414 (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3415 return_type, 3416 dex_pc, 3417 method_index, 3418 original_invoke_type), 3419 invoke_type_(invoke_type), 3420 clinit_check_requirement_(clinit_check_requirement), 3421 target_method_(target_method), 3422 dispatch_info_(dispatch_info) { } 3423 3424 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 3425 bool had_current_method_input = HasCurrentMethodInput(); 3426 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind); 3427 3428 // Using the current method is the default and once we find a better 3429 // method load kind, we should not go back to using the current method. 3430 DCHECK(had_current_method_input || !needs_current_method_input); 3431 3432 if (had_current_method_input && !needs_current_method_input) { 3433 DCHECK_EQ(InputAt(GetCurrentMethodInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod()); 3434 RemoveInputAt(GetCurrentMethodInputIndex()); 3435 } 3436 dispatch_info_ = dispatch_info; 3437 } 3438 3439 void InsertInputAt(size_t index, HInstruction* input); 3440 void RemoveInputAt(size_t index); 3441 3442 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3443 // We access the method via the dex cache so we can't do an implicit null check. 3444 // TODO: for intrinsics we can generate implicit null checks. 3445 return false; 3446 } 3447 3448 bool CanBeNull() const OVERRIDE { 3449 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3450 } 3451 3452 InvokeType GetInvokeType() const { return invoke_type_; } 3453 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3454 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3455 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3456 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE; 3457 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3458 uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); } 3459 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3460 bool HasPcRelativeDexCache() const { 3461 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; 3462 } 3463 bool HasCurrentMethodInput() const { 3464 // This function can be called only after the invoke has been fully initialized by the builder. 3465 if (NeedsCurrentMethodInput(GetMethodLoadKind())) { 3466 DCHECK(InputAt(GetCurrentMethodInputIndex())->IsCurrentMethod()); 3467 return true; 3468 } else { 3469 DCHECK(InputCount() == GetCurrentMethodInputIndex() || 3470 !InputAt(GetCurrentMethodInputIndex())->IsCurrentMethod()); 3471 return false; 3472 } 3473 } 3474 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3475 MethodReference GetTargetMethod() const { return target_method_; } 3476 3477 int32_t GetStringInitOffset() const { 3478 DCHECK(IsStringInit()); 3479 return dispatch_info_.method_load_data; 3480 } 3481 3482 uint64_t GetMethodAddress() const { 3483 DCHECK(HasMethodAddress()); 3484 return dispatch_info_.method_load_data; 3485 } 3486 3487 uint32_t GetDexCacheArrayOffset() const { 3488 DCHECK(HasPcRelativeDexCache()); 3489 return dispatch_info_.method_load_data; 3490 } 3491 3492 uint64_t GetDirectCodePtr() const { 3493 DCHECK(HasDirectCodePtr()); 3494 return dispatch_info_.direct_code_ptr; 3495 } 3496 3497 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3498 3499 // Is this instruction a call to a static method? 3500 bool IsStatic() const { 3501 return GetInvokeType() == kStatic; 3502 } 3503 3504 // Remove the art::HLoadClass instruction set as last input by 3505 // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of 3506 // the initial art::HClinitCheck instruction (only relevant for 3507 // static calls with explicit clinit check). 3508 void RemoveLoadClassAsLastInput() { 3509 DCHECK(IsStaticWithExplicitClinitCheck()); 3510 size_t last_input_index = InputCount() - 1; 3511 HInstruction* last_input = InputAt(last_input_index); 3512 DCHECK(last_input != nullptr); 3513 DCHECK(last_input->IsLoadClass()) << last_input->DebugName(); 3514 RemoveAsUserOfInput(last_input_index); 3515 inputs_.pop_back(); 3516 clinit_check_requirement_ = ClinitCheckRequirement::kImplicit; 3517 DCHECK(IsStaticWithImplicitClinitCheck()); 3518 } 3519 3520 bool IsStringFactoryFor(HFakeString* str) const { 3521 if (!IsStringInit()) return false; 3522 DCHECK(!HasCurrentMethodInput()); 3523 if (InputCount() == (number_of_arguments_)) return false; 3524 return InputAt(InputCount() - 1)->AsFakeString() == str; 3525 } 3526 3527 void RemoveFakeStringArgumentAsLastInput() { 3528 DCHECK(IsStringInit()); 3529 size_t last_input_index = InputCount() - 1; 3530 HInstruction* last_input = InputAt(last_input_index); 3531 DCHECK(last_input != nullptr); 3532 DCHECK(last_input->IsFakeString()) << last_input->DebugName(); 3533 RemoveAsUserOfInput(last_input_index); 3534 inputs_.pop_back(); 3535 } 3536 3537 // Is this a call to a static method whose declaring class has an 3538 // explicit intialization check in the graph? 3539 bool IsStaticWithExplicitClinitCheck() const { 3540 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3541 } 3542 3543 // Is this a call to a static method whose declaring class has an 3544 // implicit intialization check requirement? 3545 bool IsStaticWithImplicitClinitCheck() const { 3546 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3547 } 3548 3549 // Does this method load kind need the current method as an input? 3550 static bool NeedsCurrentMethodInput(MethodLoadKind kind) { 3551 return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod; 3552 } 3553 3554 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3555 3556 protected: 3557 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3558 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3559 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3560 HInstruction* input = input_record.GetInstruction(); 3561 // `input` is the last input of a static invoke marked as having 3562 // an explicit clinit check. It must either be: 3563 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3564 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3565 DCHECK(input != nullptr); 3566 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3567 } 3568 return input_record; 3569 } 3570 3571 private: 3572 const InvokeType invoke_type_; 3573 ClinitCheckRequirement clinit_check_requirement_; 3574 // The target method may refer to different dex file or method index than the original 3575 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3576 // in derived class to increase visibility. 3577 MethodReference target_method_; 3578 DispatchInfo dispatch_info_; 3579 3580 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3581}; 3582 3583class HInvokeVirtual : public HInvoke { 3584 public: 3585 HInvokeVirtual(ArenaAllocator* arena, 3586 uint32_t number_of_arguments, 3587 Primitive::Type return_type, 3588 uint32_t dex_pc, 3589 uint32_t dex_method_index, 3590 uint32_t vtable_index) 3591 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3592 vtable_index_(vtable_index) {} 3593 3594 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3595 // TODO: Add implicit null checks in intrinsics. 3596 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3597 } 3598 3599 uint32_t GetVTableIndex() const { return vtable_index_; } 3600 3601 DECLARE_INSTRUCTION(InvokeVirtual); 3602 3603 private: 3604 const uint32_t vtable_index_; 3605 3606 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3607}; 3608 3609class HInvokeInterface : public HInvoke { 3610 public: 3611 HInvokeInterface(ArenaAllocator* arena, 3612 uint32_t number_of_arguments, 3613 Primitive::Type return_type, 3614 uint32_t dex_pc, 3615 uint32_t dex_method_index, 3616 uint32_t imt_index) 3617 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3618 imt_index_(imt_index) {} 3619 3620 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3621 // TODO: Add implicit null checks in intrinsics. 3622 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3623 } 3624 3625 uint32_t GetImtIndex() const { return imt_index_; } 3626 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3627 3628 DECLARE_INSTRUCTION(InvokeInterface); 3629 3630 private: 3631 const uint32_t imt_index_; 3632 3633 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3634}; 3635 3636class HNewInstance : public HExpression<1> { 3637 public: 3638 HNewInstance(HCurrentMethod* current_method, 3639 uint32_t dex_pc, 3640 uint16_t type_index, 3641 const DexFile& dex_file, 3642 QuickEntrypointEnum entrypoint) 3643 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3644 type_index_(type_index), 3645 dex_file_(dex_file), 3646 entrypoint_(entrypoint) { 3647 SetRawInputAt(0, current_method); 3648 } 3649 3650 uint16_t GetTypeIndex() const { return type_index_; } 3651 const DexFile& GetDexFile() const { return dex_file_; } 3652 3653 // Calls runtime so needs an environment. 3654 bool NeedsEnvironment() const OVERRIDE { return true; } 3655 // It may throw when called on: 3656 // - interfaces 3657 // - abstract/innaccessible/unknown classes 3658 // TODO: optimize when possible. 3659 bool CanThrow() const OVERRIDE { return true; } 3660 3661 bool CanBeNull() const OVERRIDE { return false; } 3662 3663 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3664 3665 DECLARE_INSTRUCTION(NewInstance); 3666 3667 private: 3668 const uint16_t type_index_; 3669 const DexFile& dex_file_; 3670 const QuickEntrypointEnum entrypoint_; 3671 3672 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3673}; 3674 3675class HNeg : public HUnaryOperation { 3676 public: 3677 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3678 : HUnaryOperation(result_type, input, dex_pc) {} 3679 3680 template <typename T> T Compute(T x) const { return -x; } 3681 3682 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3683 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3684 } 3685 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3686 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3687 } 3688 3689 DECLARE_INSTRUCTION(Neg); 3690 3691 private: 3692 DISALLOW_COPY_AND_ASSIGN(HNeg); 3693}; 3694 3695class HNewArray : public HExpression<2> { 3696 public: 3697 HNewArray(HInstruction* length, 3698 HCurrentMethod* current_method, 3699 uint32_t dex_pc, 3700 uint16_t type_index, 3701 const DexFile& dex_file, 3702 QuickEntrypointEnum entrypoint) 3703 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3704 type_index_(type_index), 3705 dex_file_(dex_file), 3706 entrypoint_(entrypoint) { 3707 SetRawInputAt(0, length); 3708 SetRawInputAt(1, current_method); 3709 } 3710 3711 uint16_t GetTypeIndex() const { return type_index_; } 3712 const DexFile& GetDexFile() const { return dex_file_; } 3713 3714 // Calls runtime so needs an environment. 3715 bool NeedsEnvironment() const OVERRIDE { return true; } 3716 3717 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 3718 bool CanThrow() const OVERRIDE { return true; } 3719 3720 bool CanBeNull() const OVERRIDE { return false; } 3721 3722 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3723 3724 DECLARE_INSTRUCTION(NewArray); 3725 3726 private: 3727 const uint16_t type_index_; 3728 const DexFile& dex_file_; 3729 const QuickEntrypointEnum entrypoint_; 3730 3731 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3732}; 3733 3734class HAdd : public HBinaryOperation { 3735 public: 3736 HAdd(Primitive::Type result_type, 3737 HInstruction* left, 3738 HInstruction* right, 3739 uint32_t dex_pc = kNoDexPc) 3740 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3741 3742 bool IsCommutative() const OVERRIDE { return true; } 3743 3744 template <typename T> T Compute(T x, T y) const { return x + y; } 3745 3746 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3747 return GetBlock()->GetGraph()->GetIntConstant( 3748 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3749 } 3750 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3751 return GetBlock()->GetGraph()->GetLongConstant( 3752 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3753 } 3754 3755 DECLARE_INSTRUCTION(Add); 3756 3757 private: 3758 DISALLOW_COPY_AND_ASSIGN(HAdd); 3759}; 3760 3761class HSub : public HBinaryOperation { 3762 public: 3763 HSub(Primitive::Type result_type, 3764 HInstruction* left, 3765 HInstruction* right, 3766 uint32_t dex_pc = kNoDexPc) 3767 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3768 3769 template <typename T> T Compute(T x, T y) const { return x - y; } 3770 3771 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3772 return GetBlock()->GetGraph()->GetIntConstant( 3773 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3774 } 3775 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3776 return GetBlock()->GetGraph()->GetLongConstant( 3777 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3778 } 3779 3780 DECLARE_INSTRUCTION(Sub); 3781 3782 private: 3783 DISALLOW_COPY_AND_ASSIGN(HSub); 3784}; 3785 3786class HMul : public HBinaryOperation { 3787 public: 3788 HMul(Primitive::Type result_type, 3789 HInstruction* left, 3790 HInstruction* right, 3791 uint32_t dex_pc = kNoDexPc) 3792 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3793 3794 bool IsCommutative() const OVERRIDE { return true; } 3795 3796 template <typename T> T Compute(T x, T y) const { return x * y; } 3797 3798 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3799 return GetBlock()->GetGraph()->GetIntConstant( 3800 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3801 } 3802 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3803 return GetBlock()->GetGraph()->GetLongConstant( 3804 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3805 } 3806 3807 DECLARE_INSTRUCTION(Mul); 3808 3809 private: 3810 DISALLOW_COPY_AND_ASSIGN(HMul); 3811}; 3812 3813class HDiv : public HBinaryOperation { 3814 public: 3815 HDiv(Primitive::Type result_type, 3816 HInstruction* left, 3817 HInstruction* right, 3818 uint32_t dex_pc) 3819 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3820 3821 template <typename T> 3822 T Compute(T x, T y) const { 3823 // Our graph structure ensures we never have 0 for `y` during 3824 // constant folding. 3825 DCHECK_NE(y, 0); 3826 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3827 return (y == -1) ? -x : x / y; 3828 } 3829 3830 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3831 return GetBlock()->GetGraph()->GetIntConstant( 3832 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3833 } 3834 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3835 return GetBlock()->GetGraph()->GetLongConstant( 3836 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3837 } 3838 3839 static SideEffects SideEffectsForArchRuntimeCalls() { 3840 // The generated code can use a runtime call. 3841 return SideEffects::CanTriggerGC(); 3842 } 3843 3844 DECLARE_INSTRUCTION(Div); 3845 3846 private: 3847 DISALLOW_COPY_AND_ASSIGN(HDiv); 3848}; 3849 3850class HRem : public HBinaryOperation { 3851 public: 3852 HRem(Primitive::Type result_type, 3853 HInstruction* left, 3854 HInstruction* right, 3855 uint32_t dex_pc) 3856 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3857 3858 template <typename T> 3859 T Compute(T x, T y) const { 3860 // Our graph structure ensures we never have 0 for `y` during 3861 // constant folding. 3862 DCHECK_NE(y, 0); 3863 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3864 return (y == -1) ? 0 : x % y; 3865 } 3866 3867 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3868 return GetBlock()->GetGraph()->GetIntConstant( 3869 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3870 } 3871 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3872 return GetBlock()->GetGraph()->GetLongConstant( 3873 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3874 } 3875 3876 3877 static SideEffects SideEffectsForArchRuntimeCalls() { 3878 return SideEffects::CanTriggerGC(); 3879 } 3880 3881 DECLARE_INSTRUCTION(Rem); 3882 3883 private: 3884 DISALLOW_COPY_AND_ASSIGN(HRem); 3885}; 3886 3887class HDivZeroCheck : public HExpression<1> { 3888 public: 3889 HDivZeroCheck(HInstruction* value, uint32_t dex_pc) 3890 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 3891 SetRawInputAt(0, value); 3892 } 3893 3894 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); } 3895 3896 bool CanBeMoved() const OVERRIDE { return true; } 3897 3898 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 3899 return true; 3900 } 3901 3902 bool NeedsEnvironment() const OVERRIDE { return true; } 3903 bool CanThrow() const OVERRIDE { return true; } 3904 3905 DECLARE_INSTRUCTION(DivZeroCheck); 3906 3907 private: 3908 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck); 3909}; 3910 3911class HShl : public HBinaryOperation { 3912 public: 3913 HShl(Primitive::Type result_type, 3914 HInstruction* left, 3915 HInstruction* right, 3916 uint32_t dex_pc = kNoDexPc) 3917 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3918 3919 template <typename T, typename U, typename V> 3920 T Compute(T x, U y, V max_shift_value) const { 3921 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3922 "V is not the unsigned integer type corresponding to T"); 3923 return x << (y & max_shift_value); 3924 } 3925 3926 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3927 return GetBlock()->GetGraph()->GetIntConstant( 3928 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3929 } 3930 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3931 // case is handled as `x << static_cast<int>(y)`. 3932 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3933 return GetBlock()->GetGraph()->GetLongConstant( 3934 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3935 } 3936 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3937 return GetBlock()->GetGraph()->GetLongConstant( 3938 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3939 } 3940 3941 DECLARE_INSTRUCTION(Shl); 3942 3943 private: 3944 DISALLOW_COPY_AND_ASSIGN(HShl); 3945}; 3946 3947class HShr : public HBinaryOperation { 3948 public: 3949 HShr(Primitive::Type result_type, 3950 HInstruction* left, 3951 HInstruction* right, 3952 uint32_t dex_pc = kNoDexPc) 3953 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3954 3955 template <typename T, typename U, typename V> 3956 T Compute(T x, U y, V max_shift_value) const { 3957 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3958 "V is not the unsigned integer type corresponding to T"); 3959 return x >> (y & max_shift_value); 3960 } 3961 3962 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3963 return GetBlock()->GetGraph()->GetIntConstant( 3964 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3965 } 3966 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3967 // case is handled as `x >> static_cast<int>(y)`. 3968 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3969 return GetBlock()->GetGraph()->GetLongConstant( 3970 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3971 } 3972 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3973 return GetBlock()->GetGraph()->GetLongConstant( 3974 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3975 } 3976 3977 DECLARE_INSTRUCTION(Shr); 3978 3979 private: 3980 DISALLOW_COPY_AND_ASSIGN(HShr); 3981}; 3982 3983class HUShr : public HBinaryOperation { 3984 public: 3985 HUShr(Primitive::Type result_type, 3986 HInstruction* left, 3987 HInstruction* right, 3988 uint32_t dex_pc = kNoDexPc) 3989 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3990 3991 template <typename T, typename U, typename V> 3992 T Compute(T x, U y, V max_shift_value) const { 3993 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3994 "V is not the unsigned integer type corresponding to T"); 3995 V ux = static_cast<V>(x); 3996 return static_cast<T>(ux >> (y & max_shift_value)); 3997 } 3998 3999 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4000 return GetBlock()->GetGraph()->GetIntConstant( 4001 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4002 } 4003 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4004 // case is handled as `x >>> static_cast<int>(y)`. 4005 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4006 return GetBlock()->GetGraph()->GetLongConstant( 4007 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4008 } 4009 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4010 return GetBlock()->GetGraph()->GetLongConstant( 4011 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4012 } 4013 4014 DECLARE_INSTRUCTION(UShr); 4015 4016 private: 4017 DISALLOW_COPY_AND_ASSIGN(HUShr); 4018}; 4019 4020class HAnd : public HBinaryOperation { 4021 public: 4022 HAnd(Primitive::Type result_type, 4023 HInstruction* left, 4024 HInstruction* right, 4025 uint32_t dex_pc = kNoDexPc) 4026 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4027 4028 bool IsCommutative() const OVERRIDE { return true; } 4029 4030 template <typename T, typename U> 4031 auto Compute(T x, U y) const -> decltype(x & y) { return x & y; } 4032 4033 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4034 return GetBlock()->GetGraph()->GetIntConstant( 4035 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4036 } 4037 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4038 return GetBlock()->GetGraph()->GetLongConstant( 4039 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4040 } 4041 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4042 return GetBlock()->GetGraph()->GetLongConstant( 4043 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4044 } 4045 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4046 return GetBlock()->GetGraph()->GetLongConstant( 4047 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4048 } 4049 4050 DECLARE_INSTRUCTION(And); 4051 4052 private: 4053 DISALLOW_COPY_AND_ASSIGN(HAnd); 4054}; 4055 4056class HOr : public HBinaryOperation { 4057 public: 4058 HOr(Primitive::Type result_type, 4059 HInstruction* left, 4060 HInstruction* right, 4061 uint32_t dex_pc = kNoDexPc) 4062 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4063 4064 bool IsCommutative() const OVERRIDE { return true; } 4065 4066 template <typename T, typename U> 4067 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 4068 4069 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4070 return GetBlock()->GetGraph()->GetIntConstant( 4071 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4072 } 4073 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4074 return GetBlock()->GetGraph()->GetLongConstant( 4075 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4076 } 4077 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4078 return GetBlock()->GetGraph()->GetLongConstant( 4079 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4080 } 4081 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4082 return GetBlock()->GetGraph()->GetLongConstant( 4083 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4084 } 4085 4086 DECLARE_INSTRUCTION(Or); 4087 4088 private: 4089 DISALLOW_COPY_AND_ASSIGN(HOr); 4090}; 4091 4092class HXor : public HBinaryOperation { 4093 public: 4094 HXor(Primitive::Type result_type, 4095 HInstruction* left, 4096 HInstruction* right, 4097 uint32_t dex_pc = kNoDexPc) 4098 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4099 4100 bool IsCommutative() const OVERRIDE { return true; } 4101 4102 template <typename T, typename U> 4103 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 4104 4105 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4106 return GetBlock()->GetGraph()->GetIntConstant( 4107 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4108 } 4109 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4110 return GetBlock()->GetGraph()->GetLongConstant( 4111 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4112 } 4113 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4114 return GetBlock()->GetGraph()->GetLongConstant( 4115 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4116 } 4117 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4118 return GetBlock()->GetGraph()->GetLongConstant( 4119 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4120 } 4121 4122 DECLARE_INSTRUCTION(Xor); 4123 4124 private: 4125 DISALLOW_COPY_AND_ASSIGN(HXor); 4126}; 4127 4128// The value of a parameter in this method. Its location depends on 4129// the calling convention. 4130class HParameterValue : public HExpression<0> { 4131 public: 4132 HParameterValue(const DexFile& dex_file, 4133 uint16_t type_index, 4134 uint8_t index, 4135 Primitive::Type parameter_type, 4136 bool is_this = false) 4137 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 4138 dex_file_(dex_file), 4139 type_index_(type_index), 4140 index_(index), 4141 is_this_(is_this), 4142 can_be_null_(!is_this) {} 4143 4144 const DexFile& GetDexFile() const { return dex_file_; } 4145 uint16_t GetTypeIndex() const { return type_index_; } 4146 uint8_t GetIndex() const { return index_; } 4147 bool IsThis() const { return is_this_; } 4148 4149 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4150 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4151 4152 DECLARE_INSTRUCTION(ParameterValue); 4153 4154 private: 4155 const DexFile& dex_file_; 4156 const uint16_t type_index_; 4157 // The index of this parameter in the parameters list. Must be less 4158 // than HGraph::number_of_in_vregs_. 4159 const uint8_t index_; 4160 4161 // Whether or not the parameter value corresponds to 'this' argument. 4162 const bool is_this_; 4163 4164 bool can_be_null_; 4165 4166 DISALLOW_COPY_AND_ASSIGN(HParameterValue); 4167}; 4168 4169class HNot : public HUnaryOperation { 4170 public: 4171 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 4172 : HUnaryOperation(result_type, input, dex_pc) {} 4173 4174 bool CanBeMoved() const OVERRIDE { return true; } 4175 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4176 return true; 4177 } 4178 4179 template <typename T> T Compute(T x) const { return ~x; } 4180 4181 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4182 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4183 } 4184 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 4185 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 4186 } 4187 4188 DECLARE_INSTRUCTION(Not); 4189 4190 private: 4191 DISALLOW_COPY_AND_ASSIGN(HNot); 4192}; 4193 4194class HBooleanNot : public HUnaryOperation { 4195 public: 4196 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc) 4197 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {} 4198 4199 bool CanBeMoved() const OVERRIDE { return true; } 4200 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4201 return true; 4202 } 4203 4204 template <typename T> bool Compute(T x) const { 4205 DCHECK(IsUint<1>(x)); 4206 return !x; 4207 } 4208 4209 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4210 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4211 } 4212 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE { 4213 LOG(FATAL) << DebugName() << " is not defined for long values"; 4214 UNREACHABLE(); 4215 } 4216 4217 DECLARE_INSTRUCTION(BooleanNot); 4218 4219 private: 4220 DISALLOW_COPY_AND_ASSIGN(HBooleanNot); 4221}; 4222 4223class HTypeConversion : public HExpression<1> { 4224 public: 4225 // Instantiate a type conversion of `input` to `result_type`. 4226 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc) 4227 : HExpression(result_type, 4228 SideEffectsForArchRuntimeCalls(input->GetType(), result_type), 4229 dex_pc) { 4230 SetRawInputAt(0, input); 4231 DCHECK_NE(input->GetType(), result_type); 4232 } 4233 4234 HInstruction* GetInput() const { return InputAt(0); } 4235 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 4236 Primitive::Type GetResultType() const { return GetType(); } 4237 4238 // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls 4239 // to the runtime. 4240 4241 bool CanBeMoved() const OVERRIDE { return true; } 4242 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; } 4243 4244 // Try to statically evaluate the conversion and return a HConstant 4245 // containing the result. If the input cannot be converted, return nullptr. 4246 HConstant* TryStaticEvaluation() const; 4247 4248 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type, 4249 Primitive::Type result_type) { 4250 // Some architectures may not require the 'GC' side effects, but at this point 4251 // in the compilation process we do not know what architecture we will 4252 // generate code for, so we must be conservative. 4253 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type)) 4254 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) { 4255 return SideEffects::CanTriggerGC(); 4256 } 4257 return SideEffects::None(); 4258 } 4259 4260 DECLARE_INSTRUCTION(TypeConversion); 4261 4262 private: 4263 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 4264}; 4265 4266static constexpr uint32_t kNoRegNumber = -1; 4267 4268class HPhi : public HInstruction { 4269 public: 4270 HPhi(ArenaAllocator* arena, 4271 uint32_t reg_number, 4272 size_t number_of_inputs, 4273 Primitive::Type type, 4274 uint32_t dex_pc = kNoDexPc) 4275 : HInstruction(SideEffects::None(), dex_pc), 4276 inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)), 4277 reg_number_(reg_number), 4278 type_(type), 4279 is_live_(false), 4280 can_be_null_(true) { 4281 } 4282 4283 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold. 4284 static Primitive::Type ToPhiType(Primitive::Type type) { 4285 switch (type) { 4286 case Primitive::kPrimBoolean: 4287 case Primitive::kPrimByte: 4288 case Primitive::kPrimShort: 4289 case Primitive::kPrimChar: 4290 return Primitive::kPrimInt; 4291 default: 4292 return type; 4293 } 4294 } 4295 4296 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); } 4297 4298 size_t InputCount() const OVERRIDE { return inputs_.size(); } 4299 4300 void AddInput(HInstruction* input); 4301 void RemoveInputAt(size_t index); 4302 4303 Primitive::Type GetType() const OVERRIDE { return type_; } 4304 void SetType(Primitive::Type type) { type_ = type; } 4305 4306 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4307 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4308 4309 uint32_t GetRegNumber() const { return reg_number_; } 4310 4311 void SetDead() { is_live_ = false; } 4312 void SetLive() { is_live_ = true; } 4313 bool IsDead() const { return !is_live_; } 4314 bool IsLive() const { return is_live_; } 4315 4316 bool IsVRegEquivalentOf(HInstruction* other) const { 4317 return other != nullptr 4318 && other->IsPhi() 4319 && other->AsPhi()->GetBlock() == GetBlock() 4320 && other->AsPhi()->GetRegNumber() == GetRegNumber(); 4321 } 4322 4323 // Returns the next equivalent phi (starting from the current one) or null if there is none. 4324 // An equivalent phi is a phi having the same dex register and type. 4325 // It assumes that phis with the same dex register are adjacent. 4326 HPhi* GetNextEquivalentPhiWithSameType() { 4327 HInstruction* next = GetNext(); 4328 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 4329 if (next->GetType() == GetType()) { 4330 return next->AsPhi(); 4331 } 4332 next = next->GetNext(); 4333 } 4334 return nullptr; 4335 } 4336 4337 DECLARE_INSTRUCTION(Phi); 4338 4339 protected: 4340 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 4341 return inputs_[index]; 4342 } 4343 4344 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 4345 inputs_[index] = input; 4346 } 4347 4348 private: 4349 ArenaVector<HUserRecord<HInstruction*> > inputs_; 4350 const uint32_t reg_number_; 4351 Primitive::Type type_; 4352 bool is_live_; 4353 bool can_be_null_; 4354 4355 DISALLOW_COPY_AND_ASSIGN(HPhi); 4356}; 4357 4358class HNullCheck : public HExpression<1> { 4359 public: 4360 HNullCheck(HInstruction* value, uint32_t dex_pc) 4361 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 4362 SetRawInputAt(0, value); 4363 } 4364 4365 bool CanBeMoved() const OVERRIDE { return true; } 4366 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4367 return true; 4368 } 4369 4370 bool NeedsEnvironment() const OVERRIDE { return true; } 4371 4372 bool CanThrow() const OVERRIDE { return true; } 4373 4374 bool CanBeNull() const OVERRIDE { return false; } 4375 4376 4377 DECLARE_INSTRUCTION(NullCheck); 4378 4379 private: 4380 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 4381}; 4382 4383class FieldInfo : public ValueObject { 4384 public: 4385 FieldInfo(MemberOffset field_offset, 4386 Primitive::Type field_type, 4387 bool is_volatile, 4388 uint32_t index, 4389 uint16_t declaring_class_def_index, 4390 const DexFile& dex_file, 4391 Handle<mirror::DexCache> dex_cache) 4392 : field_offset_(field_offset), 4393 field_type_(field_type), 4394 is_volatile_(is_volatile), 4395 index_(index), 4396 declaring_class_def_index_(declaring_class_def_index), 4397 dex_file_(dex_file), 4398 dex_cache_(dex_cache) {} 4399 4400 MemberOffset GetFieldOffset() const { return field_offset_; } 4401 Primitive::Type GetFieldType() const { return field_type_; } 4402 uint32_t GetFieldIndex() const { return index_; } 4403 uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;} 4404 const DexFile& GetDexFile() const { return dex_file_; } 4405 bool IsVolatile() const { return is_volatile_; } 4406 Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; } 4407 4408 private: 4409 const MemberOffset field_offset_; 4410 const Primitive::Type field_type_; 4411 const bool is_volatile_; 4412 const uint32_t index_; 4413 const uint16_t declaring_class_def_index_; 4414 const DexFile& dex_file_; 4415 const Handle<mirror::DexCache> dex_cache_; 4416}; 4417 4418class HInstanceFieldGet : public HExpression<1> { 4419 public: 4420 HInstanceFieldGet(HInstruction* value, 4421 Primitive::Type field_type, 4422 MemberOffset field_offset, 4423 bool is_volatile, 4424 uint32_t field_idx, 4425 uint16_t declaring_class_def_index, 4426 const DexFile& dex_file, 4427 Handle<mirror::DexCache> dex_cache, 4428 uint32_t dex_pc) 4429 : HExpression(field_type, 4430 SideEffects::FieldReadOfType(field_type, is_volatile), 4431 dex_pc), 4432 field_info_(field_offset, 4433 field_type, 4434 is_volatile, 4435 field_idx, 4436 declaring_class_def_index, 4437 dex_file, 4438 dex_cache) { 4439 SetRawInputAt(0, value); 4440 } 4441 4442 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4443 4444 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4445 HInstanceFieldGet* other_get = other->AsInstanceFieldGet(); 4446 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4447 } 4448 4449 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4450 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4451 } 4452 4453 size_t ComputeHashCode() const OVERRIDE { 4454 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4455 } 4456 4457 const FieldInfo& GetFieldInfo() const { return field_info_; } 4458 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4459 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4460 bool IsVolatile() const { return field_info_.IsVolatile(); } 4461 4462 DECLARE_INSTRUCTION(InstanceFieldGet); 4463 4464 private: 4465 const FieldInfo field_info_; 4466 4467 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet); 4468}; 4469 4470class HInstanceFieldSet : public HTemplateInstruction<2> { 4471 public: 4472 HInstanceFieldSet(HInstruction* object, 4473 HInstruction* value, 4474 Primitive::Type field_type, 4475 MemberOffset field_offset, 4476 bool is_volatile, 4477 uint32_t field_idx, 4478 uint16_t declaring_class_def_index, 4479 const DexFile& dex_file, 4480 Handle<mirror::DexCache> dex_cache, 4481 uint32_t dex_pc) 4482 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 4483 dex_pc), 4484 field_info_(field_offset, 4485 field_type, 4486 is_volatile, 4487 field_idx, 4488 declaring_class_def_index, 4489 dex_file, 4490 dex_cache), 4491 value_can_be_null_(true) { 4492 SetRawInputAt(0, object); 4493 SetRawInputAt(1, value); 4494 } 4495 4496 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4497 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4498 } 4499 4500 const FieldInfo& GetFieldInfo() const { return field_info_; } 4501 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4502 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4503 bool IsVolatile() const { return field_info_.IsVolatile(); } 4504 HInstruction* GetValue() const { return InputAt(1); } 4505 bool GetValueCanBeNull() const { return value_can_be_null_; } 4506 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4507 4508 DECLARE_INSTRUCTION(InstanceFieldSet); 4509 4510 private: 4511 const FieldInfo field_info_; 4512 bool value_can_be_null_; 4513 4514 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet); 4515}; 4516 4517class HArrayGet : public HExpression<2> { 4518 public: 4519 HArrayGet(HInstruction* array, 4520 HInstruction* index, 4521 Primitive::Type type, 4522 uint32_t dex_pc, 4523 SideEffects additional_side_effects = SideEffects::None()) 4524 : HExpression(type, 4525 SideEffects::ArrayReadOfType(type).Union(additional_side_effects), 4526 dex_pc) { 4527 SetRawInputAt(0, array); 4528 SetRawInputAt(1, index); 4529 } 4530 4531 bool CanBeMoved() const OVERRIDE { return true; } 4532 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4533 return true; 4534 } 4535 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4536 // TODO: We can be smarter here. 4537 // Currently, the array access is always preceded by an ArrayLength or a NullCheck 4538 // which generates the implicit null check. There are cases when these can be removed 4539 // to produce better code. If we ever add optimizations to do so we should allow an 4540 // implicit check here (as long as the address falls in the first page). 4541 return false; 4542 } 4543 4544 void SetType(Primitive::Type type) { type_ = type; } 4545 4546 HInstruction* GetArray() const { return InputAt(0); } 4547 HInstruction* GetIndex() const { return InputAt(1); } 4548 4549 DECLARE_INSTRUCTION(ArrayGet); 4550 4551 private: 4552 DISALLOW_COPY_AND_ASSIGN(HArrayGet); 4553}; 4554 4555class HArraySet : public HTemplateInstruction<3> { 4556 public: 4557 HArraySet(HInstruction* array, 4558 HInstruction* index, 4559 HInstruction* value, 4560 Primitive::Type expected_component_type, 4561 uint32_t dex_pc, 4562 SideEffects additional_side_effects = SideEffects::None()) 4563 : HTemplateInstruction( 4564 SideEffects::ArrayWriteOfType(expected_component_type).Union( 4565 SideEffectsForArchRuntimeCalls(value->GetType())).Union( 4566 additional_side_effects), 4567 dex_pc), 4568 expected_component_type_(expected_component_type), 4569 needs_type_check_(value->GetType() == Primitive::kPrimNot), 4570 value_can_be_null_(true), 4571 static_type_of_array_is_object_array_(false) { 4572 SetRawInputAt(0, array); 4573 SetRawInputAt(1, index); 4574 SetRawInputAt(2, value); 4575 } 4576 4577 bool NeedsEnvironment() const OVERRIDE { 4578 // We currently always call a runtime method to catch array store 4579 // exceptions. 4580 return needs_type_check_; 4581 } 4582 4583 // Can throw ArrayStoreException. 4584 bool CanThrow() const OVERRIDE { return needs_type_check_; } 4585 4586 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4587 // TODO: Same as for ArrayGet. 4588 return false; 4589 } 4590 4591 void ClearNeedsTypeCheck() { 4592 needs_type_check_ = false; 4593 } 4594 4595 void ClearValueCanBeNull() { 4596 value_can_be_null_ = false; 4597 } 4598 4599 void SetStaticTypeOfArrayIsObjectArray() { 4600 static_type_of_array_is_object_array_ = true; 4601 } 4602 4603 bool GetValueCanBeNull() const { return value_can_be_null_; } 4604 bool NeedsTypeCheck() const { return needs_type_check_; } 4605 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4606 4607 HInstruction* GetArray() const { return InputAt(0); } 4608 HInstruction* GetIndex() const { return InputAt(1); } 4609 HInstruction* GetValue() const { return InputAt(2); } 4610 4611 Primitive::Type GetComponentType() const { 4612 // The Dex format does not type floating point index operations. Since the 4613 // `expected_component_type_` is set during building and can therefore not 4614 // be correct, we also check what is the value type. If it is a floating 4615 // point type, we must use that type. 4616 Primitive::Type value_type = GetValue()->GetType(); 4617 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4618 ? value_type 4619 : expected_component_type_; 4620 } 4621 4622 Primitive::Type GetRawExpectedComponentType() const { 4623 return expected_component_type_; 4624 } 4625 4626 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4627 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None(); 4628 } 4629 4630 DECLARE_INSTRUCTION(ArraySet); 4631 4632 private: 4633 const Primitive::Type expected_component_type_; 4634 bool needs_type_check_; 4635 bool value_can_be_null_; 4636 // Cached information for the reference_type_info_ so that codegen 4637 // does not need to inspect the static type. 4638 bool static_type_of_array_is_object_array_; 4639 4640 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4641}; 4642 4643class HArrayLength : public HExpression<1> { 4644 public: 4645 HArrayLength(HInstruction* array, uint32_t dex_pc) 4646 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4647 // Note that arrays do not change length, so the instruction does not 4648 // depend on any write. 4649 SetRawInputAt(0, array); 4650 } 4651 4652 bool CanBeMoved() const OVERRIDE { return true; } 4653 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4654 return true; 4655 } 4656 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4657 return obj == InputAt(0); 4658 } 4659 4660 DECLARE_INSTRUCTION(ArrayLength); 4661 4662 private: 4663 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4664}; 4665 4666class HBoundsCheck : public HExpression<2> { 4667 public: 4668 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4669 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4670 DCHECK(index->GetType() == Primitive::kPrimInt); 4671 SetRawInputAt(0, index); 4672 SetRawInputAt(1, length); 4673 } 4674 4675 bool CanBeMoved() const OVERRIDE { return true; } 4676 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4677 return true; 4678 } 4679 4680 bool NeedsEnvironment() const OVERRIDE { return true; } 4681 4682 bool CanThrow() const OVERRIDE { return true; } 4683 4684 HInstruction* GetIndex() const { return InputAt(0); } 4685 4686 DECLARE_INSTRUCTION(BoundsCheck); 4687 4688 private: 4689 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4690}; 4691 4692/** 4693 * Some DEX instructions are folded into multiple HInstructions that need 4694 * to stay live until the last HInstruction. This class 4695 * is used as a marker for the baseline compiler to ensure its preceding 4696 * HInstruction stays live. `index` represents the stack location index of the 4697 * instruction (the actual offset is computed as index * vreg_size). 4698 */ 4699class HTemporary : public HTemplateInstruction<0> { 4700 public: 4701 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4702 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4703 4704 size_t GetIndex() const { return index_; } 4705 4706 Primitive::Type GetType() const OVERRIDE { 4707 // The previous instruction is the one that will be stored in the temporary location. 4708 DCHECK(GetPrevious() != nullptr); 4709 return GetPrevious()->GetType(); 4710 } 4711 4712 DECLARE_INSTRUCTION(Temporary); 4713 4714 private: 4715 const size_t index_; 4716 DISALLOW_COPY_AND_ASSIGN(HTemporary); 4717}; 4718 4719class HSuspendCheck : public HTemplateInstruction<0> { 4720 public: 4721 explicit HSuspendCheck(uint32_t dex_pc) 4722 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {} 4723 4724 bool NeedsEnvironment() const OVERRIDE { 4725 return true; 4726 } 4727 4728 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; } 4729 SlowPathCode* GetSlowPath() const { return slow_path_; } 4730 4731 DECLARE_INSTRUCTION(SuspendCheck); 4732 4733 private: 4734 // Only used for code generation, in order to share the same slow path between back edges 4735 // of a same loop. 4736 SlowPathCode* slow_path_; 4737 4738 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck); 4739}; 4740 4741/** 4742 * Instruction to load a Class object. 4743 */ 4744class HLoadClass : public HExpression<1> { 4745 public: 4746 HLoadClass(HCurrentMethod* current_method, 4747 uint16_t type_index, 4748 const DexFile& dex_file, 4749 bool is_referrers_class, 4750 uint32_t dex_pc, 4751 bool needs_access_check) 4752 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4753 type_index_(type_index), 4754 dex_file_(dex_file), 4755 is_referrers_class_(is_referrers_class), 4756 generate_clinit_check_(false), 4757 needs_access_check_(needs_access_check), 4758 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 4759 // Referrers class should not need access check. We never inline unverified 4760 // methods so we can't possibly end up in this situation. 4761 DCHECK(!is_referrers_class_ || !needs_access_check_); 4762 SetRawInputAt(0, current_method); 4763 } 4764 4765 bool CanBeMoved() const OVERRIDE { return true; } 4766 4767 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4768 // Note that we don't need to test for generate_clinit_check_. 4769 // Whether or not we need to generate the clinit check is processed in 4770 // prepare_for_register_allocator based on existing HInvokes and HClinitChecks. 4771 return other->AsLoadClass()->type_index_ == type_index_ && 4772 other->AsLoadClass()->needs_access_check_ == needs_access_check_; 4773 } 4774 4775 size_t ComputeHashCode() const OVERRIDE { return type_index_; } 4776 4777 uint16_t GetTypeIndex() const { return type_index_; } 4778 bool IsReferrersClass() const { return is_referrers_class_; } 4779 bool CanBeNull() const OVERRIDE { return false; } 4780 4781 bool NeedsEnvironment() const OVERRIDE { 4782 // Will call runtime and load the class if the class is not loaded yet. 4783 // TODO: finer grain decision. 4784 return !is_referrers_class_; 4785 } 4786 4787 bool MustGenerateClinitCheck() const { 4788 return generate_clinit_check_; 4789 } 4790 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 4791 // The entrypoint the code generator is going to call does not do 4792 // clinit of the class. 4793 DCHECK(!NeedsAccessCheck()); 4794 generate_clinit_check_ = generate_clinit_check; 4795 } 4796 4797 bool CanCallRuntime() const { 4798 return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_; 4799 } 4800 4801 bool NeedsAccessCheck() const { 4802 return needs_access_check_; 4803 } 4804 4805 bool CanThrow() const OVERRIDE { 4806 // May call runtime and and therefore can throw. 4807 // TODO: finer grain decision. 4808 return CanCallRuntime(); 4809 } 4810 4811 ReferenceTypeInfo GetLoadedClassRTI() { 4812 return loaded_class_rti_; 4813 } 4814 4815 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 4816 // Make sure we only set exact types (the loaded class should never be merged). 4817 DCHECK(rti.IsExact()); 4818 loaded_class_rti_ = rti; 4819 } 4820 4821 const DexFile& GetDexFile() { return dex_file_; } 4822 4823 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; } 4824 4825 static SideEffects SideEffectsForArchRuntimeCalls() { 4826 return SideEffects::CanTriggerGC(); 4827 } 4828 4829 DECLARE_INSTRUCTION(LoadClass); 4830 4831 private: 4832 const uint16_t type_index_; 4833 const DexFile& dex_file_; 4834 const bool is_referrers_class_; 4835 // Whether this instruction must generate the initialization check. 4836 // Used for code generation. 4837 bool generate_clinit_check_; 4838 bool needs_access_check_; 4839 4840 ReferenceTypeInfo loaded_class_rti_; 4841 4842 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 4843}; 4844 4845class HLoadString : public HExpression<1> { 4846 public: 4847 HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc) 4848 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4849 string_index_(string_index) { 4850 SetRawInputAt(0, current_method); 4851 } 4852 4853 bool CanBeMoved() const OVERRIDE { return true; } 4854 4855 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4856 return other->AsLoadString()->string_index_ == string_index_; 4857 } 4858 4859 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 4860 4861 uint32_t GetStringIndex() const { return string_index_; } 4862 4863 // TODO: Can we deopt or debug when we resolve a string? 4864 bool NeedsEnvironment() const OVERRIDE { return false; } 4865 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; } 4866 bool CanBeNull() const OVERRIDE { return false; } 4867 4868 static SideEffects SideEffectsForArchRuntimeCalls() { 4869 return SideEffects::CanTriggerGC(); 4870 } 4871 4872 DECLARE_INSTRUCTION(LoadString); 4873 4874 private: 4875 const uint32_t string_index_; 4876 4877 DISALLOW_COPY_AND_ASSIGN(HLoadString); 4878}; 4879 4880/** 4881 * Performs an initialization check on its Class object input. 4882 */ 4883class HClinitCheck : public HExpression<1> { 4884 public: 4885 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 4886 : HExpression( 4887 Primitive::kPrimNot, 4888 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 4889 dex_pc) { 4890 SetRawInputAt(0, constant); 4891 } 4892 4893 bool CanBeMoved() const OVERRIDE { return true; } 4894 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4895 return true; 4896 } 4897 4898 bool NeedsEnvironment() const OVERRIDE { 4899 // May call runtime to initialize the class. 4900 return true; 4901 } 4902 4903 4904 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 4905 4906 DECLARE_INSTRUCTION(ClinitCheck); 4907 4908 private: 4909 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 4910}; 4911 4912class HStaticFieldGet : public HExpression<1> { 4913 public: 4914 HStaticFieldGet(HInstruction* cls, 4915 Primitive::Type field_type, 4916 MemberOffset field_offset, 4917 bool is_volatile, 4918 uint32_t field_idx, 4919 uint16_t declaring_class_def_index, 4920 const DexFile& dex_file, 4921 Handle<mirror::DexCache> dex_cache, 4922 uint32_t dex_pc) 4923 : HExpression(field_type, 4924 SideEffects::FieldReadOfType(field_type, is_volatile), 4925 dex_pc), 4926 field_info_(field_offset, 4927 field_type, 4928 is_volatile, 4929 field_idx, 4930 declaring_class_def_index, 4931 dex_file, 4932 dex_cache) { 4933 SetRawInputAt(0, cls); 4934 } 4935 4936 4937 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4938 4939 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4940 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 4941 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4942 } 4943 4944 size_t ComputeHashCode() const OVERRIDE { 4945 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4946 } 4947 4948 const FieldInfo& GetFieldInfo() const { return field_info_; } 4949 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4950 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4951 bool IsVolatile() const { return field_info_.IsVolatile(); } 4952 4953 DECLARE_INSTRUCTION(StaticFieldGet); 4954 4955 private: 4956 const FieldInfo field_info_; 4957 4958 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 4959}; 4960 4961class HStaticFieldSet : public HTemplateInstruction<2> { 4962 public: 4963 HStaticFieldSet(HInstruction* cls, 4964 HInstruction* value, 4965 Primitive::Type field_type, 4966 MemberOffset field_offset, 4967 bool is_volatile, 4968 uint32_t field_idx, 4969 uint16_t declaring_class_def_index, 4970 const DexFile& dex_file, 4971 Handle<mirror::DexCache> dex_cache, 4972 uint32_t dex_pc) 4973 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 4974 dex_pc), 4975 field_info_(field_offset, 4976 field_type, 4977 is_volatile, 4978 field_idx, 4979 declaring_class_def_index, 4980 dex_file, 4981 dex_cache), 4982 value_can_be_null_(true) { 4983 SetRawInputAt(0, cls); 4984 SetRawInputAt(1, value); 4985 } 4986 4987 const FieldInfo& GetFieldInfo() const { return field_info_; } 4988 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4989 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4990 bool IsVolatile() const { return field_info_.IsVolatile(); } 4991 4992 HInstruction* GetValue() const { return InputAt(1); } 4993 bool GetValueCanBeNull() const { return value_can_be_null_; } 4994 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4995 4996 DECLARE_INSTRUCTION(StaticFieldSet); 4997 4998 private: 4999 const FieldInfo field_info_; 5000 bool value_can_be_null_; 5001 5002 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 5003}; 5004 5005class HUnresolvedInstanceFieldGet : public HExpression<1> { 5006 public: 5007 HUnresolvedInstanceFieldGet(HInstruction* obj, 5008 Primitive::Type field_type, 5009 uint32_t field_index, 5010 uint32_t dex_pc) 5011 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5012 field_index_(field_index) { 5013 SetRawInputAt(0, obj); 5014 } 5015 5016 bool NeedsEnvironment() const OVERRIDE { return true; } 5017 bool CanThrow() const OVERRIDE { return true; } 5018 5019 Primitive::Type GetFieldType() const { return GetType(); } 5020 uint32_t GetFieldIndex() const { return field_index_; } 5021 5022 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 5023 5024 private: 5025 const uint32_t field_index_; 5026 5027 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 5028}; 5029 5030class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 5031 public: 5032 HUnresolvedInstanceFieldSet(HInstruction* obj, 5033 HInstruction* value, 5034 Primitive::Type field_type, 5035 uint32_t field_index, 5036 uint32_t dex_pc) 5037 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5038 field_type_(field_type), 5039 field_index_(field_index) { 5040 DCHECK_EQ(field_type, value->GetType()); 5041 SetRawInputAt(0, obj); 5042 SetRawInputAt(1, value); 5043 } 5044 5045 bool NeedsEnvironment() const OVERRIDE { return true; } 5046 bool CanThrow() const OVERRIDE { return true; } 5047 5048 Primitive::Type GetFieldType() const { return field_type_; } 5049 uint32_t GetFieldIndex() const { return field_index_; } 5050 5051 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 5052 5053 private: 5054 const Primitive::Type field_type_; 5055 const uint32_t field_index_; 5056 5057 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 5058}; 5059 5060class HUnresolvedStaticFieldGet : public HExpression<0> { 5061 public: 5062 HUnresolvedStaticFieldGet(Primitive::Type field_type, 5063 uint32_t field_index, 5064 uint32_t dex_pc) 5065 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5066 field_index_(field_index) { 5067 } 5068 5069 bool NeedsEnvironment() const OVERRIDE { return true; } 5070 bool CanThrow() const OVERRIDE { return true; } 5071 5072 Primitive::Type GetFieldType() const { return GetType(); } 5073 uint32_t GetFieldIndex() const { return field_index_; } 5074 5075 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 5076 5077 private: 5078 const uint32_t field_index_; 5079 5080 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 5081}; 5082 5083class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 5084 public: 5085 HUnresolvedStaticFieldSet(HInstruction* value, 5086 Primitive::Type field_type, 5087 uint32_t field_index, 5088 uint32_t dex_pc) 5089 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5090 field_type_(field_type), 5091 field_index_(field_index) { 5092 DCHECK_EQ(field_type, value->GetType()); 5093 SetRawInputAt(0, value); 5094 } 5095 5096 bool NeedsEnvironment() const OVERRIDE { return true; } 5097 bool CanThrow() const OVERRIDE { return true; } 5098 5099 Primitive::Type GetFieldType() const { return field_type_; } 5100 uint32_t GetFieldIndex() const { return field_index_; } 5101 5102 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 5103 5104 private: 5105 const Primitive::Type field_type_; 5106 const uint32_t field_index_; 5107 5108 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 5109}; 5110 5111// Implement the move-exception DEX instruction. 5112class HLoadException : public HExpression<0> { 5113 public: 5114 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 5115 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 5116 5117 bool CanBeNull() const OVERRIDE { return false; } 5118 5119 DECLARE_INSTRUCTION(LoadException); 5120 5121 private: 5122 DISALLOW_COPY_AND_ASSIGN(HLoadException); 5123}; 5124 5125// Implicit part of move-exception which clears thread-local exception storage. 5126// Must not be removed because the runtime expects the TLS to get cleared. 5127class HClearException : public HTemplateInstruction<0> { 5128 public: 5129 explicit HClearException(uint32_t dex_pc = kNoDexPc) 5130 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 5131 5132 DECLARE_INSTRUCTION(ClearException); 5133 5134 private: 5135 DISALLOW_COPY_AND_ASSIGN(HClearException); 5136}; 5137 5138class HThrow : public HTemplateInstruction<1> { 5139 public: 5140 HThrow(HInstruction* exception, uint32_t dex_pc) 5141 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 5142 SetRawInputAt(0, exception); 5143 } 5144 5145 bool IsControlFlow() const OVERRIDE { return true; } 5146 5147 bool NeedsEnvironment() const OVERRIDE { return true; } 5148 5149 bool CanThrow() const OVERRIDE { return true; } 5150 5151 5152 DECLARE_INSTRUCTION(Throw); 5153 5154 private: 5155 DISALLOW_COPY_AND_ASSIGN(HThrow); 5156}; 5157 5158/** 5159 * Implementation strategies for the code generator of a HInstanceOf 5160 * or `HCheckCast`. 5161 */ 5162enum class TypeCheckKind { 5163 kUnresolvedCheck, // Check against an unresolved type. 5164 kExactCheck, // Can do a single class compare. 5165 kClassHierarchyCheck, // Can just walk the super class chain. 5166 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 5167 kInterfaceCheck, // No optimization yet when checking against an interface. 5168 kArrayObjectCheck, // Can just check if the array is not primitive. 5169 kArrayCheck // No optimization yet when checking against a generic array. 5170}; 5171 5172class HInstanceOf : public HExpression<2> { 5173 public: 5174 HInstanceOf(HInstruction* object, 5175 HLoadClass* constant, 5176 TypeCheckKind check_kind, 5177 uint32_t dex_pc) 5178 : HExpression(Primitive::kPrimBoolean, 5179 SideEffectsForArchRuntimeCalls(check_kind), 5180 dex_pc), 5181 check_kind_(check_kind), 5182 must_do_null_check_(true) { 5183 SetRawInputAt(0, object); 5184 SetRawInputAt(1, constant); 5185 } 5186 5187 bool CanBeMoved() const OVERRIDE { return true; } 5188 5189 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5190 return true; 5191 } 5192 5193 bool NeedsEnvironment() const OVERRIDE { 5194 return false; 5195 } 5196 5197 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5198 5199 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5200 5201 // Used only in code generation. 5202 bool MustDoNullCheck() const { return must_do_null_check_; } 5203 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5204 5205 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 5206 return (check_kind == TypeCheckKind::kExactCheck) 5207 ? SideEffects::None() 5208 // Mips currently does runtime calls for any other checks. 5209 : SideEffects::CanTriggerGC(); 5210 } 5211 5212 DECLARE_INSTRUCTION(InstanceOf); 5213 5214 private: 5215 const TypeCheckKind check_kind_; 5216 bool must_do_null_check_; 5217 5218 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 5219}; 5220 5221class HBoundType : public HExpression<1> { 5222 public: 5223 // Constructs an HBoundType with the given upper_bound. 5224 // Ensures that the upper_bound is valid. 5225 HBoundType(HInstruction* input, 5226 ReferenceTypeInfo upper_bound, 5227 bool upper_can_be_null, 5228 uint32_t dex_pc = kNoDexPc) 5229 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 5230 upper_bound_(upper_bound), 5231 upper_can_be_null_(upper_can_be_null), 5232 can_be_null_(upper_can_be_null) { 5233 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 5234 SetRawInputAt(0, input); 5235 SetReferenceTypeInfo(upper_bound_); 5236 } 5237 5238 // GetUpper* should only be used in reference type propagation. 5239 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 5240 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 5241 5242 void SetCanBeNull(bool can_be_null) { 5243 DCHECK(upper_can_be_null_ || !can_be_null); 5244 can_be_null_ = can_be_null; 5245 } 5246 5247 bool CanBeNull() const OVERRIDE { return can_be_null_; } 5248 5249 DECLARE_INSTRUCTION(BoundType); 5250 5251 private: 5252 // Encodes the most upper class that this instruction can have. In other words 5253 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 5254 // It is used to bound the type in cases like: 5255 // if (x instanceof ClassX) { 5256 // // uper_bound_ will be ClassX 5257 // } 5258 const ReferenceTypeInfo upper_bound_; 5259 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5260 // is false then can_be_null_ cannot be true). 5261 const bool upper_can_be_null_; 5262 bool can_be_null_; 5263 5264 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5265}; 5266 5267class HCheckCast : public HTemplateInstruction<2> { 5268 public: 5269 HCheckCast(HInstruction* object, 5270 HLoadClass* constant, 5271 TypeCheckKind check_kind, 5272 uint32_t dex_pc) 5273 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5274 check_kind_(check_kind), 5275 must_do_null_check_(true) { 5276 SetRawInputAt(0, object); 5277 SetRawInputAt(1, constant); 5278 } 5279 5280 bool CanBeMoved() const OVERRIDE { return true; } 5281 5282 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5283 return true; 5284 } 5285 5286 bool NeedsEnvironment() const OVERRIDE { 5287 // Instruction may throw a CheckCastError. 5288 return true; 5289 } 5290 5291 bool CanThrow() const OVERRIDE { return true; } 5292 5293 bool MustDoNullCheck() const { return must_do_null_check_; } 5294 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5295 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5296 5297 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5298 5299 DECLARE_INSTRUCTION(CheckCast); 5300 5301 private: 5302 const TypeCheckKind check_kind_; 5303 bool must_do_null_check_; 5304 5305 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5306}; 5307 5308class HMemoryBarrier : public HTemplateInstruction<0> { 5309 public: 5310 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5311 : HTemplateInstruction( 5312 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 5313 barrier_kind_(barrier_kind) {} 5314 5315 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5316 5317 DECLARE_INSTRUCTION(MemoryBarrier); 5318 5319 private: 5320 const MemBarrierKind barrier_kind_; 5321 5322 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5323}; 5324 5325class HMonitorOperation : public HTemplateInstruction<1> { 5326 public: 5327 enum OperationKind { 5328 kEnter, 5329 kExit, 5330 }; 5331 5332 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5333 : HTemplateInstruction( 5334 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5335 kind_(kind) { 5336 SetRawInputAt(0, object); 5337 } 5338 5339 // Instruction may throw a Java exception, so we need an environment. 5340 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5341 5342 bool CanThrow() const OVERRIDE { 5343 // Verifier guarantees that monitor-exit cannot throw. 5344 // This is important because it allows the HGraphBuilder to remove 5345 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5346 return IsEnter(); 5347 } 5348 5349 5350 bool IsEnter() const { return kind_ == kEnter; } 5351 5352 DECLARE_INSTRUCTION(MonitorOperation); 5353 5354 private: 5355 const OperationKind kind_; 5356 5357 private: 5358 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5359}; 5360 5361/** 5362 * A HInstruction used as a marker for the replacement of new + <init> 5363 * of a String to a call to a StringFactory. Only baseline will see 5364 * the node at code generation, where it will be be treated as null. 5365 * When compiling non-baseline, `HFakeString` instructions are being removed 5366 * in the instruction simplifier. 5367 */ 5368class HFakeString : public HTemplateInstruction<0> { 5369 public: 5370 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5371 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5372 5373 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5374 5375 DECLARE_INSTRUCTION(FakeString); 5376 5377 private: 5378 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5379}; 5380 5381class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5382 public: 5383 MoveOperands(Location source, 5384 Location destination, 5385 Primitive::Type type, 5386 HInstruction* instruction) 5387 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5388 5389 Location GetSource() const { return source_; } 5390 Location GetDestination() const { return destination_; } 5391 5392 void SetSource(Location value) { source_ = value; } 5393 void SetDestination(Location value) { destination_ = value; } 5394 5395 // The parallel move resolver marks moves as "in-progress" by clearing the 5396 // destination (but not the source). 5397 Location MarkPending() { 5398 DCHECK(!IsPending()); 5399 Location dest = destination_; 5400 destination_ = Location::NoLocation(); 5401 return dest; 5402 } 5403 5404 void ClearPending(Location dest) { 5405 DCHECK(IsPending()); 5406 destination_ = dest; 5407 } 5408 5409 bool IsPending() const { 5410 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5411 return destination_.IsInvalid() && !source_.IsInvalid(); 5412 } 5413 5414 // True if this blocks a move from the given location. 5415 bool Blocks(Location loc) const { 5416 return !IsEliminated() && source_.OverlapsWith(loc); 5417 } 5418 5419 // A move is redundant if it's been eliminated, if its source and 5420 // destination are the same, or if its destination is unneeded. 5421 bool IsRedundant() const { 5422 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5423 } 5424 5425 // We clear both operands to indicate move that's been eliminated. 5426 void Eliminate() { 5427 source_ = destination_ = Location::NoLocation(); 5428 } 5429 5430 bool IsEliminated() const { 5431 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5432 return source_.IsInvalid(); 5433 } 5434 5435 Primitive::Type GetType() const { return type_; } 5436 5437 bool Is64BitMove() const { 5438 return Primitive::Is64BitType(type_); 5439 } 5440 5441 HInstruction* GetInstruction() const { return instruction_; } 5442 5443 private: 5444 Location source_; 5445 Location destination_; 5446 // The type this move is for. 5447 Primitive::Type type_; 5448 // The instruction this move is assocatied with. Null when this move is 5449 // for moving an input in the expected locations of user (including a phi user). 5450 // This is only used in debug mode, to ensure we do not connect interval siblings 5451 // in the same parallel move. 5452 HInstruction* instruction_; 5453}; 5454 5455static constexpr size_t kDefaultNumberOfMoves = 4; 5456 5457class HParallelMove : public HTemplateInstruction<0> { 5458 public: 5459 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5460 : HTemplateInstruction(SideEffects::None(), dex_pc), 5461 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5462 moves_.reserve(kDefaultNumberOfMoves); 5463 } 5464 5465 void AddMove(Location source, 5466 Location destination, 5467 Primitive::Type type, 5468 HInstruction* instruction) { 5469 DCHECK(source.IsValid()); 5470 DCHECK(destination.IsValid()); 5471 if (kIsDebugBuild) { 5472 if (instruction != nullptr) { 5473 for (const MoveOperands& move : moves_) { 5474 if (move.GetInstruction() == instruction) { 5475 // Special case the situation where the move is for the spill slot 5476 // of the instruction. 5477 if ((GetPrevious() == instruction) 5478 || ((GetPrevious() == nullptr) 5479 && instruction->IsPhi() 5480 && instruction->GetBlock() == GetBlock())) { 5481 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5482 << "Doing parallel moves for the same instruction."; 5483 } else { 5484 DCHECK(false) << "Doing parallel moves for the same instruction."; 5485 } 5486 } 5487 } 5488 } 5489 for (const MoveOperands& move : moves_) { 5490 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5491 << "Overlapped destination for two moves in a parallel move: " 5492 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5493 << source << " ==> " << destination; 5494 } 5495 } 5496 moves_.emplace_back(source, destination, type, instruction); 5497 } 5498 5499 MoveOperands* MoveOperandsAt(size_t index) { 5500 return &moves_[index]; 5501 } 5502 5503 size_t NumMoves() const { return moves_.size(); } 5504 5505 DECLARE_INSTRUCTION(ParallelMove); 5506 5507 private: 5508 ArenaVector<MoveOperands> moves_; 5509 5510 DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5511}; 5512 5513} // namespace art 5514 5515#ifdef ART_ENABLE_CODEGEN_arm64 5516#include "nodes_arm64.h" 5517#endif 5518#ifdef ART_ENABLE_CODEGEN_x86 5519#include "nodes_x86.h" 5520#endif 5521 5522namespace art { 5523 5524class HGraphVisitor : public ValueObject { 5525 public: 5526 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5527 virtual ~HGraphVisitor() {} 5528 5529 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} 5530 virtual void VisitBasicBlock(HBasicBlock* block); 5531 5532 // Visit the graph following basic block insertion order. 5533 void VisitInsertionOrder(); 5534 5535 // Visit the graph following dominator tree reverse post-order. 5536 void VisitReversePostOrder(); 5537 5538 HGraph* GetGraph() const { return graph_; } 5539 5540 // Visit functions for instruction classes. 5541#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5542 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5543 5544 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5545 5546#undef DECLARE_VISIT_INSTRUCTION 5547 5548 private: 5549 HGraph* const graph_; 5550 5551 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5552}; 5553 5554class HGraphDelegateVisitor : public HGraphVisitor { 5555 public: 5556 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5557 virtual ~HGraphDelegateVisitor() {} 5558 5559 // Visit functions that delegate to to super class. 5560#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5561 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5562 5563 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5564 5565#undef DECLARE_VISIT_INSTRUCTION 5566 5567 private: 5568 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5569}; 5570 5571class HInsertionOrderIterator : public ValueObject { 5572 public: 5573 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5574 5575 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5576 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5577 void Advance() { ++index_; } 5578 5579 private: 5580 const HGraph& graph_; 5581 size_t index_; 5582 5583 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5584}; 5585 5586class HReversePostOrderIterator : public ValueObject { 5587 public: 5588 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5589 // Check that reverse post order of the graph has been built. 5590 DCHECK(!graph.GetReversePostOrder().empty()); 5591 } 5592 5593 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5594 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5595 void Advance() { ++index_; } 5596 5597 private: 5598 const HGraph& graph_; 5599 size_t index_; 5600 5601 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5602}; 5603 5604class HPostOrderIterator : public ValueObject { 5605 public: 5606 explicit HPostOrderIterator(const HGraph& graph) 5607 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5608 // Check that reverse post order of the graph has been built. 5609 DCHECK(!graph.GetReversePostOrder().empty()); 5610 } 5611 5612 bool Done() const { return index_ == 0; } 5613 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5614 void Advance() { --index_; } 5615 5616 private: 5617 const HGraph& graph_; 5618 size_t index_; 5619 5620 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5621}; 5622 5623class HLinearPostOrderIterator : public ValueObject { 5624 public: 5625 explicit HLinearPostOrderIterator(const HGraph& graph) 5626 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5627 5628 bool Done() const { return index_ == 0; } 5629 5630 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5631 5632 void Advance() { 5633 --index_; 5634 DCHECK_GE(index_, 0U); 5635 } 5636 5637 private: 5638 const ArenaVector<HBasicBlock*>& order_; 5639 size_t index_; 5640 5641 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5642}; 5643 5644class HLinearOrderIterator : public ValueObject { 5645 public: 5646 explicit HLinearOrderIterator(const HGraph& graph) 5647 : order_(graph.GetLinearOrder()), index_(0) {} 5648 5649 bool Done() const { return index_ == order_.size(); } 5650 HBasicBlock* Current() const { return order_[index_]; } 5651 void Advance() { ++index_; } 5652 5653 private: 5654 const ArenaVector<HBasicBlock*>& order_; 5655 size_t index_; 5656 5657 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5658}; 5659 5660// Iterator over the blocks that art part of the loop. Includes blocks part 5661// of an inner loop. The order in which the blocks are iterated is on their 5662// block id. 5663class HBlocksInLoopIterator : public ValueObject { 5664 public: 5665 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5666 : blocks_in_loop_(info.GetBlocks()), 5667 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5668 index_(0) { 5669 if (!blocks_in_loop_.IsBitSet(index_)) { 5670 Advance(); 5671 } 5672 } 5673 5674 bool Done() const { return index_ == blocks_.size(); } 5675 HBasicBlock* Current() const { return blocks_[index_]; } 5676 void Advance() { 5677 ++index_; 5678 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5679 if (blocks_in_loop_.IsBitSet(index_)) { 5680 break; 5681 } 5682 } 5683 } 5684 5685 private: 5686 const BitVector& blocks_in_loop_; 5687 const ArenaVector<HBasicBlock*>& blocks_; 5688 size_t index_; 5689 5690 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5691}; 5692 5693// Iterator over the blocks that art part of the loop. Includes blocks part 5694// of an inner loop. The order in which the blocks are iterated is reverse 5695// post order. 5696class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5697 public: 5698 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5699 : blocks_in_loop_(info.GetBlocks()), 5700 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5701 index_(0) { 5702 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5703 Advance(); 5704 } 5705 } 5706 5707 bool Done() const { return index_ == blocks_.size(); } 5708 HBasicBlock* Current() const { return blocks_[index_]; } 5709 void Advance() { 5710 ++index_; 5711 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5712 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5713 break; 5714 } 5715 } 5716 } 5717 5718 private: 5719 const BitVector& blocks_in_loop_; 5720 const ArenaVector<HBasicBlock*>& blocks_; 5721 size_t index_; 5722 5723 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5724}; 5725 5726inline int64_t Int64FromConstant(HConstant* constant) { 5727 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5728 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5729 : constant->AsLongConstant()->GetValue(); 5730} 5731 5732inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5733 // For the purposes of the compiler, the dex files must actually be the same object 5734 // if we want to safely treat them as the same. This is especially important for JIT 5735 // as custom class loaders can open the same underlying file (or memory) multiple 5736 // times and provide different class resolution but no two class loaders should ever 5737 // use the same DexFile object - doing so is an unsupported hack that can lead to 5738 // all sorts of weird failures. 5739 return &lhs == &rhs; 5740} 5741 5742} // namespace art 5743 5744#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5745