nodes.h revision 391d01f3e8dedf3af3727bdf5d5b76ab35d52795
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_bit_vector.h" 25#include "base/arena_containers.h" 26#include "base/arena_object.h" 27#include "base/stl_util.h" 28#include "dex/compiler_enums.h" 29#include "entrypoints/quick/quick_entrypoints_enum.h" 30#include "handle.h" 31#include "handle_scope.h" 32#include "invoke_type.h" 33#include "locations.h" 34#include "method_reference.h" 35#include "mirror/class.h" 36#include "offsets.h" 37#include "primitive.h" 38 39namespace art { 40 41class GraphChecker; 42class HBasicBlock; 43class HCurrentMethod; 44class HDoubleConstant; 45class HEnvironment; 46class HFakeString; 47class HFloatConstant; 48class HGraphBuilder; 49class HGraphVisitor; 50class HInstruction; 51class HIntConstant; 52class HInvoke; 53class HLongConstant; 54class HNullConstant; 55class HPhi; 56class HSuspendCheck; 57class HTryBoundary; 58class LiveInterval; 59class LocationSummary; 60class SlowPathCode; 61class SsaBuilder; 62 63namespace mirror { 64class DexCache; 65} // namespace mirror 66 67static const int kDefaultNumberOfBlocks = 8; 68static const int kDefaultNumberOfSuccessors = 2; 69static const int kDefaultNumberOfPredecessors = 2; 70static const int kDefaultNumberOfExceptionalPredecessors = 0; 71static const int kDefaultNumberOfDominatedBlocks = 1; 72static const int kDefaultNumberOfBackEdges = 1; 73 74static constexpr uint32_t kMaxIntShiftValue = 0x1f; 75static constexpr uint64_t kMaxLongShiftValue = 0x3f; 76 77static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 78static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1); 79 80static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 81 82static constexpr uint32_t kNoDexPc = -1; 83 84enum IfCondition { 85 // All types. 86 kCondEQ, // == 87 kCondNE, // != 88 // Signed integers and floating-point numbers. 89 kCondLT, // < 90 kCondLE, // <= 91 kCondGT, // > 92 kCondGE, // >= 93 // Unsigned integers. 94 kCondB, // < 95 kCondBE, // <= 96 kCondA, // > 97 kCondAE, // >= 98}; 99 100class HInstructionList : public ValueObject { 101 public: 102 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 103 104 void AddInstruction(HInstruction* instruction); 105 void RemoveInstruction(HInstruction* instruction); 106 107 // Insert `instruction` before/after an existing instruction `cursor`. 108 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 109 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 110 111 // Return true if this list contains `instruction`. 112 bool Contains(HInstruction* instruction) const; 113 114 // Return true if `instruction1` is found before `instruction2` in 115 // this instruction list and false otherwise. Abort if none 116 // of these instructions is found. 117 bool FoundBefore(const HInstruction* instruction1, 118 const HInstruction* instruction2) const; 119 120 bool IsEmpty() const { return first_instruction_ == nullptr; } 121 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 122 123 // Update the block of all instructions to be `block`. 124 void SetBlockOfInstructions(HBasicBlock* block) const; 125 126 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 127 void Add(const HInstructionList& instruction_list); 128 129 // Return the number of instructions in the list. This is an expensive operation. 130 size_t CountSize() const; 131 132 private: 133 HInstruction* first_instruction_; 134 HInstruction* last_instruction_; 135 136 friend class HBasicBlock; 137 friend class HGraph; 138 friend class HInstruction; 139 friend class HInstructionIterator; 140 friend class HBackwardInstructionIterator; 141 142 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 143}; 144 145// Control-flow graph of a method. Contains a list of basic blocks. 146class HGraph : public ArenaObject<kArenaAllocGraph> { 147 public: 148 HGraph(ArenaAllocator* arena, 149 const DexFile& dex_file, 150 uint32_t method_idx, 151 bool should_generate_constructor_barrier, 152 InstructionSet instruction_set, 153 InvokeType invoke_type = kInvalidInvokeType, 154 bool debuggable = false, 155 int start_instruction_id = 0) 156 : arena_(arena), 157 blocks_(arena->Adapter(kArenaAllocBlockList)), 158 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 159 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 160 entry_block_(nullptr), 161 exit_block_(nullptr), 162 maximum_number_of_out_vregs_(0), 163 number_of_vregs_(0), 164 number_of_in_vregs_(0), 165 temporaries_vreg_slots_(0), 166 has_bounds_checks_(false), 167 has_try_catch_(false), 168 debuggable_(debuggable), 169 current_instruction_id_(start_instruction_id), 170 dex_file_(dex_file), 171 method_idx_(method_idx), 172 invoke_type_(invoke_type), 173 in_ssa_form_(false), 174 should_generate_constructor_barrier_(should_generate_constructor_barrier), 175 instruction_set_(instruction_set), 176 cached_null_constant_(nullptr), 177 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 178 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 179 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 180 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 181 cached_current_method_(nullptr) { 182 blocks_.reserve(kDefaultNumberOfBlocks); 183 } 184 185 ArenaAllocator* GetArena() const { return arena_; } 186 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 187 188 bool IsInSsaForm() const { return in_ssa_form_; } 189 190 HBasicBlock* GetEntryBlock() const { return entry_block_; } 191 HBasicBlock* GetExitBlock() const { return exit_block_; } 192 bool HasExitBlock() const { return exit_block_ != nullptr; } 193 194 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 195 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 196 197 void AddBlock(HBasicBlock* block); 198 199 // Try building the SSA form of this graph, with dominance computation and loop 200 // recognition. Returns whether it was successful in doing all these steps. 201 bool TryBuildingSsa() { 202 BuildDominatorTree(); 203 // The SSA builder requires loops to all be natural. Specifically, the dead phi 204 // elimination phase checks the consistency of the graph when doing a post-order 205 // visit for eliminating dead phis: a dead phi can only have loop header phi 206 // users remaining when being visited. 207 if (!AnalyzeNaturalLoops()) return false; 208 // Precompute per-block try membership before entering the SSA builder, 209 // which needs the information to build catch block phis from values of 210 // locals at throwing instructions inside try blocks. 211 ComputeTryBlockInformation(); 212 TransformToSsa(); 213 in_ssa_form_ = true; 214 return true; 215 } 216 217 void ComputeDominanceInformation(); 218 void ClearDominanceInformation(); 219 220 void BuildDominatorTree(); 221 void TransformToSsa(); 222 void SimplifyCFG(); 223 void SimplifyCatchBlocks(); 224 225 // Analyze all natural loops in this graph. Returns false if one 226 // loop is not natural, that is the header does not dominate the 227 // back edge. 228 bool AnalyzeNaturalLoops() const; 229 230 // Iterate over blocks to compute try block membership. Needs reverse post 231 // order and loop information. 232 void ComputeTryBlockInformation(); 233 234 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 235 // Returns the instruction used to replace the invoke expression or null if the 236 // invoke is for a void method. 237 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 238 239 // Need to add a couple of blocks to test if the loop body is entered and 240 // put deoptimization instructions, etc. 241 void TransformLoopHeaderForBCE(HBasicBlock* header); 242 243 // Removes `block` from the graph. 244 void DeleteDeadBlock(HBasicBlock* block); 245 246 // Splits the edge between `block` and `successor` while preserving the 247 // indices in the predecessor/successor lists. If there are multiple edges 248 // between the blocks, the lowest indices are used. 249 // Returns the new block which is empty and has the same dex pc as `successor`. 250 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 251 252 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 253 void SimplifyLoop(HBasicBlock* header); 254 255 int32_t GetNextInstructionId() { 256 DCHECK_NE(current_instruction_id_, INT32_MAX); 257 return current_instruction_id_++; 258 } 259 260 int32_t GetCurrentInstructionId() const { 261 return current_instruction_id_; 262 } 263 264 void SetCurrentInstructionId(int32_t id) { 265 current_instruction_id_ = id; 266 } 267 268 uint16_t GetMaximumNumberOfOutVRegs() const { 269 return maximum_number_of_out_vregs_; 270 } 271 272 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 273 maximum_number_of_out_vregs_ = new_value; 274 } 275 276 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 277 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 278 } 279 280 void UpdateTemporariesVRegSlots(size_t slots) { 281 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 282 } 283 284 size_t GetTemporariesVRegSlots() const { 285 DCHECK(!in_ssa_form_); 286 return temporaries_vreg_slots_; 287 } 288 289 void SetNumberOfVRegs(uint16_t number_of_vregs) { 290 number_of_vregs_ = number_of_vregs; 291 } 292 293 uint16_t GetNumberOfVRegs() const { 294 return number_of_vregs_; 295 } 296 297 void SetNumberOfInVRegs(uint16_t value) { 298 number_of_in_vregs_ = value; 299 } 300 301 uint16_t GetNumberOfLocalVRegs() const { 302 DCHECK(!in_ssa_form_); 303 return number_of_vregs_ - number_of_in_vregs_; 304 } 305 306 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 307 return reverse_post_order_; 308 } 309 310 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 311 return linear_order_; 312 } 313 314 bool HasBoundsChecks() const { 315 return has_bounds_checks_; 316 } 317 318 void SetHasBoundsChecks(bool value) { 319 has_bounds_checks_ = value; 320 } 321 322 bool ShouldGenerateConstructorBarrier() const { 323 return should_generate_constructor_barrier_; 324 } 325 326 bool IsDebuggable() const { return debuggable_; } 327 328 // Returns a constant of the given type and value. If it does not exist 329 // already, it is created and inserted into the graph. This method is only for 330 // integral types. 331 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 332 333 // TODO: This is problematic for the consistency of reference type propagation 334 // because it can be created anytime after the pass and thus it will be left 335 // with an invalid type. 336 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 337 338 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 339 return CreateConstant(value, &cached_int_constants_, dex_pc); 340 } 341 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 342 return CreateConstant(value, &cached_long_constants_, dex_pc); 343 } 344 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 345 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 346 } 347 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 348 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 349 } 350 351 HCurrentMethod* GetCurrentMethod(); 352 353 const DexFile& GetDexFile() const { 354 return dex_file_; 355 } 356 357 uint32_t GetMethodIdx() const { 358 return method_idx_; 359 } 360 361 InvokeType GetInvokeType() const { 362 return invoke_type_; 363 } 364 365 InstructionSet GetInstructionSet() const { 366 return instruction_set_; 367 } 368 369 bool HasTryCatch() const { return has_try_catch_; } 370 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 371 372 private: 373 void FindBackEdges(ArenaBitVector* visited); 374 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 375 void RemoveDeadBlocks(const ArenaBitVector& visited); 376 377 template <class InstructionType, typename ValueType> 378 InstructionType* CreateConstant(ValueType value, 379 ArenaSafeMap<ValueType, InstructionType*>* cache, 380 uint32_t dex_pc = kNoDexPc) { 381 // Try to find an existing constant of the given value. 382 InstructionType* constant = nullptr; 383 auto cached_constant = cache->find(value); 384 if (cached_constant != cache->end()) { 385 constant = cached_constant->second; 386 } 387 388 // If not found or previously deleted, create and cache a new instruction. 389 // Don't bother reviving a previously deleted instruction, for simplicity. 390 if (constant == nullptr || constant->GetBlock() == nullptr) { 391 constant = new (arena_) InstructionType(value, dex_pc); 392 cache->Overwrite(value, constant); 393 InsertConstant(constant); 394 } 395 return constant; 396 } 397 398 void InsertConstant(HConstant* instruction); 399 400 // Cache a float constant into the graph. This method should only be 401 // called by the SsaBuilder when creating "equivalent" instructions. 402 void CacheFloatConstant(HFloatConstant* constant); 403 404 // See CacheFloatConstant comment. 405 void CacheDoubleConstant(HDoubleConstant* constant); 406 407 ArenaAllocator* const arena_; 408 409 // List of blocks in insertion order. 410 ArenaVector<HBasicBlock*> blocks_; 411 412 // List of blocks to perform a reverse post order tree traversal. 413 ArenaVector<HBasicBlock*> reverse_post_order_; 414 415 // List of blocks to perform a linear order tree traversal. 416 ArenaVector<HBasicBlock*> linear_order_; 417 418 HBasicBlock* entry_block_; 419 HBasicBlock* exit_block_; 420 421 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 422 uint16_t maximum_number_of_out_vregs_; 423 424 // The number of virtual registers in this method. Contains the parameters. 425 uint16_t number_of_vregs_; 426 427 // The number of virtual registers used by parameters of this method. 428 uint16_t number_of_in_vregs_; 429 430 // Number of vreg size slots that the temporaries use (used in baseline compiler). 431 size_t temporaries_vreg_slots_; 432 433 // Has bounds checks. We can totally skip BCE if it's false. 434 bool has_bounds_checks_; 435 436 // Flag whether there are any try/catch blocks in the graph. We will skip 437 // try/catch-related passes if false. 438 bool has_try_catch_; 439 440 // Indicates whether the graph should be compiled in a way that 441 // ensures full debuggability. If false, we can apply more 442 // aggressive optimizations that may limit the level of debugging. 443 const bool debuggable_; 444 445 // The current id to assign to a newly added instruction. See HInstruction.id_. 446 int32_t current_instruction_id_; 447 448 // The dex file from which the method is from. 449 const DexFile& dex_file_; 450 451 // The method index in the dex file. 452 const uint32_t method_idx_; 453 454 // If inlined, this encodes how the callee is being invoked. 455 const InvokeType invoke_type_; 456 457 // Whether the graph has been transformed to SSA form. Only used 458 // in debug mode to ensure we are not using properties only valid 459 // for non-SSA form (like the number of temporaries). 460 bool in_ssa_form_; 461 462 const bool should_generate_constructor_barrier_; 463 464 const InstructionSet instruction_set_; 465 466 // Cached constants. 467 HNullConstant* cached_null_constant_; 468 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 469 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 470 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 471 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 472 473 HCurrentMethod* cached_current_method_; 474 475 friend class SsaBuilder; // For caching constants. 476 friend class SsaLivenessAnalysis; // For the linear order. 477 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 478 DISALLOW_COPY_AND_ASSIGN(HGraph); 479}; 480 481class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 482 public: 483 HLoopInformation(HBasicBlock* header, HGraph* graph) 484 : header_(header), 485 suspend_check_(nullptr), 486 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 487 // Make bit vector growable, as the number of blocks may change. 488 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 489 back_edges_.reserve(kDefaultNumberOfBackEdges); 490 } 491 492 HBasicBlock* GetHeader() const { 493 return header_; 494 } 495 496 void SetHeader(HBasicBlock* block) { 497 header_ = block; 498 } 499 500 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 501 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 502 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 503 504 void AddBackEdge(HBasicBlock* back_edge) { 505 back_edges_.push_back(back_edge); 506 } 507 508 void RemoveBackEdge(HBasicBlock* back_edge) { 509 RemoveElement(back_edges_, back_edge); 510 } 511 512 bool IsBackEdge(const HBasicBlock& block) const { 513 return ContainsElement(back_edges_, &block); 514 } 515 516 size_t NumberOfBackEdges() const { 517 return back_edges_.size(); 518 } 519 520 HBasicBlock* GetPreHeader() const; 521 522 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 523 return back_edges_; 524 } 525 526 // Returns the lifetime position of the back edge that has the 527 // greatest lifetime position. 528 size_t GetLifetimeEnd() const; 529 530 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 531 ReplaceElement(back_edges_, existing, new_back_edge); 532 } 533 534 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 535 // that is the header dominates the back edge. 536 bool Populate(); 537 538 // Reanalyzes the loop by removing loop info from its blocks and re-running 539 // Populate(). If there are no back edges left, the loop info is completely 540 // removed as well as its SuspendCheck instruction. It must be run on nested 541 // inner loops first. 542 void Update(); 543 544 // Returns whether this loop information contains `block`. 545 // Note that this loop information *must* be populated before entering this function. 546 bool Contains(const HBasicBlock& block) const; 547 548 // Returns whether this loop information is an inner loop of `other`. 549 // Note that `other` *must* be populated before entering this function. 550 bool IsIn(const HLoopInformation& other) const; 551 552 // Returns true if instruction is not defined within this loop or any loop nested inside 553 // this loop. If must_dominate is set, only definitions that actually dominate the loop 554 // header can be invariant. Otherwise, any definition outside the loop, including 555 // definitions that appear after the loop, is invariant. 556 bool IsLoopInvariant(HInstruction* instruction, bool must_dominate) const; 557 558 const ArenaBitVector& GetBlocks() const { return blocks_; } 559 560 void Add(HBasicBlock* block); 561 void Remove(HBasicBlock* block); 562 563 private: 564 // Internal recursive implementation of `Populate`. 565 void PopulateRecursive(HBasicBlock* block); 566 567 HBasicBlock* header_; 568 HSuspendCheck* suspend_check_; 569 ArenaVector<HBasicBlock*> back_edges_; 570 ArenaBitVector blocks_; 571 572 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 573}; 574 575// Stores try/catch information for basic blocks. 576// Note that HGraph is constructed so that catch blocks cannot simultaneously 577// be try blocks. 578class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 579 public: 580 // Try block information constructor. 581 explicit TryCatchInformation(const HTryBoundary& try_entry) 582 : try_entry_(&try_entry), 583 catch_dex_file_(nullptr), 584 catch_type_index_(DexFile::kDexNoIndex16) { 585 DCHECK(try_entry_ != nullptr); 586 } 587 588 // Catch block information constructor. 589 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 590 : try_entry_(nullptr), 591 catch_dex_file_(&dex_file), 592 catch_type_index_(catch_type_index) {} 593 594 bool IsTryBlock() const { return try_entry_ != nullptr; } 595 596 const HTryBoundary& GetTryEntry() const { 597 DCHECK(IsTryBlock()); 598 return *try_entry_; 599 } 600 601 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 602 603 bool IsCatchAllTypeIndex() const { 604 DCHECK(IsCatchBlock()); 605 return catch_type_index_ == DexFile::kDexNoIndex16; 606 } 607 608 uint16_t GetCatchTypeIndex() const { 609 DCHECK(IsCatchBlock()); 610 return catch_type_index_; 611 } 612 613 const DexFile& GetCatchDexFile() const { 614 DCHECK(IsCatchBlock()); 615 return *catch_dex_file_; 616 } 617 618 private: 619 // One of possibly several TryBoundary instructions entering the block's try. 620 // Only set for try blocks. 621 const HTryBoundary* try_entry_; 622 623 // Exception type information. Only set for catch blocks. 624 const DexFile* catch_dex_file_; 625 const uint16_t catch_type_index_; 626}; 627 628static constexpr size_t kNoLifetime = -1; 629static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 630 631// A block in a method. Contains the list of instructions represented 632// as a double linked list. Each block knows its predecessors and 633// successors. 634 635class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 636 public: 637 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 638 : graph_(graph), 639 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 640 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 641 loop_information_(nullptr), 642 dominator_(nullptr), 643 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 644 block_id_(kInvalidBlockId), 645 dex_pc_(dex_pc), 646 lifetime_start_(kNoLifetime), 647 lifetime_end_(kNoLifetime), 648 try_catch_information_(nullptr) { 649 predecessors_.reserve(kDefaultNumberOfPredecessors); 650 successors_.reserve(kDefaultNumberOfSuccessors); 651 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 652 } 653 654 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 655 return predecessors_; 656 } 657 658 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 659 return successors_; 660 } 661 662 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 663 return ContainsElement(successors_, block, start_from); 664 } 665 666 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 667 return dominated_blocks_; 668 } 669 670 bool IsEntryBlock() const { 671 return graph_->GetEntryBlock() == this; 672 } 673 674 bool IsExitBlock() const { 675 return graph_->GetExitBlock() == this; 676 } 677 678 bool IsSingleGoto() const; 679 bool IsSingleTryBoundary() const; 680 681 // Returns true if this block emits nothing but a jump. 682 bool IsSingleJump() const { 683 HLoopInformation* loop_info = GetLoopInformation(); 684 return (IsSingleGoto() || IsSingleTryBoundary()) 685 // Back edges generate a suspend check. 686 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 687 } 688 689 void AddBackEdge(HBasicBlock* back_edge) { 690 if (loop_information_ == nullptr) { 691 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 692 } 693 DCHECK_EQ(loop_information_->GetHeader(), this); 694 loop_information_->AddBackEdge(back_edge); 695 } 696 697 HGraph* GetGraph() const { return graph_; } 698 void SetGraph(HGraph* graph) { graph_ = graph; } 699 700 uint32_t GetBlockId() const { return block_id_; } 701 void SetBlockId(int id) { block_id_ = id; } 702 uint32_t GetDexPc() const { return dex_pc_; } 703 704 HBasicBlock* GetDominator() const { return dominator_; } 705 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 706 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 707 708 void RemoveDominatedBlock(HBasicBlock* block) { 709 RemoveElement(dominated_blocks_, block); 710 } 711 712 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 713 ReplaceElement(dominated_blocks_, existing, new_block); 714 } 715 716 void ClearDominanceInformation(); 717 718 int NumberOfBackEdges() const { 719 return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0; 720 } 721 722 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 723 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 724 const HInstructionList& GetInstructions() const { return instructions_; } 725 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 726 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 727 const HInstructionList& GetPhis() const { return phis_; } 728 729 void AddSuccessor(HBasicBlock* block) { 730 successors_.push_back(block); 731 block->predecessors_.push_back(this); 732 } 733 734 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 735 size_t successor_index = GetSuccessorIndexOf(existing); 736 existing->RemovePredecessor(this); 737 new_block->predecessors_.push_back(this); 738 successors_[successor_index] = new_block; 739 } 740 741 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 742 size_t predecessor_index = GetPredecessorIndexOf(existing); 743 existing->RemoveSuccessor(this); 744 new_block->successors_.push_back(this); 745 predecessors_[predecessor_index] = new_block; 746 } 747 748 // Insert `this` between `predecessor` and `successor. This method 749 // preserves the indicies, and will update the first edge found between 750 // `predecessor` and `successor`. 751 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 752 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 753 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 754 successor->predecessors_[predecessor_index] = this; 755 predecessor->successors_[successor_index] = this; 756 successors_.push_back(successor); 757 predecessors_.push_back(predecessor); 758 } 759 760 void RemovePredecessor(HBasicBlock* block) { 761 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 762 } 763 764 void RemoveSuccessor(HBasicBlock* block) { 765 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 766 } 767 768 void ClearAllPredecessors() { 769 predecessors_.clear(); 770 } 771 772 void AddPredecessor(HBasicBlock* block) { 773 predecessors_.push_back(block); 774 block->successors_.push_back(this); 775 } 776 777 void SwapPredecessors() { 778 DCHECK_EQ(predecessors_.size(), 2u); 779 std::swap(predecessors_[0], predecessors_[1]); 780 } 781 782 void SwapSuccessors() { 783 DCHECK_EQ(successors_.size(), 2u); 784 std::swap(successors_[0], successors_[1]); 785 } 786 787 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 788 return IndexOfElement(predecessors_, predecessor); 789 } 790 791 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 792 return IndexOfElement(successors_, successor); 793 } 794 795 HBasicBlock* GetSinglePredecessor() const { 796 DCHECK_EQ(GetPredecessors().size(), 1u); 797 return GetPredecessors()[0]; 798 } 799 800 HBasicBlock* GetSingleSuccessor() const { 801 DCHECK_EQ(GetSuccessors().size(), 1u); 802 return GetSuccessors()[0]; 803 } 804 805 // Returns whether the first occurrence of `predecessor` in the list of 806 // predecessors is at index `idx`. 807 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const { 808 DCHECK_EQ(GetPredecessors()[idx], predecessor); 809 return GetPredecessorIndexOf(predecessor) == idx; 810 } 811 812 // Returns the number of non-exceptional successors. SsaChecker ensures that 813 // these are stored at the beginning of the successor list. 814 size_t NumberOfNormalSuccessors() const { 815 return EndsWithTryBoundary() ? 1 : GetSuccessors().size(); 816 } 817 818 // Create a new block between this block and its predecessors. The new block 819 // is added to the graph, all predecessor edges are relinked to it and an edge 820 // is created to `this`. Returns the new empty block. Reverse post order or 821 // loop and try/catch information are not updated. 822 HBasicBlock* CreateImmediateDominator(); 823 824 // Split the block into two blocks just before `cursor`. Returns the newly 825 // created, latter block. Note that this method will add the block to the 826 // graph, create a Goto at the end of the former block and will create an edge 827 // between the blocks. It will not, however, update the reverse post order or 828 // loop and try/catch information. 829 HBasicBlock* SplitBefore(HInstruction* cursor); 830 831 // Split the block into two blocks just after `cursor`. Returns the newly 832 // created block. Note that this method just updates raw block information, 833 // like predecessors, successors, dominators, and instruction list. It does not 834 // update the graph, reverse post order, loop information, nor make sure the 835 // blocks are consistent (for example ending with a control flow instruction). 836 HBasicBlock* SplitAfter(HInstruction* cursor); 837 838 // Merge `other` at the end of `this`. Successors and dominated blocks of 839 // `other` are changed to be successors and dominated blocks of `this`. Note 840 // that this method does not update the graph, reverse post order, loop 841 // information, nor make sure the blocks are consistent (for example ending 842 // with a control flow instruction). 843 void MergeWithInlined(HBasicBlock* other); 844 845 // Replace `this` with `other`. Predecessors, successors, and dominated blocks 846 // of `this` are moved to `other`. 847 // Note that this method does not update the graph, reverse post order, loop 848 // information, nor make sure the blocks are consistent (for example ending 849 // with a control flow instruction). 850 void ReplaceWith(HBasicBlock* other); 851 852 // Merge `other` at the end of `this`. This method updates loops, reverse post 853 // order, links to predecessors, successors, dominators and deletes the block 854 // from the graph. The two blocks must be successive, i.e. `this` the only 855 // predecessor of `other` and vice versa. 856 void MergeWith(HBasicBlock* other); 857 858 // Disconnects `this` from all its predecessors, successors and dominator, 859 // removes it from all loops it is included in and eventually from the graph. 860 // The block must not dominate any other block. Predecessors and successors 861 // are safely updated. 862 void DisconnectAndDelete(); 863 864 void AddInstruction(HInstruction* instruction); 865 // Insert `instruction` before/after an existing instruction `cursor`. 866 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 867 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 868 // Replace instruction `initial` with `replacement` within this block. 869 void ReplaceAndRemoveInstructionWith(HInstruction* initial, 870 HInstruction* replacement); 871 void AddPhi(HPhi* phi); 872 void InsertPhiAfter(HPhi* instruction, HPhi* cursor); 873 // RemoveInstruction and RemovePhi delete a given instruction from the respective 874 // instruction list. With 'ensure_safety' set to true, it verifies that the 875 // instruction is not in use and removes it from the use lists of its inputs. 876 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true); 877 void RemovePhi(HPhi* phi, bool ensure_safety = true); 878 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true); 879 880 bool IsLoopHeader() const { 881 return IsInLoop() && (loop_information_->GetHeader() == this); 882 } 883 884 bool IsLoopPreHeaderFirstPredecessor() const { 885 DCHECK(IsLoopHeader()); 886 return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader(); 887 } 888 889 HLoopInformation* GetLoopInformation() const { 890 return loop_information_; 891 } 892 893 // Set the loop_information_ on this block. Overrides the current 894 // loop_information if it is an outer loop of the passed loop information. 895 // Note that this method is called while creating the loop information. 896 void SetInLoop(HLoopInformation* info) { 897 if (IsLoopHeader()) { 898 // Nothing to do. This just means `info` is an outer loop. 899 } else if (!IsInLoop()) { 900 loop_information_ = info; 901 } else if (loop_information_->Contains(*info->GetHeader())) { 902 // Block is currently part of an outer loop. Make it part of this inner loop. 903 // Note that a non loop header having a loop information means this loop information 904 // has already been populated 905 loop_information_ = info; 906 } else { 907 // Block is part of an inner loop. Do not update the loop information. 908 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()` 909 // at this point, because this method is being called while populating `info`. 910 } 911 } 912 913 // Raw update of the loop information. 914 void SetLoopInformation(HLoopInformation* info) { 915 loop_information_ = info; 916 } 917 918 bool IsInLoop() const { return loop_information_ != nullptr; } 919 920 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; } 921 922 void SetTryCatchInformation(TryCatchInformation* try_catch_information) { 923 try_catch_information_ = try_catch_information; 924 } 925 926 bool IsTryBlock() const { 927 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock(); 928 } 929 930 bool IsCatchBlock() const { 931 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock(); 932 } 933 934 // Returns the try entry that this block's successors should have. They will 935 // be in the same try, unless the block ends in a try boundary. In that case, 936 // the appropriate try entry will be returned. 937 const HTryBoundary* ComputeTryEntryOfSuccessors() const; 938 939 bool HasThrowingInstructions() const; 940 941 // Returns whether this block dominates the blocked passed as parameter. 942 bool Dominates(HBasicBlock* block) const; 943 944 size_t GetLifetimeStart() const { return lifetime_start_; } 945 size_t GetLifetimeEnd() const { return lifetime_end_; } 946 947 void SetLifetimeStart(size_t start) { lifetime_start_ = start; } 948 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; } 949 950 bool EndsWithControlFlowInstruction() const; 951 bool EndsWithIf() const; 952 bool EndsWithTryBoundary() const; 953 bool HasSinglePhi() const; 954 955 private: 956 HGraph* graph_; 957 ArenaVector<HBasicBlock*> predecessors_; 958 ArenaVector<HBasicBlock*> successors_; 959 HInstructionList instructions_; 960 HInstructionList phis_; 961 HLoopInformation* loop_information_; 962 HBasicBlock* dominator_; 963 ArenaVector<HBasicBlock*> dominated_blocks_; 964 uint32_t block_id_; 965 // The dex program counter of the first instruction of this block. 966 const uint32_t dex_pc_; 967 size_t lifetime_start_; 968 size_t lifetime_end_; 969 TryCatchInformation* try_catch_information_; 970 971 friend class HGraph; 972 friend class HInstruction; 973 974 DISALLOW_COPY_AND_ASSIGN(HBasicBlock); 975}; 976 977// Iterates over the LoopInformation of all loops which contain 'block' 978// from the innermost to the outermost. 979class HLoopInformationOutwardIterator : public ValueObject { 980 public: 981 explicit HLoopInformationOutwardIterator(const HBasicBlock& block) 982 : current_(block.GetLoopInformation()) {} 983 984 bool Done() const { return current_ == nullptr; } 985 986 void Advance() { 987 DCHECK(!Done()); 988 current_ = current_->GetPreHeader()->GetLoopInformation(); 989 } 990 991 HLoopInformation* Current() const { 992 DCHECK(!Done()); 993 return current_; 994 } 995 996 private: 997 HLoopInformation* current_; 998 999 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator); 1000}; 1001 1002#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1003 M(Above, Condition) \ 1004 M(AboveOrEqual, Condition) \ 1005 M(Add, BinaryOperation) \ 1006 M(And, BinaryOperation) \ 1007 M(ArrayGet, Instruction) \ 1008 M(ArrayLength, Instruction) \ 1009 M(ArraySet, Instruction) \ 1010 M(Below, Condition) \ 1011 M(BelowOrEqual, Condition) \ 1012 M(BooleanNot, UnaryOperation) \ 1013 M(BoundsCheck, Instruction) \ 1014 M(BoundType, Instruction) \ 1015 M(CheckCast, Instruction) \ 1016 M(ClearException, Instruction) \ 1017 M(ClinitCheck, Instruction) \ 1018 M(Compare, BinaryOperation) \ 1019 M(Condition, BinaryOperation) \ 1020 M(CurrentMethod, Instruction) \ 1021 M(Deoptimize, Instruction) \ 1022 M(Div, BinaryOperation) \ 1023 M(DivZeroCheck, Instruction) \ 1024 M(DoubleConstant, Constant) \ 1025 M(Equal, Condition) \ 1026 M(Exit, Instruction) \ 1027 M(FakeString, Instruction) \ 1028 M(FloatConstant, Constant) \ 1029 M(Goto, Instruction) \ 1030 M(GreaterThan, Condition) \ 1031 M(GreaterThanOrEqual, Condition) \ 1032 M(If, Instruction) \ 1033 M(InstanceFieldGet, Instruction) \ 1034 M(InstanceFieldSet, Instruction) \ 1035 M(InstanceOf, Instruction) \ 1036 M(IntConstant, Constant) \ 1037 M(InvokeUnresolved, Invoke) \ 1038 M(InvokeInterface, Invoke) \ 1039 M(InvokeStaticOrDirect, Invoke) \ 1040 M(InvokeVirtual, Invoke) \ 1041 M(LessThan, Condition) \ 1042 M(LessThanOrEqual, Condition) \ 1043 M(LoadClass, Instruction) \ 1044 M(LoadException, Instruction) \ 1045 M(LoadLocal, Instruction) \ 1046 M(LoadString, Instruction) \ 1047 M(Local, Instruction) \ 1048 M(LongConstant, Constant) \ 1049 M(MemoryBarrier, Instruction) \ 1050 M(MonitorOperation, Instruction) \ 1051 M(Mul, BinaryOperation) \ 1052 M(Neg, UnaryOperation) \ 1053 M(NewArray, Instruction) \ 1054 M(NewInstance, Instruction) \ 1055 M(Not, UnaryOperation) \ 1056 M(NotEqual, Condition) \ 1057 M(NullConstant, Instruction) \ 1058 M(NullCheck, Instruction) \ 1059 M(Or, BinaryOperation) \ 1060 M(PackedSwitch, Instruction) \ 1061 M(ParallelMove, Instruction) \ 1062 M(ParameterValue, Instruction) \ 1063 M(Phi, Instruction) \ 1064 M(Rem, BinaryOperation) \ 1065 M(Return, Instruction) \ 1066 M(ReturnVoid, Instruction) \ 1067 M(Shl, BinaryOperation) \ 1068 M(Shr, BinaryOperation) \ 1069 M(StaticFieldGet, Instruction) \ 1070 M(StaticFieldSet, Instruction) \ 1071 M(UnresolvedInstanceFieldGet, Instruction) \ 1072 M(UnresolvedInstanceFieldSet, Instruction) \ 1073 M(UnresolvedStaticFieldGet, Instruction) \ 1074 M(UnresolvedStaticFieldSet, Instruction) \ 1075 M(StoreLocal, Instruction) \ 1076 M(Sub, BinaryOperation) \ 1077 M(SuspendCheck, Instruction) \ 1078 M(Temporary, Instruction) \ 1079 M(Throw, Instruction) \ 1080 M(TryBoundary, Instruction) \ 1081 M(TypeConversion, Instruction) \ 1082 M(UShr, BinaryOperation) \ 1083 M(Xor, BinaryOperation) \ 1084 1085#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) 1086 1087#ifndef ART_ENABLE_CODEGEN_arm64 1088#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) 1089#else 1090#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1091 M(Arm64IntermediateAddress, Instruction) 1092#endif 1093 1094#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) 1095 1096#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) 1097 1098#ifndef ART_ENABLE_CODEGEN_x86 1099#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) 1100#else 1101#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1102 M(X86ComputeBaseMethodAddress, Instruction) \ 1103 M(X86LoadFromConstantTable, Instruction) \ 1104 M(X86PackedSwitch, Instruction) 1105#endif 1106 1107#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1108 1109#define FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1110 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1111 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \ 1112 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1113 FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \ 1114 FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \ 1115 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1116 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1117 1118#define FOR_EACH_INSTRUCTION(M) \ 1119 FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1120 M(Constant, Instruction) \ 1121 M(UnaryOperation, Instruction) \ 1122 M(BinaryOperation, Instruction) \ 1123 M(Invoke, Instruction) 1124 1125#define FORWARD_DECLARATION(type, super) class H##type; 1126FOR_EACH_INSTRUCTION(FORWARD_DECLARATION) 1127#undef FORWARD_DECLARATION 1128 1129#define DECLARE_INSTRUCTION(type) \ 1130 InstructionKind GetKind() const OVERRIDE { return k##type; } \ 1131 const char* DebugName() const OVERRIDE { return #type; } \ 1132 const H##type* As##type() const OVERRIDE { return this; } \ 1133 H##type* As##type() OVERRIDE { return this; } \ 1134 bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \ 1135 return other->Is##type(); \ 1136 } \ 1137 void Accept(HGraphVisitor* visitor) OVERRIDE 1138 1139template <typename T> class HUseList; 1140 1141template <typename T> 1142class HUseListNode : public ArenaObject<kArenaAllocUseListNode> { 1143 public: 1144 HUseListNode* GetPrevious() const { return prev_; } 1145 HUseListNode* GetNext() const { return next_; } 1146 T GetUser() const { return user_; } 1147 size_t GetIndex() const { return index_; } 1148 void SetIndex(size_t index) { index_ = index; } 1149 1150 private: 1151 HUseListNode(T user, size_t index) 1152 : user_(user), index_(index), prev_(nullptr), next_(nullptr) {} 1153 1154 T const user_; 1155 size_t index_; 1156 HUseListNode<T>* prev_; 1157 HUseListNode<T>* next_; 1158 1159 friend class HUseList<T>; 1160 1161 DISALLOW_COPY_AND_ASSIGN(HUseListNode); 1162}; 1163 1164template <typename T> 1165class HUseList : public ValueObject { 1166 public: 1167 HUseList() : first_(nullptr) {} 1168 1169 void Clear() { 1170 first_ = nullptr; 1171 } 1172 1173 // Adds a new entry at the beginning of the use list and returns 1174 // the newly created node. 1175 HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) { 1176 HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index); 1177 if (IsEmpty()) { 1178 first_ = new_node; 1179 } else { 1180 first_->prev_ = new_node; 1181 new_node->next_ = first_; 1182 first_ = new_node; 1183 } 1184 return new_node; 1185 } 1186 1187 HUseListNode<T>* GetFirst() const { 1188 return first_; 1189 } 1190 1191 void Remove(HUseListNode<T>* node) { 1192 DCHECK(node != nullptr); 1193 DCHECK(Contains(node)); 1194 1195 if (node->prev_ != nullptr) { 1196 node->prev_->next_ = node->next_; 1197 } 1198 if (node->next_ != nullptr) { 1199 node->next_->prev_ = node->prev_; 1200 } 1201 if (node == first_) { 1202 first_ = node->next_; 1203 } 1204 } 1205 1206 bool Contains(const HUseListNode<T>* node) const { 1207 if (node == nullptr) { 1208 return false; 1209 } 1210 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1211 if (current == node) { 1212 return true; 1213 } 1214 } 1215 return false; 1216 } 1217 1218 bool IsEmpty() const { 1219 return first_ == nullptr; 1220 } 1221 1222 bool HasOnlyOneUse() const { 1223 return first_ != nullptr && first_->next_ == nullptr; 1224 } 1225 1226 size_t SizeSlow() const { 1227 size_t count = 0; 1228 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1229 ++count; 1230 } 1231 return count; 1232 } 1233 1234 private: 1235 HUseListNode<T>* first_; 1236}; 1237 1238template<typename T> 1239class HUseIterator : public ValueObject { 1240 public: 1241 explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {} 1242 1243 bool Done() const { return current_ == nullptr; } 1244 1245 void Advance() { 1246 DCHECK(!Done()); 1247 current_ = current_->GetNext(); 1248 } 1249 1250 HUseListNode<T>* Current() const { 1251 DCHECK(!Done()); 1252 return current_; 1253 } 1254 1255 private: 1256 HUseListNode<T>* current_; 1257 1258 friend class HValue; 1259}; 1260 1261// This class is used by HEnvironment and HInstruction classes to record the 1262// instructions they use and pointers to the corresponding HUseListNodes kept 1263// by the used instructions. 1264template <typename T> 1265class HUserRecord : public ValueObject { 1266 public: 1267 HUserRecord() : instruction_(nullptr), use_node_(nullptr) {} 1268 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {} 1269 1270 HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node) 1271 : instruction_(old_record.instruction_), use_node_(use_node) { 1272 DCHECK(instruction_ != nullptr); 1273 DCHECK(use_node_ != nullptr); 1274 DCHECK(old_record.use_node_ == nullptr); 1275 } 1276 1277 HInstruction* GetInstruction() const { return instruction_; } 1278 HUseListNode<T>* GetUseNode() const { return use_node_; } 1279 1280 private: 1281 // Instruction used by the user. 1282 HInstruction* instruction_; 1283 1284 // Corresponding entry in the use list kept by 'instruction_'. 1285 HUseListNode<T>* use_node_; 1286}; 1287 1288/** 1289 * Side-effects representation. 1290 * 1291 * For write/read dependences on fields/arrays, the dependence analysis uses 1292 * type disambiguation (e.g. a float field write cannot modify the value of an 1293 * integer field read) and the access type (e.g. a reference array write cannot 1294 * modify the value of a reference field read [although it may modify the 1295 * reference fetch prior to reading the field, which is represented by its own 1296 * write/read dependence]). The analysis makes conservative points-to 1297 * assumptions on reference types (e.g. two same typed arrays are assumed to be 1298 * the same, and any reference read depends on any reference read without 1299 * further regard of its type). 1300 * 1301 * The internal representation uses 38-bit and is described in the table below. 1302 * The first line indicates the side effect, and for field/array accesses the 1303 * second line indicates the type of the access (in the order of the 1304 * Primitive::Type enum). 1305 * The two numbered lines below indicate the bit position in the bitfield (read 1306 * vertically). 1307 * 1308 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W | 1309 * +-------------+---------+---------+--------------+---------+---------+ 1310 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL| 1311 * | 3 |333333322|222222221| 1 |111111110|000000000| 1312 * | 7 |654321098|765432109| 8 |765432109|876543210| 1313 * 1314 * Note that, to ease the implementation, 'changes' bits are least significant 1315 * bits, while 'dependency' bits are most significant bits. 1316 */ 1317class SideEffects : public ValueObject { 1318 public: 1319 SideEffects() : flags_(0) {} 1320 1321 static SideEffects None() { 1322 return SideEffects(0); 1323 } 1324 1325 static SideEffects All() { 1326 return SideEffects(kAllChangeBits | kAllDependOnBits); 1327 } 1328 1329 static SideEffects AllChanges() { 1330 return SideEffects(kAllChangeBits); 1331 } 1332 1333 static SideEffects AllDependencies() { 1334 return SideEffects(kAllDependOnBits); 1335 } 1336 1337 static SideEffects AllExceptGCDependency() { 1338 return AllWritesAndReads().Union(SideEffects::CanTriggerGC()); 1339 } 1340 1341 static SideEffects AllWritesAndReads() { 1342 return SideEffects(kAllWrites | kAllReads); 1343 } 1344 1345 static SideEffects AllWrites() { 1346 return SideEffects(kAllWrites); 1347 } 1348 1349 static SideEffects AllReads() { 1350 return SideEffects(kAllReads); 1351 } 1352 1353 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) { 1354 return is_volatile 1355 ? AllWritesAndReads() 1356 : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset)); 1357 } 1358 1359 static SideEffects ArrayWriteOfType(Primitive::Type type) { 1360 return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset)); 1361 } 1362 1363 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) { 1364 return is_volatile 1365 ? AllWritesAndReads() 1366 : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset)); 1367 } 1368 1369 static SideEffects ArrayReadOfType(Primitive::Type type) { 1370 return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset)); 1371 } 1372 1373 static SideEffects CanTriggerGC() { 1374 return SideEffects(1ULL << kCanTriggerGCBit); 1375 } 1376 1377 static SideEffects DependsOnGC() { 1378 return SideEffects(1ULL << kDependsOnGCBit); 1379 } 1380 1381 // Combines the side-effects of this and the other. 1382 SideEffects Union(SideEffects other) const { 1383 return SideEffects(flags_ | other.flags_); 1384 } 1385 1386 SideEffects Exclusion(SideEffects other) const { 1387 return SideEffects(flags_ & ~other.flags_); 1388 } 1389 1390 void Add(SideEffects other) { 1391 flags_ |= other.flags_; 1392 } 1393 1394 bool Includes(SideEffects other) const { 1395 return (other.flags_ & flags_) == other.flags_; 1396 } 1397 1398 bool HasSideEffects() const { 1399 return (flags_ & kAllChangeBits); 1400 } 1401 1402 bool HasDependencies() const { 1403 return (flags_ & kAllDependOnBits); 1404 } 1405 1406 // Returns true if there are no side effects or dependencies. 1407 bool DoesNothing() const { 1408 return flags_ == 0; 1409 } 1410 1411 // Returns true if something is written. 1412 bool DoesAnyWrite() const { 1413 return (flags_ & kAllWrites); 1414 } 1415 1416 // Returns true if something is read. 1417 bool DoesAnyRead() const { 1418 return (flags_ & kAllReads); 1419 } 1420 1421 // Returns true if potentially everything is written and read 1422 // (every type and every kind of access). 1423 bool DoesAllReadWrite() const { 1424 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads); 1425 } 1426 1427 bool DoesAll() const { 1428 return flags_ == (kAllChangeBits | kAllDependOnBits); 1429 } 1430 1431 // Returns true if this may read something written by other. 1432 bool MayDependOn(SideEffects other) const { 1433 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits; 1434 return (other.flags_ & depends_on_flags); 1435 } 1436 1437 // Returns string representation of flags (for debugging only). 1438 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL| 1439 std::string ToString() const { 1440 std::string flags = "|"; 1441 for (int s = kLastBit; s >= 0; s--) { 1442 bool current_bit_is_set = ((flags_ >> s) & 1) != 0; 1443 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) { 1444 // This is a bit for the GC side effect. 1445 if (current_bit_is_set) { 1446 flags += "GC"; 1447 } 1448 flags += "|"; 1449 } else { 1450 // This is a bit for the array/field analysis. 1451 // The underscore character stands for the 'can trigger GC' bit. 1452 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD"; 1453 if (current_bit_is_set) { 1454 flags += kDebug[s]; 1455 } 1456 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) || 1457 (s == kFieldReadOffset) || (s == kArrayReadOffset)) { 1458 flags += "|"; 1459 } 1460 } 1461 } 1462 return flags; 1463 } 1464 1465 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; } 1466 1467 private: 1468 static constexpr int kFieldArrayAnalysisBits = 9; 1469 1470 static constexpr int kFieldWriteOffset = 0; 1471 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits; 1472 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1; 1473 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1; 1474 1475 static constexpr int kChangeBits = kCanTriggerGCBit + 1; 1476 1477 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1; 1478 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits; 1479 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1; 1480 static constexpr int kDependsOnGCBit = kLastBitForReads + 1; 1481 1482 static constexpr int kLastBit = kDependsOnGCBit; 1483 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits; 1484 1485 // Aliases. 1486 1487 static_assert(kChangeBits == kDependOnBits, 1488 "the 'change' bits should match the 'depend on' bits."); 1489 1490 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1); 1491 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits; 1492 static constexpr uint64_t kAllWrites = 1493 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset; 1494 static constexpr uint64_t kAllReads = 1495 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset; 1496 1497 // Work around the fact that HIR aliases I/F and J/D. 1498 // TODO: remove this interceptor once HIR types are clean 1499 static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) { 1500 switch (type) { 1501 case Primitive::kPrimInt: 1502 case Primitive::kPrimFloat: 1503 return TypeFlag(Primitive::kPrimInt, offset) | 1504 TypeFlag(Primitive::kPrimFloat, offset); 1505 case Primitive::kPrimLong: 1506 case Primitive::kPrimDouble: 1507 return TypeFlag(Primitive::kPrimLong, offset) | 1508 TypeFlag(Primitive::kPrimDouble, offset); 1509 default: 1510 return TypeFlag(type, offset); 1511 } 1512 } 1513 1514 // Translates type to bit flag. 1515 static uint64_t TypeFlag(Primitive::Type type, int offset) { 1516 CHECK_NE(type, Primitive::kPrimVoid); 1517 const uint64_t one = 1; 1518 const int shift = type; // 0-based consecutive enum 1519 DCHECK_LE(kFieldWriteOffset, shift); 1520 DCHECK_LT(shift, kArrayWriteOffset); 1521 return one << (type + offset); 1522 } 1523 1524 // Private constructor on direct flags value. 1525 explicit SideEffects(uint64_t flags) : flags_(flags) {} 1526 1527 uint64_t flags_; 1528}; 1529 1530// A HEnvironment object contains the values of virtual registers at a given location. 1531class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1532 public: 1533 HEnvironment(ArenaAllocator* arena, 1534 size_t number_of_vregs, 1535 const DexFile& dex_file, 1536 uint32_t method_idx, 1537 uint32_t dex_pc, 1538 InvokeType invoke_type, 1539 HInstruction* holder) 1540 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1541 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1542 parent_(nullptr), 1543 dex_file_(dex_file), 1544 method_idx_(method_idx), 1545 dex_pc_(dex_pc), 1546 invoke_type_(invoke_type), 1547 holder_(holder) { 1548 } 1549 1550 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1551 : HEnvironment(arena, 1552 to_copy.Size(), 1553 to_copy.GetDexFile(), 1554 to_copy.GetMethodIdx(), 1555 to_copy.GetDexPc(), 1556 to_copy.GetInvokeType(), 1557 holder) {} 1558 1559 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1560 if (parent_ != nullptr) { 1561 parent_->SetAndCopyParentChain(allocator, parent); 1562 } else { 1563 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1564 parent_->CopyFrom(parent); 1565 if (parent->GetParent() != nullptr) { 1566 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1567 } 1568 } 1569 } 1570 1571 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1572 void CopyFrom(HEnvironment* environment); 1573 1574 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1575 // input to the loop phi instead. This is for inserting instructions that 1576 // require an environment (like HDeoptimization) in the loop pre-header. 1577 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1578 1579 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1580 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1581 } 1582 1583 HInstruction* GetInstructionAt(size_t index) const { 1584 return vregs_[index].GetInstruction(); 1585 } 1586 1587 void RemoveAsUserOfInput(size_t index) const; 1588 1589 size_t Size() const { return vregs_.size(); } 1590 1591 HEnvironment* GetParent() const { return parent_; } 1592 1593 void SetLocationAt(size_t index, Location location) { 1594 locations_[index] = location; 1595 } 1596 1597 Location GetLocationAt(size_t index) const { 1598 return locations_[index]; 1599 } 1600 1601 uint32_t GetDexPc() const { 1602 return dex_pc_; 1603 } 1604 1605 uint32_t GetMethodIdx() const { 1606 return method_idx_; 1607 } 1608 1609 InvokeType GetInvokeType() const { 1610 return invoke_type_; 1611 } 1612 1613 const DexFile& GetDexFile() const { 1614 return dex_file_; 1615 } 1616 1617 HInstruction* GetHolder() const { 1618 return holder_; 1619 } 1620 1621 private: 1622 // Record instructions' use entries of this environment for constant-time removal. 1623 // It should only be called by HInstruction when a new environment use is added. 1624 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1625 DCHECK(env_use->GetUser() == this); 1626 size_t index = env_use->GetIndex(); 1627 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1628 } 1629 1630 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1631 ArenaVector<Location> locations_; 1632 HEnvironment* parent_; 1633 const DexFile& dex_file_; 1634 const uint32_t method_idx_; 1635 const uint32_t dex_pc_; 1636 const InvokeType invoke_type_; 1637 1638 // The instruction that holds this environment. 1639 HInstruction* const holder_; 1640 1641 friend class HInstruction; 1642 1643 DISALLOW_COPY_AND_ASSIGN(HEnvironment); 1644}; 1645 1646class ReferenceTypeInfo : ValueObject { 1647 public: 1648 typedef Handle<mirror::Class> TypeHandle; 1649 1650 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) { 1651 // The constructor will check that the type_handle is valid. 1652 return ReferenceTypeInfo(type_handle, is_exact); 1653 } 1654 1655 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); } 1656 1657 static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) { 1658 return handle.GetReference() != nullptr; 1659 } 1660 1661 bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) { 1662 return IsValidHandle(type_handle_); 1663 } 1664 1665 bool IsExact() const { return is_exact_; } 1666 1667 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1668 DCHECK(IsValid()); 1669 return GetTypeHandle()->IsObjectClass(); 1670 } 1671 1672 bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1673 DCHECK(IsValid()); 1674 return GetTypeHandle()->IsStringClass(); 1675 } 1676 1677 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) { 1678 DCHECK(IsValid()); 1679 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass(); 1680 } 1681 1682 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) { 1683 DCHECK(IsValid()); 1684 return GetTypeHandle()->IsInterface(); 1685 } 1686 1687 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1688 DCHECK(IsValid()); 1689 return GetTypeHandle()->IsArrayClass(); 1690 } 1691 1692 bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1693 DCHECK(IsValid()); 1694 return GetTypeHandle()->IsPrimitiveArray(); 1695 } 1696 1697 bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1698 DCHECK(IsValid()); 1699 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray(); 1700 } 1701 1702 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1703 DCHECK(IsValid()); 1704 if (!IsExact()) return false; 1705 if (!IsArrayClass()) return false; 1706 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1707 } 1708 1709 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1710 DCHECK(IsValid()); 1711 if (!IsExact()) return false; 1712 if (!IsArrayClass()) return false; 1713 if (!rti.IsArrayClass()) return false; 1714 return GetTypeHandle()->GetComponentType()->IsAssignableFrom( 1715 rti.GetTypeHandle()->GetComponentType()); 1716 } 1717 1718 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 1719 1720 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1721 DCHECK(IsValid()); 1722 DCHECK(rti.IsValid()); 1723 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1724 } 1725 1726 // Returns true if the type information provide the same amount of details. 1727 // Note that it does not mean that the instructions have the same actual type 1728 // (because the type can be the result of a merge). 1729 bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) { 1730 if (!IsValid() && !rti.IsValid()) { 1731 // Invalid types are equal. 1732 return true; 1733 } 1734 if (!IsValid() || !rti.IsValid()) { 1735 // One is valid, the other not. 1736 return false; 1737 } 1738 return IsExact() == rti.IsExact() 1739 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 1740 } 1741 1742 private: 1743 ReferenceTypeInfo(); 1744 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 1745 1746 // The class of the object. 1747 TypeHandle type_handle_; 1748 // Whether or not the type is exact or a superclass of the actual type. 1749 // Whether or not we have any information about this type. 1750 bool is_exact_; 1751}; 1752 1753std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 1754 1755class HInstruction : public ArenaObject<kArenaAllocInstruction> { 1756 public: 1757 HInstruction(SideEffects side_effects, uint32_t dex_pc) 1758 : previous_(nullptr), 1759 next_(nullptr), 1760 block_(nullptr), 1761 dex_pc_(dex_pc), 1762 id_(-1), 1763 ssa_index_(-1), 1764 environment_(nullptr), 1765 locations_(nullptr), 1766 live_interval_(nullptr), 1767 lifetime_position_(kNoLifetime), 1768 side_effects_(side_effects), 1769 reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {} 1770 1771 virtual ~HInstruction() {} 1772 1773#define DECLARE_KIND(type, super) k##type, 1774 enum InstructionKind { 1775 FOR_EACH_INSTRUCTION(DECLARE_KIND) 1776 }; 1777#undef DECLARE_KIND 1778 1779 HInstruction* GetNext() const { return next_; } 1780 HInstruction* GetPrevious() const { return previous_; } 1781 1782 HInstruction* GetNextDisregardingMoves() const; 1783 HInstruction* GetPreviousDisregardingMoves() const; 1784 1785 HBasicBlock* GetBlock() const { return block_; } 1786 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); } 1787 void SetBlock(HBasicBlock* block) { block_ = block; } 1788 bool IsInBlock() const { return block_ != nullptr; } 1789 bool IsInLoop() const { return block_->IsInLoop(); } 1790 bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); } 1791 1792 virtual size_t InputCount() const = 0; 1793 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); } 1794 1795 virtual void Accept(HGraphVisitor* visitor) = 0; 1796 virtual const char* DebugName() const = 0; 1797 1798 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1799 void SetRawInputAt(size_t index, HInstruction* input) { 1800 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1801 } 1802 1803 virtual bool NeedsEnvironment() const { return false; } 1804 1805 uint32_t GetDexPc() const { return dex_pc_; } 1806 1807 virtual bool IsControlFlow() const { return false; } 1808 1809 virtual bool CanThrow() const { return false; } 1810 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); } 1811 1812 bool HasSideEffects() const { return side_effects_.HasSideEffects(); } 1813 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); } 1814 1815 // Does not apply for all instructions, but having this at top level greatly 1816 // simplifies the null check elimination. 1817 // TODO: Consider merging can_be_null into ReferenceTypeInfo. 1818 virtual bool CanBeNull() const { 1819 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types"; 1820 return true; 1821 } 1822 1823 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const { 1824 return false; 1825 } 1826 1827 void SetReferenceTypeInfo(ReferenceTypeInfo rti); 1828 1829 ReferenceTypeInfo GetReferenceTypeInfo() const { 1830 DCHECK_EQ(GetType(), Primitive::kPrimNot); 1831 return reference_type_info_; 1832 } 1833 1834 void AddUseAt(HInstruction* user, size_t index) { 1835 DCHECK(user != nullptr); 1836 HUseListNode<HInstruction*>* use = 1837 uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1838 user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use)); 1839 } 1840 1841 void AddEnvUseAt(HEnvironment* user, size_t index) { 1842 DCHECK(user != nullptr); 1843 HUseListNode<HEnvironment*>* env_use = 1844 env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1845 user->RecordEnvUse(env_use); 1846 } 1847 1848 void RemoveAsUserOfInput(size_t input) { 1849 HUserRecord<HInstruction*> input_use = InputRecordAt(input); 1850 input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode()); 1851 } 1852 1853 const HUseList<HInstruction*>& GetUses() const { return uses_; } 1854 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; } 1855 1856 bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); } 1857 bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); } 1858 bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); } 1859 bool HasOnlyOneNonEnvironmentUse() const { 1860 return !HasEnvironmentUses() && GetUses().HasOnlyOneUse(); 1861 } 1862 1863 // Does this instruction strictly dominate `other_instruction`? 1864 // Returns false if this instruction and `other_instruction` are the same. 1865 // Aborts if this instruction and `other_instruction` are both phis. 1866 bool StrictlyDominates(HInstruction* other_instruction) const; 1867 1868 int GetId() const { return id_; } 1869 void SetId(int id) { id_ = id; } 1870 1871 int GetSsaIndex() const { return ssa_index_; } 1872 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; } 1873 bool HasSsaIndex() const { return ssa_index_ != -1; } 1874 1875 bool HasEnvironment() const { return environment_ != nullptr; } 1876 HEnvironment* GetEnvironment() const { return environment_; } 1877 // Set the `environment_` field. Raw because this method does not 1878 // update the uses lists. 1879 void SetRawEnvironment(HEnvironment* environment) { 1880 DCHECK(environment_ == nullptr); 1881 DCHECK_EQ(environment->GetHolder(), this); 1882 environment_ = environment; 1883 } 1884 1885 // Set the environment of this instruction, copying it from `environment`. While 1886 // copying, the uses lists are being updated. 1887 void CopyEnvironmentFrom(HEnvironment* environment) { 1888 DCHECK(environment_ == nullptr); 1889 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1890 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1891 environment_->CopyFrom(environment); 1892 if (environment->GetParent() != nullptr) { 1893 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1894 } 1895 } 1896 1897 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment, 1898 HBasicBlock* block) { 1899 DCHECK(environment_ == nullptr); 1900 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1901 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1902 environment_->CopyFromWithLoopPhiAdjustment(environment, block); 1903 if (environment->GetParent() != nullptr) { 1904 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1905 } 1906 } 1907 1908 // Returns the number of entries in the environment. Typically, that is the 1909 // number of dex registers in a method. It could be more in case of inlining. 1910 size_t EnvironmentSize() const; 1911 1912 LocationSummary* GetLocations() const { return locations_; } 1913 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1914 1915 void ReplaceWith(HInstruction* instruction); 1916 void ReplaceInput(HInstruction* replacement, size_t index); 1917 1918 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1919 // uses of this instruction by `other` are *not* updated. 1920 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1921 ReplaceWith(other); 1922 other->ReplaceInput(this, use_index); 1923 } 1924 1925 // Move `this` instruction before `cursor`. 1926 void MoveBefore(HInstruction* cursor); 1927 1928#define INSTRUCTION_TYPE_CHECK(type, super) \ 1929 bool Is##type() const { return (As##type() != nullptr); } \ 1930 virtual const H##type* As##type() const { return nullptr; } \ 1931 virtual H##type* As##type() { return nullptr; } 1932 1933 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1934#undef INSTRUCTION_TYPE_CHECK 1935 1936 // Returns whether the instruction can be moved within the graph. 1937 virtual bool CanBeMoved() const { return false; } 1938 1939 // Returns whether the two instructions are of the same kind. 1940 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1941 return false; 1942 } 1943 1944 // Returns whether any data encoded in the two instructions is equal. 1945 // This method does not look at the inputs. Both instructions must be 1946 // of the same type, otherwise the method has undefined behavior. 1947 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1948 return false; 1949 } 1950 1951 // Returns whether two instructions are equal, that is: 1952 // 1) They have the same type and contain the same data (InstructionDataEquals). 1953 // 2) Their inputs are identical. 1954 bool Equals(HInstruction* other) const; 1955 1956 virtual InstructionKind GetKind() const = 0; 1957 1958 virtual size_t ComputeHashCode() const { 1959 size_t result = GetKind(); 1960 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1961 result = (result * 31) + InputAt(i)->GetId(); 1962 } 1963 return result; 1964 } 1965 1966 SideEffects GetSideEffects() const { return side_effects_; } 1967 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 1968 1969 size_t GetLifetimePosition() const { return lifetime_position_; } 1970 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1971 LiveInterval* GetLiveInterval() const { return live_interval_; } 1972 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1973 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1974 1975 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 1976 1977 // Returns whether the code generation of the instruction will require to have access 1978 // to the current method. Such instructions are: 1979 // (1): Instructions that require an environment, as calling the runtime requires 1980 // to walk the stack and have the current method stored at a specific stack address. 1981 // (2): Object literals like classes and strings, that are loaded from the dex cache 1982 // fields of the current method. 1983 bool NeedsCurrentMethod() const { 1984 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 1985 } 1986 1987 // Returns whether the code generation of the instruction will require to have access 1988 // to the dex cache of the current method's declaring class via the current method. 1989 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 1990 1991 // Does this instruction have any use in an environment before 1992 // control flow hits 'other'? 1993 bool HasAnyEnvironmentUseBefore(HInstruction* other); 1994 1995 // Remove all references to environment uses of this instruction. 1996 // The caller must ensure that this is safe to do. 1997 void RemoveEnvironmentUsers(); 1998 1999 protected: 2000 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2001 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2002 2003 private: 2004 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2005 2006 HInstruction* previous_; 2007 HInstruction* next_; 2008 HBasicBlock* block_; 2009 const uint32_t dex_pc_; 2010 2011 // An instruction gets an id when it is added to the graph. 2012 // It reflects creation order. A negative id means the instruction 2013 // has not been added to the graph. 2014 int id_; 2015 2016 // When doing liveness analysis, instructions that have uses get an SSA index. 2017 int ssa_index_; 2018 2019 // List of instructions that have this instruction as input. 2020 HUseList<HInstruction*> uses_; 2021 2022 // List of environments that contain this instruction. 2023 HUseList<HEnvironment*> env_uses_; 2024 2025 // The environment associated with this instruction. Not null if the instruction 2026 // might jump out of the method. 2027 HEnvironment* environment_; 2028 2029 // Set by the code generator. 2030 LocationSummary* locations_; 2031 2032 // Set by the liveness analysis. 2033 LiveInterval* live_interval_; 2034 2035 // Set by the liveness analysis, this is the position in a linear 2036 // order of blocks where this instruction's live interval start. 2037 size_t lifetime_position_; 2038 2039 SideEffects side_effects_; 2040 2041 // TODO: for primitive types this should be marked as invalid. 2042 ReferenceTypeInfo reference_type_info_; 2043 2044 friend class GraphChecker; 2045 friend class HBasicBlock; 2046 friend class HEnvironment; 2047 friend class HGraph; 2048 friend class HInstructionList; 2049 2050 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2051}; 2052std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2053 2054class HInputIterator : public ValueObject { 2055 public: 2056 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2057 2058 bool Done() const { return index_ == instruction_->InputCount(); } 2059 HInstruction* Current() const { return instruction_->InputAt(index_); } 2060 void Advance() { index_++; } 2061 2062 private: 2063 HInstruction* instruction_; 2064 size_t index_; 2065 2066 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2067}; 2068 2069class HInstructionIterator : public ValueObject { 2070 public: 2071 explicit HInstructionIterator(const HInstructionList& instructions) 2072 : instruction_(instructions.first_instruction_) { 2073 next_ = Done() ? nullptr : instruction_->GetNext(); 2074 } 2075 2076 bool Done() const { return instruction_ == nullptr; } 2077 HInstruction* Current() const { return instruction_; } 2078 void Advance() { 2079 instruction_ = next_; 2080 next_ = Done() ? nullptr : instruction_->GetNext(); 2081 } 2082 2083 private: 2084 HInstruction* instruction_; 2085 HInstruction* next_; 2086 2087 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2088}; 2089 2090class HBackwardInstructionIterator : public ValueObject { 2091 public: 2092 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2093 : instruction_(instructions.last_instruction_) { 2094 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2095 } 2096 2097 bool Done() const { return instruction_ == nullptr; } 2098 HInstruction* Current() const { return instruction_; } 2099 void Advance() { 2100 instruction_ = next_; 2101 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2102 } 2103 2104 private: 2105 HInstruction* instruction_; 2106 HInstruction* next_; 2107 2108 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2109}; 2110 2111template<size_t N> 2112class HTemplateInstruction: public HInstruction { 2113 public: 2114 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc) 2115 : HInstruction(side_effects, dex_pc), inputs_() {} 2116 virtual ~HTemplateInstruction() {} 2117 2118 size_t InputCount() const OVERRIDE { return N; } 2119 2120 protected: 2121 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2122 DCHECK_LT(i, N); 2123 return inputs_[i]; 2124 } 2125 2126 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2127 DCHECK_LT(i, N); 2128 inputs_[i] = input; 2129 } 2130 2131 private: 2132 std::array<HUserRecord<HInstruction*>, N> inputs_; 2133 2134 friend class SsaBuilder; 2135}; 2136 2137// HTemplateInstruction specialization for N=0. 2138template<> 2139class HTemplateInstruction<0>: public HInstruction { 2140 public: 2141 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc) 2142 : HInstruction(side_effects, dex_pc) {} 2143 2144 virtual ~HTemplateInstruction() {} 2145 2146 size_t InputCount() const OVERRIDE { return 0; } 2147 2148 protected: 2149 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2150 LOG(FATAL) << "Unreachable"; 2151 UNREACHABLE(); 2152 } 2153 2154 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2155 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2156 LOG(FATAL) << "Unreachable"; 2157 UNREACHABLE(); 2158 } 2159 2160 private: 2161 friend class SsaBuilder; 2162}; 2163 2164template<intptr_t N> 2165class HExpression : public HTemplateInstruction<N> { 2166 public: 2167 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc) 2168 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 2169 virtual ~HExpression() {} 2170 2171 Primitive::Type GetType() const OVERRIDE { return type_; } 2172 2173 protected: 2174 Primitive::Type type_; 2175}; 2176 2177// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2178// instruction that branches to the exit block. 2179class HReturnVoid : public HTemplateInstruction<0> { 2180 public: 2181 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2182 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2183 2184 bool IsControlFlow() const OVERRIDE { return true; } 2185 2186 DECLARE_INSTRUCTION(ReturnVoid); 2187 2188 private: 2189 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2190}; 2191 2192// Represents dex's RETURN opcodes. A HReturn is a control flow 2193// instruction that branches to the exit block. 2194class HReturn : public HTemplateInstruction<1> { 2195 public: 2196 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2197 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2198 SetRawInputAt(0, value); 2199 } 2200 2201 bool IsControlFlow() const OVERRIDE { return true; } 2202 2203 DECLARE_INSTRUCTION(Return); 2204 2205 private: 2206 DISALLOW_COPY_AND_ASSIGN(HReturn); 2207}; 2208 2209// The exit instruction is the only instruction of the exit block. 2210// Instructions aborting the method (HThrow and HReturn) must branch to the 2211// exit block. 2212class HExit : public HTemplateInstruction<0> { 2213 public: 2214 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2215 2216 bool IsControlFlow() const OVERRIDE { return true; } 2217 2218 DECLARE_INSTRUCTION(Exit); 2219 2220 private: 2221 DISALLOW_COPY_AND_ASSIGN(HExit); 2222}; 2223 2224// Jumps from one block to another. 2225class HGoto : public HTemplateInstruction<0> { 2226 public: 2227 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2228 2229 bool IsControlFlow() const OVERRIDE { return true; } 2230 2231 HBasicBlock* GetSuccessor() const { 2232 return GetBlock()->GetSingleSuccessor(); 2233 } 2234 2235 DECLARE_INSTRUCTION(Goto); 2236 2237 private: 2238 DISALLOW_COPY_AND_ASSIGN(HGoto); 2239}; 2240 2241class HConstant : public HExpression<0> { 2242 public: 2243 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2244 : HExpression(type, SideEffects::None(), dex_pc) {} 2245 2246 bool CanBeMoved() const OVERRIDE { return true; } 2247 2248 virtual bool IsMinusOne() const { return false; } 2249 virtual bool IsZero() const { return false; } 2250 virtual bool IsOne() const { return false; } 2251 2252 virtual uint64_t GetValueAsUint64() const = 0; 2253 2254 DECLARE_INSTRUCTION(Constant); 2255 2256 private: 2257 DISALLOW_COPY_AND_ASSIGN(HConstant); 2258}; 2259 2260class HNullConstant : public HConstant { 2261 public: 2262 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2263 return true; 2264 } 2265 2266 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2267 2268 size_t ComputeHashCode() const OVERRIDE { return 0; } 2269 2270 DECLARE_INSTRUCTION(NullConstant); 2271 2272 private: 2273 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2274 2275 friend class HGraph; 2276 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2277}; 2278 2279// Constants of the type int. Those can be from Dex instructions, or 2280// synthesized (for example with the if-eqz instruction). 2281class HIntConstant : public HConstant { 2282 public: 2283 int32_t GetValue() const { return value_; } 2284 2285 uint64_t GetValueAsUint64() const OVERRIDE { 2286 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2287 } 2288 2289 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2290 DCHECK(other->IsIntConstant()); 2291 return other->AsIntConstant()->value_ == value_; 2292 } 2293 2294 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2295 2296 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2297 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2298 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2299 2300 DECLARE_INSTRUCTION(IntConstant); 2301 2302 private: 2303 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2304 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2305 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2306 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {} 2307 2308 const int32_t value_; 2309 2310 friend class HGraph; 2311 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2312 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2313 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2314}; 2315 2316class HLongConstant : public HConstant { 2317 public: 2318 int64_t GetValue() const { return value_; } 2319 2320 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2321 2322 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2323 DCHECK(other->IsLongConstant()); 2324 return other->AsLongConstant()->value_ == value_; 2325 } 2326 2327 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2328 2329 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2330 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2331 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2332 2333 DECLARE_INSTRUCTION(LongConstant); 2334 2335 private: 2336 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2337 : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {} 2338 2339 const int64_t value_; 2340 2341 friend class HGraph; 2342 DISALLOW_COPY_AND_ASSIGN(HLongConstant); 2343}; 2344 2345// Conditional branch. A block ending with an HIf instruction must have 2346// two successors. 2347class HIf : public HTemplateInstruction<1> { 2348 public: 2349 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc) 2350 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2351 SetRawInputAt(0, input); 2352 } 2353 2354 bool IsControlFlow() const OVERRIDE { return true; } 2355 2356 HBasicBlock* IfTrueSuccessor() const { 2357 return GetBlock()->GetSuccessors()[0]; 2358 } 2359 2360 HBasicBlock* IfFalseSuccessor() const { 2361 return GetBlock()->GetSuccessors()[1]; 2362 } 2363 2364 DECLARE_INSTRUCTION(If); 2365 2366 private: 2367 DISALLOW_COPY_AND_ASSIGN(HIf); 2368}; 2369 2370 2371// Abstract instruction which marks the beginning and/or end of a try block and 2372// links it to the respective exception handlers. Behaves the same as a Goto in 2373// non-exceptional control flow. 2374// Normal-flow successor is stored at index zero, exception handlers under 2375// higher indices in no particular order. 2376class HTryBoundary : public HTemplateInstruction<0> { 2377 public: 2378 enum BoundaryKind { 2379 kEntry, 2380 kExit, 2381 }; 2382 2383 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc) 2384 : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {} 2385 2386 bool IsControlFlow() const OVERRIDE { return true; } 2387 2388 // Returns the block's non-exceptional successor (index zero). 2389 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; } 2390 2391 // Returns whether `handler` is among its exception handlers (non-zero index 2392 // successors). 2393 bool HasExceptionHandler(const HBasicBlock& handler) const { 2394 DCHECK(handler.IsCatchBlock()); 2395 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2396 } 2397 2398 // If not present already, adds `handler` to its block's list of exception 2399 // handlers. 2400 void AddExceptionHandler(HBasicBlock* handler) { 2401 if (!HasExceptionHandler(*handler)) { 2402 GetBlock()->AddSuccessor(handler); 2403 } 2404 } 2405 2406 bool IsEntry() const { return kind_ == BoundaryKind::kEntry; } 2407 2408 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2409 2410 DECLARE_INSTRUCTION(TryBoundary); 2411 2412 private: 2413 const BoundaryKind kind_; 2414 2415 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2416}; 2417 2418// Iterator over exception handlers of a given HTryBoundary, i.e. over 2419// exceptional successors of its basic block. 2420class HExceptionHandlerIterator : public ValueObject { 2421 public: 2422 explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary) 2423 : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {} 2424 2425 bool Done() const { return index_ == block_.GetSuccessors().size(); } 2426 HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; } 2427 size_t CurrentSuccessorIndex() const { return index_; } 2428 void Advance() { ++index_; } 2429 2430 private: 2431 const HBasicBlock& block_; 2432 size_t index_; 2433 2434 DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator); 2435}; 2436 2437// Deoptimize to interpreter, upon checking a condition. 2438class HDeoptimize : public HTemplateInstruction<1> { 2439 public: 2440 explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc) 2441 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2442 SetRawInputAt(0, cond); 2443 } 2444 2445 bool NeedsEnvironment() const OVERRIDE { return true; } 2446 bool CanThrow() const OVERRIDE { return true; } 2447 2448 DECLARE_INSTRUCTION(Deoptimize); 2449 2450 private: 2451 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 2452}; 2453 2454// Represents the ArtMethod that was passed as a first argument to 2455// the method. It is used by instructions that depend on it, like 2456// instructions that work with the dex cache. 2457class HCurrentMethod : public HExpression<0> { 2458 public: 2459 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2460 : HExpression(type, SideEffects::None(), dex_pc) {} 2461 2462 DECLARE_INSTRUCTION(CurrentMethod); 2463 2464 private: 2465 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 2466}; 2467 2468// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will 2469// have one successor for each entry in the switch table, and the final successor 2470// will be the block containing the next Dex opcode. 2471class HPackedSwitch : public HTemplateInstruction<1> { 2472 public: 2473 HPackedSwitch(int32_t start_value, 2474 uint32_t num_entries, 2475 HInstruction* input, 2476 uint32_t dex_pc = kNoDexPc) 2477 : HTemplateInstruction(SideEffects::None(), dex_pc), 2478 start_value_(start_value), 2479 num_entries_(num_entries) { 2480 SetRawInputAt(0, input); 2481 } 2482 2483 bool IsControlFlow() const OVERRIDE { return true; } 2484 2485 int32_t GetStartValue() const { return start_value_; } 2486 2487 uint32_t GetNumEntries() const { return num_entries_; } 2488 2489 HBasicBlock* GetDefaultBlock() const { 2490 // Last entry is the default block. 2491 return GetBlock()->GetSuccessors()[num_entries_]; 2492 } 2493 DECLARE_INSTRUCTION(PackedSwitch); 2494 2495 private: 2496 const int32_t start_value_; 2497 const uint32_t num_entries_; 2498 2499 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2500}; 2501 2502class HUnaryOperation : public HExpression<1> { 2503 public: 2504 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2505 : HExpression(result_type, SideEffects::None(), dex_pc) { 2506 SetRawInputAt(0, input); 2507 } 2508 2509 HInstruction* GetInput() const { return InputAt(0); } 2510 Primitive::Type GetResultType() const { return GetType(); } 2511 2512 bool CanBeMoved() const OVERRIDE { return true; } 2513 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2514 return true; 2515 } 2516 2517 // Try to statically evaluate `operation` and return a HConstant 2518 // containing the result of this evaluation. If `operation` cannot 2519 // be evaluated as a constant, return null. 2520 HConstant* TryStaticEvaluation() const; 2521 2522 // Apply this operation to `x`. 2523 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2524 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2525 2526 DECLARE_INSTRUCTION(UnaryOperation); 2527 2528 private: 2529 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2530}; 2531 2532class HBinaryOperation : public HExpression<2> { 2533 public: 2534 HBinaryOperation(Primitive::Type result_type, 2535 HInstruction* left, 2536 HInstruction* right, 2537 SideEffects side_effects = SideEffects::None(), 2538 uint32_t dex_pc = kNoDexPc) 2539 : HExpression(result_type, side_effects, dex_pc) { 2540 SetRawInputAt(0, left); 2541 SetRawInputAt(1, right); 2542 } 2543 2544 HInstruction* GetLeft() const { return InputAt(0); } 2545 HInstruction* GetRight() const { return InputAt(1); } 2546 Primitive::Type GetResultType() const { return GetType(); } 2547 2548 virtual bool IsCommutative() const { return false; } 2549 2550 // Put constant on the right. 2551 // Returns whether order is changed. 2552 bool OrderInputsWithConstantOnTheRight() { 2553 HInstruction* left = InputAt(0); 2554 HInstruction* right = InputAt(1); 2555 if (left->IsConstant() && !right->IsConstant()) { 2556 ReplaceInput(right, 0); 2557 ReplaceInput(left, 1); 2558 return true; 2559 } 2560 return false; 2561 } 2562 2563 // Order inputs by instruction id, but favor constant on the right side. 2564 // This helps GVN for commutative ops. 2565 void OrderInputs() { 2566 DCHECK(IsCommutative()); 2567 HInstruction* left = InputAt(0); 2568 HInstruction* right = InputAt(1); 2569 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2570 return; 2571 } 2572 if (OrderInputsWithConstantOnTheRight()) { 2573 return; 2574 } 2575 // Order according to instruction id. 2576 if (left->GetId() > right->GetId()) { 2577 ReplaceInput(right, 0); 2578 ReplaceInput(left, 1); 2579 } 2580 } 2581 2582 bool CanBeMoved() const OVERRIDE { return true; } 2583 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2584 return true; 2585 } 2586 2587 // Try to statically evaluate `operation` and return a HConstant 2588 // containing the result of this evaluation. If `operation` cannot 2589 // be evaluated as a constant, return null. 2590 HConstant* TryStaticEvaluation() const; 2591 2592 // Apply this operation to `x` and `y`. 2593 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2594 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2595 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2596 HLongConstant* y ATTRIBUTE_UNUSED) const { 2597 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2598 return nullptr; 2599 } 2600 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2601 HIntConstant* y ATTRIBUTE_UNUSED) const { 2602 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2603 return nullptr; 2604 } 2605 2606 // Returns an input that can legally be used as the right input and is 2607 // constant, or null. 2608 HConstant* GetConstantRight() const; 2609 2610 // If `GetConstantRight()` returns one of the input, this returns the other 2611 // one. Otherwise it returns null. 2612 HInstruction* GetLeastConstantLeft() const; 2613 2614 DECLARE_INSTRUCTION(BinaryOperation); 2615 2616 private: 2617 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2618}; 2619 2620// The comparison bias applies for floating point operations and indicates how NaN 2621// comparisons are treated: 2622enum class ComparisonBias { 2623 kNoBias, // bias is not applicable (i.e. for long operation) 2624 kGtBias, // return 1 for NaN comparisons 2625 kLtBias, // return -1 for NaN comparisons 2626}; 2627 2628class HCondition : public HBinaryOperation { 2629 public: 2630 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2631 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2632 needs_materialization_(true), 2633 bias_(ComparisonBias::kNoBias) {} 2634 2635 bool NeedsMaterialization() const { return needs_materialization_; } 2636 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2637 2638 // For code generation purposes, returns whether this instruction is just before 2639 // `instruction`, and disregard moves in between. 2640 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2641 2642 DECLARE_INSTRUCTION(Condition); 2643 2644 virtual IfCondition GetCondition() const = 0; 2645 2646 virtual IfCondition GetOppositeCondition() const = 0; 2647 2648 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2649 2650 void SetBias(ComparisonBias bias) { bias_ = bias; } 2651 2652 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2653 return bias_ == other->AsCondition()->bias_; 2654 } 2655 2656 bool IsFPConditionTrueIfNaN() const { 2657 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2658 IfCondition if_cond = GetCondition(); 2659 return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2660 } 2661 2662 bool IsFPConditionFalseIfNaN() const { 2663 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2664 IfCondition if_cond = GetCondition(); 2665 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2666 } 2667 2668 private: 2669 // For register allocation purposes, returns whether this instruction needs to be 2670 // materialized (that is, not just be in the processor flags). 2671 bool needs_materialization_; 2672 2673 // Needed if we merge a HCompare into a HCondition. 2674 ComparisonBias bias_; 2675 2676 DISALLOW_COPY_AND_ASSIGN(HCondition); 2677}; 2678 2679// Instruction to check if two inputs are equal to each other. 2680class HEqual : public HCondition { 2681 public: 2682 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2683 : HCondition(first, second, dex_pc) {} 2684 2685 bool IsCommutative() const OVERRIDE { return true; } 2686 2687 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2688 return GetBlock()->GetGraph()->GetIntConstant( 2689 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2690 } 2691 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2692 return GetBlock()->GetGraph()->GetIntConstant( 2693 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2694 } 2695 2696 DECLARE_INSTRUCTION(Equal); 2697 2698 IfCondition GetCondition() const OVERRIDE { 2699 return kCondEQ; 2700 } 2701 2702 IfCondition GetOppositeCondition() const OVERRIDE { 2703 return kCondNE; 2704 } 2705 2706 private: 2707 template <typename T> bool Compute(T x, T y) const { return x == y; } 2708 2709 DISALLOW_COPY_AND_ASSIGN(HEqual); 2710}; 2711 2712class HNotEqual : public HCondition { 2713 public: 2714 HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2715 : HCondition(first, second, dex_pc) {} 2716 2717 bool IsCommutative() const OVERRIDE { return true; } 2718 2719 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2720 return GetBlock()->GetGraph()->GetIntConstant( 2721 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2722 } 2723 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2724 return GetBlock()->GetGraph()->GetIntConstant( 2725 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2726 } 2727 2728 DECLARE_INSTRUCTION(NotEqual); 2729 2730 IfCondition GetCondition() const OVERRIDE { 2731 return kCondNE; 2732 } 2733 2734 IfCondition GetOppositeCondition() const OVERRIDE { 2735 return kCondEQ; 2736 } 2737 2738 private: 2739 template <typename T> bool Compute(T x, T y) const { return x != y; } 2740 2741 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2742}; 2743 2744class HLessThan : public HCondition { 2745 public: 2746 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2747 : HCondition(first, second, dex_pc) {} 2748 2749 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2750 return GetBlock()->GetGraph()->GetIntConstant( 2751 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2752 } 2753 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2754 return GetBlock()->GetGraph()->GetIntConstant( 2755 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2756 } 2757 2758 DECLARE_INSTRUCTION(LessThan); 2759 2760 IfCondition GetCondition() const OVERRIDE { 2761 return kCondLT; 2762 } 2763 2764 IfCondition GetOppositeCondition() const OVERRIDE { 2765 return kCondGE; 2766 } 2767 2768 private: 2769 template <typename T> bool Compute(T x, T y) const { return x < y; } 2770 2771 DISALLOW_COPY_AND_ASSIGN(HLessThan); 2772}; 2773 2774class HLessThanOrEqual : public HCondition { 2775 public: 2776 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2777 : HCondition(first, second, dex_pc) {} 2778 2779 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2780 return GetBlock()->GetGraph()->GetIntConstant( 2781 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2782 } 2783 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2784 return GetBlock()->GetGraph()->GetIntConstant( 2785 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2786 } 2787 2788 DECLARE_INSTRUCTION(LessThanOrEqual); 2789 2790 IfCondition GetCondition() const OVERRIDE { 2791 return kCondLE; 2792 } 2793 2794 IfCondition GetOppositeCondition() const OVERRIDE { 2795 return kCondGT; 2796 } 2797 2798 private: 2799 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2800 2801 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual); 2802}; 2803 2804class HGreaterThan : public HCondition { 2805 public: 2806 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2807 : HCondition(first, second, dex_pc) {} 2808 2809 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2810 return GetBlock()->GetGraph()->GetIntConstant( 2811 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2812 } 2813 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2814 return GetBlock()->GetGraph()->GetIntConstant( 2815 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2816 } 2817 2818 DECLARE_INSTRUCTION(GreaterThan); 2819 2820 IfCondition GetCondition() const OVERRIDE { 2821 return kCondGT; 2822 } 2823 2824 IfCondition GetOppositeCondition() const OVERRIDE { 2825 return kCondLE; 2826 } 2827 2828 private: 2829 template <typename T> bool Compute(T x, T y) const { return x > y; } 2830 2831 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 2832}; 2833 2834class HGreaterThanOrEqual : public HCondition { 2835 public: 2836 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2837 : HCondition(first, second, dex_pc) {} 2838 2839 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2840 return GetBlock()->GetGraph()->GetIntConstant( 2841 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2842 } 2843 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2844 return GetBlock()->GetGraph()->GetIntConstant( 2845 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2846 } 2847 2848 DECLARE_INSTRUCTION(GreaterThanOrEqual); 2849 2850 IfCondition GetCondition() const OVERRIDE { 2851 return kCondGE; 2852 } 2853 2854 IfCondition GetOppositeCondition() const OVERRIDE { 2855 return kCondLT; 2856 } 2857 2858 private: 2859 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2860 2861 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2862}; 2863 2864class HBelow : public HCondition { 2865 public: 2866 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2867 : HCondition(first, second, dex_pc) {} 2868 2869 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2870 return GetBlock()->GetGraph()->GetIntConstant( 2871 Compute(static_cast<uint32_t>(x->GetValue()), 2872 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2873 } 2874 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2875 return GetBlock()->GetGraph()->GetIntConstant( 2876 Compute(static_cast<uint64_t>(x->GetValue()), 2877 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2878 } 2879 2880 DECLARE_INSTRUCTION(Below); 2881 2882 IfCondition GetCondition() const OVERRIDE { 2883 return kCondB; 2884 } 2885 2886 IfCondition GetOppositeCondition() const OVERRIDE { 2887 return kCondAE; 2888 } 2889 2890 private: 2891 template <typename T> bool Compute(T x, T y) const { return x < y; } 2892 2893 DISALLOW_COPY_AND_ASSIGN(HBelow); 2894}; 2895 2896class HBelowOrEqual : public HCondition { 2897 public: 2898 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2899 : HCondition(first, second, dex_pc) {} 2900 2901 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2902 return GetBlock()->GetGraph()->GetIntConstant( 2903 Compute(static_cast<uint32_t>(x->GetValue()), 2904 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2905 } 2906 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2907 return GetBlock()->GetGraph()->GetIntConstant( 2908 Compute(static_cast<uint64_t>(x->GetValue()), 2909 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2910 } 2911 2912 DECLARE_INSTRUCTION(BelowOrEqual); 2913 2914 IfCondition GetCondition() const OVERRIDE { 2915 return kCondBE; 2916 } 2917 2918 IfCondition GetOppositeCondition() const OVERRIDE { 2919 return kCondA; 2920 } 2921 2922 private: 2923 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2924 2925 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2926}; 2927 2928class HAbove : public HCondition { 2929 public: 2930 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2931 : HCondition(first, second, dex_pc) {} 2932 2933 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2934 return GetBlock()->GetGraph()->GetIntConstant( 2935 Compute(static_cast<uint32_t>(x->GetValue()), 2936 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2937 } 2938 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2939 return GetBlock()->GetGraph()->GetIntConstant( 2940 Compute(static_cast<uint64_t>(x->GetValue()), 2941 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2942 } 2943 2944 DECLARE_INSTRUCTION(Above); 2945 2946 IfCondition GetCondition() const OVERRIDE { 2947 return kCondA; 2948 } 2949 2950 IfCondition GetOppositeCondition() const OVERRIDE { 2951 return kCondBE; 2952 } 2953 2954 private: 2955 template <typename T> bool Compute(T x, T y) const { return x > y; } 2956 2957 DISALLOW_COPY_AND_ASSIGN(HAbove); 2958}; 2959 2960class HAboveOrEqual : public HCondition { 2961 public: 2962 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2963 : HCondition(first, second, dex_pc) {} 2964 2965 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2966 return GetBlock()->GetGraph()->GetIntConstant( 2967 Compute(static_cast<uint32_t>(x->GetValue()), 2968 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2969 } 2970 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2971 return GetBlock()->GetGraph()->GetIntConstant( 2972 Compute(static_cast<uint64_t>(x->GetValue()), 2973 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2974 } 2975 2976 DECLARE_INSTRUCTION(AboveOrEqual); 2977 2978 IfCondition GetCondition() const OVERRIDE { 2979 return kCondAE; 2980 } 2981 2982 IfCondition GetOppositeCondition() const OVERRIDE { 2983 return kCondB; 2984 } 2985 2986 private: 2987 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2988 2989 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 2990}; 2991 2992// Instruction to check how two inputs compare to each other. 2993// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 2994class HCompare : public HBinaryOperation { 2995 public: 2996 HCompare(Primitive::Type type, 2997 HInstruction* first, 2998 HInstruction* second, 2999 ComparisonBias bias, 3000 uint32_t dex_pc) 3001 : HBinaryOperation(Primitive::kPrimInt, 3002 first, 3003 second, 3004 SideEffectsForArchRuntimeCalls(type), 3005 dex_pc), 3006 bias_(bias) { 3007 DCHECK_EQ(type, first->GetType()); 3008 DCHECK_EQ(type, second->GetType()); 3009 } 3010 3011 template <typename T> 3012 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 3013 3014 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3015 return GetBlock()->GetGraph()->GetIntConstant( 3016 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3017 } 3018 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3019 return GetBlock()->GetGraph()->GetIntConstant( 3020 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3021 } 3022 3023 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3024 return bias_ == other->AsCompare()->bias_; 3025 } 3026 3027 ComparisonBias GetBias() const { return bias_; } 3028 3029 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3030 3031 3032 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3033 // MIPS64 uses a runtime call for FP comparisons. 3034 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3035 } 3036 3037 DECLARE_INSTRUCTION(Compare); 3038 3039 private: 3040 const ComparisonBias bias_; 3041 3042 DISALLOW_COPY_AND_ASSIGN(HCompare); 3043}; 3044 3045// A local in the graph. Corresponds to a Dex register. 3046class HLocal : public HTemplateInstruction<0> { 3047 public: 3048 explicit HLocal(uint16_t reg_number) 3049 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3050 3051 DECLARE_INSTRUCTION(Local); 3052 3053 uint16_t GetRegNumber() const { return reg_number_; } 3054 3055 private: 3056 // The Dex register number. 3057 const uint16_t reg_number_; 3058 3059 DISALLOW_COPY_AND_ASSIGN(HLocal); 3060}; 3061 3062// Load a given local. The local is an input of this instruction. 3063class HLoadLocal : public HExpression<1> { 3064 public: 3065 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3066 : HExpression(type, SideEffects::None(), dex_pc) { 3067 SetRawInputAt(0, local); 3068 } 3069 3070 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3071 3072 DECLARE_INSTRUCTION(LoadLocal); 3073 3074 private: 3075 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3076}; 3077 3078// Store a value in a given local. This instruction has two inputs: the value 3079// and the local. 3080class HStoreLocal : public HTemplateInstruction<2> { 3081 public: 3082 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3083 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3084 SetRawInputAt(0, local); 3085 SetRawInputAt(1, value); 3086 } 3087 3088 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3089 3090 DECLARE_INSTRUCTION(StoreLocal); 3091 3092 private: 3093 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3094}; 3095 3096class HFloatConstant : public HConstant { 3097 public: 3098 float GetValue() const { return value_; } 3099 3100 uint64_t GetValueAsUint64() const OVERRIDE { 3101 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3102 } 3103 3104 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3105 DCHECK(other->IsFloatConstant()); 3106 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3107 } 3108 3109 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3110 3111 bool IsMinusOne() const OVERRIDE { 3112 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3113 } 3114 bool IsZero() const OVERRIDE { 3115 return value_ == 0.0f; 3116 } 3117 bool IsOne() const OVERRIDE { 3118 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3119 } 3120 bool IsNaN() const { 3121 return std::isnan(value_); 3122 } 3123 3124 DECLARE_INSTRUCTION(FloatConstant); 3125 3126 private: 3127 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3128 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3129 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3130 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3131 3132 const float value_; 3133 3134 // Only the SsaBuilder and HGraph can create floating-point constants. 3135 friend class SsaBuilder; 3136 friend class HGraph; 3137 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3138}; 3139 3140class HDoubleConstant : public HConstant { 3141 public: 3142 double GetValue() const { return value_; } 3143 3144 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3145 3146 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3147 DCHECK(other->IsDoubleConstant()); 3148 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3149 } 3150 3151 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3152 3153 bool IsMinusOne() const OVERRIDE { 3154 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3155 } 3156 bool IsZero() const OVERRIDE { 3157 return value_ == 0.0; 3158 } 3159 bool IsOne() const OVERRIDE { 3160 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3161 } 3162 bool IsNaN() const { 3163 return std::isnan(value_); 3164 } 3165 3166 DECLARE_INSTRUCTION(DoubleConstant); 3167 3168 private: 3169 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3170 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3171 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3172 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3173 3174 const double value_; 3175 3176 // Only the SsaBuilder and HGraph can create floating-point constants. 3177 friend class SsaBuilder; 3178 friend class HGraph; 3179 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3180}; 3181 3182enum class Intrinsics { 3183#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name, 3184#include "intrinsics_list.h" 3185 kNone, 3186 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3187#undef INTRINSICS_LIST 3188#undef OPTIMIZING_INTRINSICS 3189}; 3190std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3191 3192enum IntrinsicNeedsEnvironmentOrCache { 3193 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3194 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3195}; 3196 3197class HInvoke : public HInstruction { 3198 public: 3199 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3200 3201 bool NeedsEnvironment() const OVERRIDE; 3202 3203 void SetArgumentAt(size_t index, HInstruction* argument) { 3204 SetRawInputAt(index, argument); 3205 } 3206 3207 // Return the number of arguments. This number can be lower than 3208 // the number of inputs returned by InputCount(), as some invoke 3209 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3210 // inputs at the end of their list of inputs. 3211 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3212 3213 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3214 3215 3216 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3217 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3218 3219 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3220 3221 Intrinsics GetIntrinsic() const { 3222 return intrinsic_; 3223 } 3224 3225 void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache); 3226 3227 bool IsFromInlinedInvoke() const { 3228 return GetEnvironment()->GetParent() != nullptr; 3229 } 3230 3231 bool CanThrow() const OVERRIDE { return true; } 3232 3233 uint32_t* GetIntrinsicOptimizations() { 3234 return &intrinsic_optimizations_; 3235 } 3236 3237 const uint32_t* GetIntrinsicOptimizations() const { 3238 return &intrinsic_optimizations_; 3239 } 3240 3241 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3242 3243 DECLARE_INSTRUCTION(Invoke); 3244 3245 protected: 3246 HInvoke(ArenaAllocator* arena, 3247 uint32_t number_of_arguments, 3248 uint32_t number_of_other_inputs, 3249 Primitive::Type return_type, 3250 uint32_t dex_pc, 3251 uint32_t dex_method_index, 3252 InvokeType original_invoke_type) 3253 : HInstruction( 3254 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 3255 number_of_arguments_(number_of_arguments), 3256 inputs_(number_of_arguments + number_of_other_inputs, 3257 arena->Adapter(kArenaAllocInvokeInputs)), 3258 return_type_(return_type), 3259 dex_method_index_(dex_method_index), 3260 original_invoke_type_(original_invoke_type), 3261 intrinsic_(Intrinsics::kNone), 3262 intrinsic_optimizations_(0) { 3263 } 3264 3265 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3266 return inputs_[index]; 3267 } 3268 3269 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3270 inputs_[index] = input; 3271 } 3272 3273 uint32_t number_of_arguments_; 3274 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3275 const Primitive::Type return_type_; 3276 const uint32_t dex_method_index_; 3277 const InvokeType original_invoke_type_; 3278 Intrinsics intrinsic_; 3279 3280 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3281 uint32_t intrinsic_optimizations_; 3282 3283 private: 3284 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3285}; 3286 3287class HInvokeUnresolved : public HInvoke { 3288 public: 3289 HInvokeUnresolved(ArenaAllocator* arena, 3290 uint32_t number_of_arguments, 3291 Primitive::Type return_type, 3292 uint32_t dex_pc, 3293 uint32_t dex_method_index, 3294 InvokeType invoke_type) 3295 : HInvoke(arena, 3296 number_of_arguments, 3297 0u /* number_of_other_inputs */, 3298 return_type, 3299 dex_pc, 3300 dex_method_index, 3301 invoke_type) { 3302 } 3303 3304 DECLARE_INSTRUCTION(InvokeUnresolved); 3305 3306 private: 3307 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3308}; 3309 3310class HInvokeStaticOrDirect : public HInvoke { 3311 public: 3312 // Requirements of this method call regarding the class 3313 // initialization (clinit) check of its declaring class. 3314 enum class ClinitCheckRequirement { 3315 kNone, // Class already initialized. 3316 kExplicit, // Static call having explicit clinit check as last input. 3317 kImplicit, // Static call implicitly requiring a clinit check. 3318 }; 3319 3320 // Determines how to load the target ArtMethod*. 3321 enum class MethodLoadKind { 3322 // Use a String init ArtMethod* loaded from Thread entrypoints. 3323 kStringInit, 3324 3325 // Use the method's own ArtMethod* loaded by the register allocator. 3326 kRecursive, 3327 3328 // Use ArtMethod* at a known address, embed the direct address in the code. 3329 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3330 kDirectAddress, 3331 3332 // Use ArtMethod* at an address that will be known at link time, embed the direct 3333 // address in the code. If the image is relocatable, emit .patch_oat entry. 3334 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3335 // the image relocatable or not. 3336 kDirectAddressWithFixup, 3337 3338 // Load from resoved methods array in the dex cache using a PC-relative load. 3339 // Used when we need to use the dex cache, for example for invoke-static that 3340 // may cause class initialization (the entry may point to a resolution method), 3341 // and we know that we can access the dex cache arrays using a PC-relative load. 3342 kDexCachePcRelative, 3343 3344 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3345 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3346 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3347 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3348 kDexCacheViaMethod, 3349 }; 3350 3351 // Determines the location of the code pointer. 3352 enum class CodePtrLocation { 3353 // Recursive call, use local PC-relative call instruction. 3354 kCallSelf, 3355 3356 // Use PC-relative call instruction patched at link time. 3357 // Used for calls within an oat file, boot->boot or app->app. 3358 kCallPCRelative, 3359 3360 // Call to a known target address, embed the direct address in code. 3361 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3362 kCallDirect, 3363 3364 // Call to a target address that will be known at link time, embed the direct 3365 // address in code. If the image is relocatable, emit .patch_oat entry. 3366 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3367 // the image relocatable or not. 3368 kCallDirectWithFixup, 3369 3370 // Use code pointer from the ArtMethod*. 3371 // Used when we don't know the target code. This is also the last-resort-kind used when 3372 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3373 kCallArtMethod, 3374 }; 3375 3376 struct DispatchInfo { 3377 MethodLoadKind method_load_kind; 3378 CodePtrLocation code_ptr_location; 3379 // The method load data holds 3380 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3381 // Note that there are multiple string init methods, each having its own offset. 3382 // - the method address for kDirectAddress 3383 // - the dex cache arrays offset for kDexCachePcRel. 3384 uint64_t method_load_data; 3385 uint64_t direct_code_ptr; 3386 }; 3387 3388 HInvokeStaticOrDirect(ArenaAllocator* arena, 3389 uint32_t number_of_arguments, 3390 Primitive::Type return_type, 3391 uint32_t dex_pc, 3392 uint32_t method_index, 3393 MethodReference target_method, 3394 DispatchInfo dispatch_info, 3395 InvokeType original_invoke_type, 3396 InvokeType invoke_type, 3397 ClinitCheckRequirement clinit_check_requirement) 3398 : HInvoke(arena, 3399 number_of_arguments, 3400 // There is one extra argument for the HCurrentMethod node, and 3401 // potentially one other if the clinit check is explicit, and one other 3402 // if the method is a string factory. 3403 1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) 3404 + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3405 return_type, 3406 dex_pc, 3407 method_index, 3408 original_invoke_type), 3409 invoke_type_(invoke_type), 3410 clinit_check_requirement_(clinit_check_requirement), 3411 target_method_(target_method), 3412 dispatch_info_(dispatch_info) {} 3413 3414 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 3415 dispatch_info_ = dispatch_info; 3416 } 3417 3418 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3419 // We access the method via the dex cache so we can't do an implicit null check. 3420 // TODO: for intrinsics we can generate implicit null checks. 3421 return false; 3422 } 3423 3424 bool CanBeNull() const OVERRIDE { 3425 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3426 } 3427 3428 InvokeType GetInvokeType() const { return invoke_type_; } 3429 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3430 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3431 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3432 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE; 3433 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3434 uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); } 3435 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3436 bool HasPcRelDexCache() const { 3437 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; 3438 } 3439 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3440 MethodReference GetTargetMethod() const { return target_method_; } 3441 3442 int32_t GetStringInitOffset() const { 3443 DCHECK(IsStringInit()); 3444 return dispatch_info_.method_load_data; 3445 } 3446 3447 uint64_t GetMethodAddress() const { 3448 DCHECK(HasMethodAddress()); 3449 return dispatch_info_.method_load_data; 3450 } 3451 3452 uint32_t GetDexCacheArrayOffset() const { 3453 DCHECK(HasPcRelDexCache()); 3454 return dispatch_info_.method_load_data; 3455 } 3456 3457 uint64_t GetDirectCodePtr() const { 3458 DCHECK(HasDirectCodePtr()); 3459 return dispatch_info_.direct_code_ptr; 3460 } 3461 3462 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3463 3464 // Is this instruction a call to a static method? 3465 bool IsStatic() const { 3466 return GetInvokeType() == kStatic; 3467 } 3468 3469 // Remove the art::HLoadClass instruction set as last input by 3470 // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of 3471 // the initial art::HClinitCheck instruction (only relevant for 3472 // static calls with explicit clinit check). 3473 void RemoveLoadClassAsLastInput() { 3474 DCHECK(IsStaticWithExplicitClinitCheck()); 3475 size_t last_input_index = InputCount() - 1; 3476 HInstruction* last_input = InputAt(last_input_index); 3477 DCHECK(last_input != nullptr); 3478 DCHECK(last_input->IsLoadClass()) << last_input->DebugName(); 3479 RemoveAsUserOfInput(last_input_index); 3480 inputs_.pop_back(); 3481 clinit_check_requirement_ = ClinitCheckRequirement::kImplicit; 3482 DCHECK(IsStaticWithImplicitClinitCheck()); 3483 } 3484 3485 bool IsStringFactoryFor(HFakeString* str) const { 3486 if (!IsStringInit()) return false; 3487 // +1 for the current method. 3488 if (InputCount() == (number_of_arguments_ + 1)) return false; 3489 return InputAt(InputCount() - 1)->AsFakeString() == str; 3490 } 3491 3492 void RemoveFakeStringArgumentAsLastInput() { 3493 DCHECK(IsStringInit()); 3494 size_t last_input_index = InputCount() - 1; 3495 HInstruction* last_input = InputAt(last_input_index); 3496 DCHECK(last_input != nullptr); 3497 DCHECK(last_input->IsFakeString()) << last_input->DebugName(); 3498 RemoveAsUserOfInput(last_input_index); 3499 inputs_.pop_back(); 3500 } 3501 3502 // Is this a call to a static method whose declaring class has an 3503 // explicit intialization check in the graph? 3504 bool IsStaticWithExplicitClinitCheck() const { 3505 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3506 } 3507 3508 // Is this a call to a static method whose declaring class has an 3509 // implicit intialization check requirement? 3510 bool IsStaticWithImplicitClinitCheck() const { 3511 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3512 } 3513 3514 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3515 3516 protected: 3517 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3518 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3519 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3520 HInstruction* input = input_record.GetInstruction(); 3521 // `input` is the last input of a static invoke marked as having 3522 // an explicit clinit check. It must either be: 3523 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3524 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3525 DCHECK(input != nullptr); 3526 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3527 } 3528 return input_record; 3529 } 3530 3531 private: 3532 const InvokeType invoke_type_; 3533 ClinitCheckRequirement clinit_check_requirement_; 3534 // The target method may refer to different dex file or method index than the original 3535 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3536 // in derived class to increase visibility. 3537 MethodReference target_method_; 3538 DispatchInfo dispatch_info_; 3539 3540 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3541}; 3542 3543class HInvokeVirtual : public HInvoke { 3544 public: 3545 HInvokeVirtual(ArenaAllocator* arena, 3546 uint32_t number_of_arguments, 3547 Primitive::Type return_type, 3548 uint32_t dex_pc, 3549 uint32_t dex_method_index, 3550 uint32_t vtable_index) 3551 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3552 vtable_index_(vtable_index) {} 3553 3554 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3555 // TODO: Add implicit null checks in intrinsics. 3556 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3557 } 3558 3559 uint32_t GetVTableIndex() const { return vtable_index_; } 3560 3561 DECLARE_INSTRUCTION(InvokeVirtual); 3562 3563 private: 3564 const uint32_t vtable_index_; 3565 3566 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3567}; 3568 3569class HInvokeInterface : public HInvoke { 3570 public: 3571 HInvokeInterface(ArenaAllocator* arena, 3572 uint32_t number_of_arguments, 3573 Primitive::Type return_type, 3574 uint32_t dex_pc, 3575 uint32_t dex_method_index, 3576 uint32_t imt_index) 3577 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3578 imt_index_(imt_index) {} 3579 3580 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3581 // TODO: Add implicit null checks in intrinsics. 3582 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3583 } 3584 3585 uint32_t GetImtIndex() const { return imt_index_; } 3586 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3587 3588 DECLARE_INSTRUCTION(InvokeInterface); 3589 3590 private: 3591 const uint32_t imt_index_; 3592 3593 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3594}; 3595 3596class HNewInstance : public HExpression<1> { 3597 public: 3598 HNewInstance(HCurrentMethod* current_method, 3599 uint32_t dex_pc, 3600 uint16_t type_index, 3601 const DexFile& dex_file, 3602 QuickEntrypointEnum entrypoint) 3603 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3604 type_index_(type_index), 3605 dex_file_(dex_file), 3606 entrypoint_(entrypoint) { 3607 SetRawInputAt(0, current_method); 3608 } 3609 3610 uint16_t GetTypeIndex() const { return type_index_; } 3611 const DexFile& GetDexFile() const { return dex_file_; } 3612 3613 // Calls runtime so needs an environment. 3614 bool NeedsEnvironment() const OVERRIDE { return true; } 3615 // It may throw when called on: 3616 // - interfaces 3617 // - abstract/innaccessible/unknown classes 3618 // TODO: optimize when possible. 3619 bool CanThrow() const OVERRIDE { return true; } 3620 3621 bool CanBeNull() const OVERRIDE { return false; } 3622 3623 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3624 3625 DECLARE_INSTRUCTION(NewInstance); 3626 3627 private: 3628 const uint16_t type_index_; 3629 const DexFile& dex_file_; 3630 const QuickEntrypointEnum entrypoint_; 3631 3632 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3633}; 3634 3635class HNeg : public HUnaryOperation { 3636 public: 3637 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3638 : HUnaryOperation(result_type, input, dex_pc) {} 3639 3640 template <typename T> T Compute(T x) const { return -x; } 3641 3642 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3643 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3644 } 3645 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3646 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3647 } 3648 3649 DECLARE_INSTRUCTION(Neg); 3650 3651 private: 3652 DISALLOW_COPY_AND_ASSIGN(HNeg); 3653}; 3654 3655class HNewArray : public HExpression<2> { 3656 public: 3657 HNewArray(HInstruction* length, 3658 HCurrentMethod* current_method, 3659 uint32_t dex_pc, 3660 uint16_t type_index, 3661 const DexFile& dex_file, 3662 QuickEntrypointEnum entrypoint) 3663 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3664 type_index_(type_index), 3665 dex_file_(dex_file), 3666 entrypoint_(entrypoint) { 3667 SetRawInputAt(0, length); 3668 SetRawInputAt(1, current_method); 3669 } 3670 3671 uint16_t GetTypeIndex() const { return type_index_; } 3672 const DexFile& GetDexFile() const { return dex_file_; } 3673 3674 // Calls runtime so needs an environment. 3675 bool NeedsEnvironment() const OVERRIDE { return true; } 3676 3677 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 3678 bool CanThrow() const OVERRIDE { return true; } 3679 3680 bool CanBeNull() const OVERRIDE { return false; } 3681 3682 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3683 3684 DECLARE_INSTRUCTION(NewArray); 3685 3686 private: 3687 const uint16_t type_index_; 3688 const DexFile& dex_file_; 3689 const QuickEntrypointEnum entrypoint_; 3690 3691 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3692}; 3693 3694class HAdd : public HBinaryOperation { 3695 public: 3696 HAdd(Primitive::Type result_type, 3697 HInstruction* left, 3698 HInstruction* right, 3699 uint32_t dex_pc = kNoDexPc) 3700 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3701 3702 bool IsCommutative() const OVERRIDE { return true; } 3703 3704 template <typename T> T Compute(T x, T y) const { return x + y; } 3705 3706 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3707 return GetBlock()->GetGraph()->GetIntConstant( 3708 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3709 } 3710 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3711 return GetBlock()->GetGraph()->GetLongConstant( 3712 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3713 } 3714 3715 DECLARE_INSTRUCTION(Add); 3716 3717 private: 3718 DISALLOW_COPY_AND_ASSIGN(HAdd); 3719}; 3720 3721class HSub : public HBinaryOperation { 3722 public: 3723 HSub(Primitive::Type result_type, 3724 HInstruction* left, 3725 HInstruction* right, 3726 uint32_t dex_pc = kNoDexPc) 3727 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3728 3729 template <typename T> T Compute(T x, T y) const { return x - y; } 3730 3731 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3732 return GetBlock()->GetGraph()->GetIntConstant( 3733 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3734 } 3735 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3736 return GetBlock()->GetGraph()->GetLongConstant( 3737 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3738 } 3739 3740 DECLARE_INSTRUCTION(Sub); 3741 3742 private: 3743 DISALLOW_COPY_AND_ASSIGN(HSub); 3744}; 3745 3746class HMul : public HBinaryOperation { 3747 public: 3748 HMul(Primitive::Type result_type, 3749 HInstruction* left, 3750 HInstruction* right, 3751 uint32_t dex_pc = kNoDexPc) 3752 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3753 3754 bool IsCommutative() const OVERRIDE { return true; } 3755 3756 template <typename T> T Compute(T x, T y) const { return x * y; } 3757 3758 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3759 return GetBlock()->GetGraph()->GetIntConstant( 3760 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3761 } 3762 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3763 return GetBlock()->GetGraph()->GetLongConstant( 3764 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3765 } 3766 3767 DECLARE_INSTRUCTION(Mul); 3768 3769 private: 3770 DISALLOW_COPY_AND_ASSIGN(HMul); 3771}; 3772 3773class HDiv : public HBinaryOperation { 3774 public: 3775 HDiv(Primitive::Type result_type, 3776 HInstruction* left, 3777 HInstruction* right, 3778 uint32_t dex_pc) 3779 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3780 3781 template <typename T> 3782 T Compute(T x, T y) const { 3783 // Our graph structure ensures we never have 0 for `y` during 3784 // constant folding. 3785 DCHECK_NE(y, 0); 3786 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3787 return (y == -1) ? -x : x / y; 3788 } 3789 3790 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3791 return GetBlock()->GetGraph()->GetIntConstant( 3792 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3793 } 3794 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3795 return GetBlock()->GetGraph()->GetLongConstant( 3796 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3797 } 3798 3799 static SideEffects SideEffectsForArchRuntimeCalls() { 3800 // The generated code can use a runtime call. 3801 return SideEffects::CanTriggerGC(); 3802 } 3803 3804 DECLARE_INSTRUCTION(Div); 3805 3806 private: 3807 DISALLOW_COPY_AND_ASSIGN(HDiv); 3808}; 3809 3810class HRem : public HBinaryOperation { 3811 public: 3812 HRem(Primitive::Type result_type, 3813 HInstruction* left, 3814 HInstruction* right, 3815 uint32_t dex_pc) 3816 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3817 3818 template <typename T> 3819 T Compute(T x, T y) const { 3820 // Our graph structure ensures we never have 0 for `y` during 3821 // constant folding. 3822 DCHECK_NE(y, 0); 3823 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3824 return (y == -1) ? 0 : x % y; 3825 } 3826 3827 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3828 return GetBlock()->GetGraph()->GetIntConstant( 3829 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3830 } 3831 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3832 return GetBlock()->GetGraph()->GetLongConstant( 3833 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3834 } 3835 3836 3837 static SideEffects SideEffectsForArchRuntimeCalls() { 3838 return SideEffects::CanTriggerGC(); 3839 } 3840 3841 DECLARE_INSTRUCTION(Rem); 3842 3843 private: 3844 DISALLOW_COPY_AND_ASSIGN(HRem); 3845}; 3846 3847class HDivZeroCheck : public HExpression<1> { 3848 public: 3849 HDivZeroCheck(HInstruction* value, uint32_t dex_pc) 3850 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 3851 SetRawInputAt(0, value); 3852 } 3853 3854 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); } 3855 3856 bool CanBeMoved() const OVERRIDE { return true; } 3857 3858 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 3859 return true; 3860 } 3861 3862 bool NeedsEnvironment() const OVERRIDE { return true; } 3863 bool CanThrow() const OVERRIDE { return true; } 3864 3865 DECLARE_INSTRUCTION(DivZeroCheck); 3866 3867 private: 3868 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck); 3869}; 3870 3871class HShl : public HBinaryOperation { 3872 public: 3873 HShl(Primitive::Type result_type, 3874 HInstruction* left, 3875 HInstruction* right, 3876 uint32_t dex_pc = kNoDexPc) 3877 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3878 3879 template <typename T, typename U, typename V> 3880 T Compute(T x, U y, V max_shift_value) const { 3881 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3882 "V is not the unsigned integer type corresponding to T"); 3883 return x << (y & max_shift_value); 3884 } 3885 3886 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3887 return GetBlock()->GetGraph()->GetIntConstant( 3888 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3889 } 3890 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3891 // case is handled as `x << static_cast<int>(y)`. 3892 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3893 return GetBlock()->GetGraph()->GetLongConstant( 3894 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3895 } 3896 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3897 return GetBlock()->GetGraph()->GetLongConstant( 3898 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3899 } 3900 3901 DECLARE_INSTRUCTION(Shl); 3902 3903 private: 3904 DISALLOW_COPY_AND_ASSIGN(HShl); 3905}; 3906 3907class HShr : public HBinaryOperation { 3908 public: 3909 HShr(Primitive::Type result_type, 3910 HInstruction* left, 3911 HInstruction* right, 3912 uint32_t dex_pc = kNoDexPc) 3913 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3914 3915 template <typename T, typename U, typename V> 3916 T Compute(T x, U y, V max_shift_value) const { 3917 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3918 "V is not the unsigned integer type corresponding to T"); 3919 return x >> (y & max_shift_value); 3920 } 3921 3922 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3923 return GetBlock()->GetGraph()->GetIntConstant( 3924 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3925 } 3926 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3927 // case is handled as `x >> static_cast<int>(y)`. 3928 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3929 return GetBlock()->GetGraph()->GetLongConstant( 3930 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3931 } 3932 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3933 return GetBlock()->GetGraph()->GetLongConstant( 3934 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3935 } 3936 3937 DECLARE_INSTRUCTION(Shr); 3938 3939 private: 3940 DISALLOW_COPY_AND_ASSIGN(HShr); 3941}; 3942 3943class HUShr : public HBinaryOperation { 3944 public: 3945 HUShr(Primitive::Type result_type, 3946 HInstruction* left, 3947 HInstruction* right, 3948 uint32_t dex_pc = kNoDexPc) 3949 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3950 3951 template <typename T, typename U, typename V> 3952 T Compute(T x, U y, V max_shift_value) const { 3953 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3954 "V is not the unsigned integer type corresponding to T"); 3955 V ux = static_cast<V>(x); 3956 return static_cast<T>(ux >> (y & max_shift_value)); 3957 } 3958 3959 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3960 return GetBlock()->GetGraph()->GetIntConstant( 3961 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3962 } 3963 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3964 // case is handled as `x >>> static_cast<int>(y)`. 3965 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3966 return GetBlock()->GetGraph()->GetLongConstant( 3967 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3968 } 3969 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3970 return GetBlock()->GetGraph()->GetLongConstant( 3971 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3972 } 3973 3974 DECLARE_INSTRUCTION(UShr); 3975 3976 private: 3977 DISALLOW_COPY_AND_ASSIGN(HUShr); 3978}; 3979 3980class HAnd : public HBinaryOperation { 3981 public: 3982 HAnd(Primitive::Type result_type, 3983 HInstruction* left, 3984 HInstruction* right, 3985 uint32_t dex_pc = kNoDexPc) 3986 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3987 3988 bool IsCommutative() const OVERRIDE { return true; } 3989 3990 template <typename T, typename U> 3991 auto Compute(T x, U y) const -> decltype(x & y) { return x & y; } 3992 3993 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3994 return GetBlock()->GetGraph()->GetIntConstant( 3995 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3996 } 3997 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 3998 return GetBlock()->GetGraph()->GetLongConstant( 3999 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4000 } 4001 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4002 return GetBlock()->GetGraph()->GetLongConstant( 4003 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4004 } 4005 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4006 return GetBlock()->GetGraph()->GetLongConstant( 4007 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4008 } 4009 4010 DECLARE_INSTRUCTION(And); 4011 4012 private: 4013 DISALLOW_COPY_AND_ASSIGN(HAnd); 4014}; 4015 4016class HOr : public HBinaryOperation { 4017 public: 4018 HOr(Primitive::Type result_type, 4019 HInstruction* left, 4020 HInstruction* right, 4021 uint32_t dex_pc = kNoDexPc) 4022 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4023 4024 bool IsCommutative() const OVERRIDE { return true; } 4025 4026 template <typename T, typename U> 4027 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 4028 4029 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4030 return GetBlock()->GetGraph()->GetIntConstant( 4031 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4032 } 4033 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4034 return GetBlock()->GetGraph()->GetLongConstant( 4035 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4036 } 4037 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4038 return GetBlock()->GetGraph()->GetLongConstant( 4039 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4040 } 4041 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4042 return GetBlock()->GetGraph()->GetLongConstant( 4043 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4044 } 4045 4046 DECLARE_INSTRUCTION(Or); 4047 4048 private: 4049 DISALLOW_COPY_AND_ASSIGN(HOr); 4050}; 4051 4052class HXor : public HBinaryOperation { 4053 public: 4054 HXor(Primitive::Type result_type, 4055 HInstruction* left, 4056 HInstruction* right, 4057 uint32_t dex_pc = kNoDexPc) 4058 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4059 4060 bool IsCommutative() const OVERRIDE { return true; } 4061 4062 template <typename T, typename U> 4063 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 4064 4065 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4066 return GetBlock()->GetGraph()->GetIntConstant( 4067 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4068 } 4069 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4070 return GetBlock()->GetGraph()->GetLongConstant( 4071 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4072 } 4073 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4074 return GetBlock()->GetGraph()->GetLongConstant( 4075 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4076 } 4077 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4078 return GetBlock()->GetGraph()->GetLongConstant( 4079 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4080 } 4081 4082 DECLARE_INSTRUCTION(Xor); 4083 4084 private: 4085 DISALLOW_COPY_AND_ASSIGN(HXor); 4086}; 4087 4088// The value of a parameter in this method. Its location depends on 4089// the calling convention. 4090class HParameterValue : public HExpression<0> { 4091 public: 4092 HParameterValue(const DexFile& dex_file, 4093 uint16_t type_index, 4094 uint8_t index, 4095 Primitive::Type parameter_type, 4096 bool is_this = false) 4097 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 4098 dex_file_(dex_file), 4099 type_index_(type_index), 4100 index_(index), 4101 is_this_(is_this), 4102 can_be_null_(!is_this) {} 4103 4104 const DexFile& GetDexFile() const { return dex_file_; } 4105 uint16_t GetTypeIndex() const { return type_index_; } 4106 uint8_t GetIndex() const { return index_; } 4107 bool IsThis() const { return is_this_; } 4108 4109 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4110 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4111 4112 DECLARE_INSTRUCTION(ParameterValue); 4113 4114 private: 4115 const DexFile& dex_file_; 4116 const uint16_t type_index_; 4117 // The index of this parameter in the parameters list. Must be less 4118 // than HGraph::number_of_in_vregs_. 4119 const uint8_t index_; 4120 4121 // Whether or not the parameter value corresponds to 'this' argument. 4122 const bool is_this_; 4123 4124 bool can_be_null_; 4125 4126 DISALLOW_COPY_AND_ASSIGN(HParameterValue); 4127}; 4128 4129class HNot : public HUnaryOperation { 4130 public: 4131 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 4132 : HUnaryOperation(result_type, input, dex_pc) {} 4133 4134 bool CanBeMoved() const OVERRIDE { return true; } 4135 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4136 return true; 4137 } 4138 4139 template <typename T> T Compute(T x) const { return ~x; } 4140 4141 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4142 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4143 } 4144 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 4145 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 4146 } 4147 4148 DECLARE_INSTRUCTION(Not); 4149 4150 private: 4151 DISALLOW_COPY_AND_ASSIGN(HNot); 4152}; 4153 4154class HBooleanNot : public HUnaryOperation { 4155 public: 4156 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc) 4157 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {} 4158 4159 bool CanBeMoved() const OVERRIDE { return true; } 4160 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4161 return true; 4162 } 4163 4164 template <typename T> bool Compute(T x) const { 4165 DCHECK(IsUint<1>(x)); 4166 return !x; 4167 } 4168 4169 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4170 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4171 } 4172 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE { 4173 LOG(FATAL) << DebugName() << " is not defined for long values"; 4174 UNREACHABLE(); 4175 } 4176 4177 DECLARE_INSTRUCTION(BooleanNot); 4178 4179 private: 4180 DISALLOW_COPY_AND_ASSIGN(HBooleanNot); 4181}; 4182 4183class HTypeConversion : public HExpression<1> { 4184 public: 4185 // Instantiate a type conversion of `input` to `result_type`. 4186 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc) 4187 : HExpression(result_type, 4188 SideEffectsForArchRuntimeCalls(input->GetType(), result_type), 4189 dex_pc) { 4190 SetRawInputAt(0, input); 4191 DCHECK_NE(input->GetType(), result_type); 4192 } 4193 4194 HInstruction* GetInput() const { return InputAt(0); } 4195 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 4196 Primitive::Type GetResultType() const { return GetType(); } 4197 4198 // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls 4199 // to the runtime. 4200 4201 bool CanBeMoved() const OVERRIDE { return true; } 4202 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; } 4203 4204 // Try to statically evaluate the conversion and return a HConstant 4205 // containing the result. If the input cannot be converted, return nullptr. 4206 HConstant* TryStaticEvaluation() const; 4207 4208 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type, 4209 Primitive::Type result_type) { 4210 // Some architectures may not require the 'GC' side effects, but at this point 4211 // in the compilation process we do not know what architecture we will 4212 // generate code for, so we must be conservative. 4213 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type)) 4214 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) { 4215 return SideEffects::CanTriggerGC(); 4216 } 4217 return SideEffects::None(); 4218 } 4219 4220 DECLARE_INSTRUCTION(TypeConversion); 4221 4222 private: 4223 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 4224}; 4225 4226static constexpr uint32_t kNoRegNumber = -1; 4227 4228class HPhi : public HInstruction { 4229 public: 4230 HPhi(ArenaAllocator* arena, 4231 uint32_t reg_number, 4232 size_t number_of_inputs, 4233 Primitive::Type type, 4234 uint32_t dex_pc = kNoDexPc) 4235 : HInstruction(SideEffects::None(), dex_pc), 4236 inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)), 4237 reg_number_(reg_number), 4238 type_(type), 4239 is_live_(false), 4240 can_be_null_(true) { 4241 } 4242 4243 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold. 4244 static Primitive::Type ToPhiType(Primitive::Type type) { 4245 switch (type) { 4246 case Primitive::kPrimBoolean: 4247 case Primitive::kPrimByte: 4248 case Primitive::kPrimShort: 4249 case Primitive::kPrimChar: 4250 return Primitive::kPrimInt; 4251 default: 4252 return type; 4253 } 4254 } 4255 4256 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); } 4257 4258 size_t InputCount() const OVERRIDE { return inputs_.size(); } 4259 4260 void AddInput(HInstruction* input); 4261 void RemoveInputAt(size_t index); 4262 4263 Primitive::Type GetType() const OVERRIDE { return type_; } 4264 void SetType(Primitive::Type type) { type_ = type; } 4265 4266 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4267 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4268 4269 uint32_t GetRegNumber() const { return reg_number_; } 4270 4271 void SetDead() { is_live_ = false; } 4272 void SetLive() { is_live_ = true; } 4273 bool IsDead() const { return !is_live_; } 4274 bool IsLive() const { return is_live_; } 4275 4276 bool IsVRegEquivalentOf(HInstruction* other) const { 4277 return other != nullptr 4278 && other->IsPhi() 4279 && other->AsPhi()->GetBlock() == GetBlock() 4280 && other->AsPhi()->GetRegNumber() == GetRegNumber(); 4281 } 4282 4283 // Returns the next equivalent phi (starting from the current one) or null if there is none. 4284 // An equivalent phi is a phi having the same dex register and type. 4285 // It assumes that phis with the same dex register are adjacent. 4286 HPhi* GetNextEquivalentPhiWithSameType() { 4287 HInstruction* next = GetNext(); 4288 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 4289 if (next->GetType() == GetType()) { 4290 return next->AsPhi(); 4291 } 4292 next = next->GetNext(); 4293 } 4294 return nullptr; 4295 } 4296 4297 DECLARE_INSTRUCTION(Phi); 4298 4299 protected: 4300 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 4301 return inputs_[index]; 4302 } 4303 4304 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 4305 inputs_[index] = input; 4306 } 4307 4308 private: 4309 ArenaVector<HUserRecord<HInstruction*> > inputs_; 4310 const uint32_t reg_number_; 4311 Primitive::Type type_; 4312 bool is_live_; 4313 bool can_be_null_; 4314 4315 DISALLOW_COPY_AND_ASSIGN(HPhi); 4316}; 4317 4318class HNullCheck : public HExpression<1> { 4319 public: 4320 HNullCheck(HInstruction* value, uint32_t dex_pc) 4321 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 4322 SetRawInputAt(0, value); 4323 } 4324 4325 bool CanBeMoved() const OVERRIDE { return true; } 4326 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4327 return true; 4328 } 4329 4330 bool NeedsEnvironment() const OVERRIDE { return true; } 4331 4332 bool CanThrow() const OVERRIDE { return true; } 4333 4334 bool CanBeNull() const OVERRIDE { return false; } 4335 4336 4337 DECLARE_INSTRUCTION(NullCheck); 4338 4339 private: 4340 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 4341}; 4342 4343class FieldInfo : public ValueObject { 4344 public: 4345 FieldInfo(MemberOffset field_offset, 4346 Primitive::Type field_type, 4347 bool is_volatile, 4348 uint32_t index, 4349 uint16_t declaring_class_def_index, 4350 const DexFile& dex_file, 4351 Handle<mirror::DexCache> dex_cache) 4352 : field_offset_(field_offset), 4353 field_type_(field_type), 4354 is_volatile_(is_volatile), 4355 index_(index), 4356 declaring_class_def_index_(declaring_class_def_index), 4357 dex_file_(dex_file), 4358 dex_cache_(dex_cache) {} 4359 4360 MemberOffset GetFieldOffset() const { return field_offset_; } 4361 Primitive::Type GetFieldType() const { return field_type_; } 4362 uint32_t GetFieldIndex() const { return index_; } 4363 uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;} 4364 const DexFile& GetDexFile() const { return dex_file_; } 4365 bool IsVolatile() const { return is_volatile_; } 4366 Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; } 4367 4368 private: 4369 const MemberOffset field_offset_; 4370 const Primitive::Type field_type_; 4371 const bool is_volatile_; 4372 const uint32_t index_; 4373 const uint16_t declaring_class_def_index_; 4374 const DexFile& dex_file_; 4375 const Handle<mirror::DexCache> dex_cache_; 4376}; 4377 4378class HInstanceFieldGet : public HExpression<1> { 4379 public: 4380 HInstanceFieldGet(HInstruction* value, 4381 Primitive::Type field_type, 4382 MemberOffset field_offset, 4383 bool is_volatile, 4384 uint32_t field_idx, 4385 uint16_t declaring_class_def_index, 4386 const DexFile& dex_file, 4387 Handle<mirror::DexCache> dex_cache, 4388 uint32_t dex_pc) 4389 : HExpression(field_type, 4390 SideEffects::FieldReadOfType(field_type, is_volatile), 4391 dex_pc), 4392 field_info_(field_offset, 4393 field_type, 4394 is_volatile, 4395 field_idx, 4396 declaring_class_def_index, 4397 dex_file, 4398 dex_cache) { 4399 SetRawInputAt(0, value); 4400 } 4401 4402 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4403 4404 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4405 HInstanceFieldGet* other_get = other->AsInstanceFieldGet(); 4406 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4407 } 4408 4409 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4410 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4411 } 4412 4413 size_t ComputeHashCode() const OVERRIDE { 4414 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4415 } 4416 4417 const FieldInfo& GetFieldInfo() const { return field_info_; } 4418 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4419 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4420 bool IsVolatile() const { return field_info_.IsVolatile(); } 4421 4422 DECLARE_INSTRUCTION(InstanceFieldGet); 4423 4424 private: 4425 const FieldInfo field_info_; 4426 4427 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet); 4428}; 4429 4430class HInstanceFieldSet : public HTemplateInstruction<2> { 4431 public: 4432 HInstanceFieldSet(HInstruction* object, 4433 HInstruction* value, 4434 Primitive::Type field_type, 4435 MemberOffset field_offset, 4436 bool is_volatile, 4437 uint32_t field_idx, 4438 uint16_t declaring_class_def_index, 4439 const DexFile& dex_file, 4440 Handle<mirror::DexCache> dex_cache, 4441 uint32_t dex_pc) 4442 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 4443 dex_pc), 4444 field_info_(field_offset, 4445 field_type, 4446 is_volatile, 4447 field_idx, 4448 declaring_class_def_index, 4449 dex_file, 4450 dex_cache), 4451 value_can_be_null_(true) { 4452 SetRawInputAt(0, object); 4453 SetRawInputAt(1, value); 4454 } 4455 4456 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4457 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4458 } 4459 4460 const FieldInfo& GetFieldInfo() const { return field_info_; } 4461 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4462 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4463 bool IsVolatile() const { return field_info_.IsVolatile(); } 4464 HInstruction* GetValue() const { return InputAt(1); } 4465 bool GetValueCanBeNull() const { return value_can_be_null_; } 4466 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4467 4468 DECLARE_INSTRUCTION(InstanceFieldSet); 4469 4470 private: 4471 const FieldInfo field_info_; 4472 bool value_can_be_null_; 4473 4474 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet); 4475}; 4476 4477class HArrayGet : public HExpression<2> { 4478 public: 4479 HArrayGet(HInstruction* array, 4480 HInstruction* index, 4481 Primitive::Type type, 4482 uint32_t dex_pc, 4483 SideEffects additional_side_effects = SideEffects::None()) 4484 : HExpression(type, 4485 SideEffects::ArrayReadOfType(type).Union(additional_side_effects), 4486 dex_pc) { 4487 SetRawInputAt(0, array); 4488 SetRawInputAt(1, index); 4489 } 4490 4491 bool CanBeMoved() const OVERRIDE { return true; } 4492 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4493 return true; 4494 } 4495 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4496 // TODO: We can be smarter here. 4497 // Currently, the array access is always preceded by an ArrayLength or a NullCheck 4498 // which generates the implicit null check. There are cases when these can be removed 4499 // to produce better code. If we ever add optimizations to do so we should allow an 4500 // implicit check here (as long as the address falls in the first page). 4501 return false; 4502 } 4503 4504 void SetType(Primitive::Type type) { type_ = type; } 4505 4506 HInstruction* GetArray() const { return InputAt(0); } 4507 HInstruction* GetIndex() const { return InputAt(1); } 4508 4509 DECLARE_INSTRUCTION(ArrayGet); 4510 4511 private: 4512 DISALLOW_COPY_AND_ASSIGN(HArrayGet); 4513}; 4514 4515class HArraySet : public HTemplateInstruction<3> { 4516 public: 4517 HArraySet(HInstruction* array, 4518 HInstruction* index, 4519 HInstruction* value, 4520 Primitive::Type expected_component_type, 4521 uint32_t dex_pc, 4522 SideEffects additional_side_effects = SideEffects::None()) 4523 : HTemplateInstruction( 4524 SideEffects::ArrayWriteOfType(expected_component_type).Union( 4525 SideEffectsForArchRuntimeCalls(value->GetType())).Union( 4526 additional_side_effects), 4527 dex_pc), 4528 expected_component_type_(expected_component_type), 4529 needs_type_check_(value->GetType() == Primitive::kPrimNot), 4530 value_can_be_null_(true), 4531 static_type_of_array_is_object_array_(false) { 4532 SetRawInputAt(0, array); 4533 SetRawInputAt(1, index); 4534 SetRawInputAt(2, value); 4535 } 4536 4537 bool NeedsEnvironment() const OVERRIDE { 4538 // We currently always call a runtime method to catch array store 4539 // exceptions. 4540 return needs_type_check_; 4541 } 4542 4543 // Can throw ArrayStoreException. 4544 bool CanThrow() const OVERRIDE { return needs_type_check_; } 4545 4546 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4547 // TODO: Same as for ArrayGet. 4548 return false; 4549 } 4550 4551 void ClearNeedsTypeCheck() { 4552 needs_type_check_ = false; 4553 } 4554 4555 void ClearValueCanBeNull() { 4556 value_can_be_null_ = false; 4557 } 4558 4559 void SetStaticTypeOfArrayIsObjectArray() { 4560 static_type_of_array_is_object_array_ = true; 4561 } 4562 4563 bool GetValueCanBeNull() const { return value_can_be_null_; } 4564 bool NeedsTypeCheck() const { return needs_type_check_; } 4565 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4566 4567 HInstruction* GetArray() const { return InputAt(0); } 4568 HInstruction* GetIndex() const { return InputAt(1); } 4569 HInstruction* GetValue() const { return InputAt(2); } 4570 4571 Primitive::Type GetComponentType() const { 4572 // The Dex format does not type floating point index operations. Since the 4573 // `expected_component_type_` is set during building and can therefore not 4574 // be correct, we also check what is the value type. If it is a floating 4575 // point type, we must use that type. 4576 Primitive::Type value_type = GetValue()->GetType(); 4577 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4578 ? value_type 4579 : expected_component_type_; 4580 } 4581 4582 Primitive::Type GetRawExpectedComponentType() const { 4583 return expected_component_type_; 4584 } 4585 4586 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4587 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None(); 4588 } 4589 4590 DECLARE_INSTRUCTION(ArraySet); 4591 4592 private: 4593 const Primitive::Type expected_component_type_; 4594 bool needs_type_check_; 4595 bool value_can_be_null_; 4596 // Cached information for the reference_type_info_ so that codegen 4597 // does not need to inspect the static type. 4598 bool static_type_of_array_is_object_array_; 4599 4600 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4601}; 4602 4603class HArrayLength : public HExpression<1> { 4604 public: 4605 HArrayLength(HInstruction* array, uint32_t dex_pc) 4606 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4607 // Note that arrays do not change length, so the instruction does not 4608 // depend on any write. 4609 SetRawInputAt(0, array); 4610 } 4611 4612 bool CanBeMoved() const OVERRIDE { return true; } 4613 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4614 return true; 4615 } 4616 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4617 return obj == InputAt(0); 4618 } 4619 4620 DECLARE_INSTRUCTION(ArrayLength); 4621 4622 private: 4623 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4624}; 4625 4626class HBoundsCheck : public HExpression<2> { 4627 public: 4628 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4629 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4630 DCHECK(index->GetType() == Primitive::kPrimInt); 4631 SetRawInputAt(0, index); 4632 SetRawInputAt(1, length); 4633 } 4634 4635 bool CanBeMoved() const OVERRIDE { return true; } 4636 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4637 return true; 4638 } 4639 4640 bool NeedsEnvironment() const OVERRIDE { return true; } 4641 4642 bool CanThrow() const OVERRIDE { return true; } 4643 4644 HInstruction* GetIndex() const { return InputAt(0); } 4645 4646 DECLARE_INSTRUCTION(BoundsCheck); 4647 4648 private: 4649 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4650}; 4651 4652/** 4653 * Some DEX instructions are folded into multiple HInstructions that need 4654 * to stay live until the last HInstruction. This class 4655 * is used as a marker for the baseline compiler to ensure its preceding 4656 * HInstruction stays live. `index` represents the stack location index of the 4657 * instruction (the actual offset is computed as index * vreg_size). 4658 */ 4659class HTemporary : public HTemplateInstruction<0> { 4660 public: 4661 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4662 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4663 4664 size_t GetIndex() const { return index_; } 4665 4666 Primitive::Type GetType() const OVERRIDE { 4667 // The previous instruction is the one that will be stored in the temporary location. 4668 DCHECK(GetPrevious() != nullptr); 4669 return GetPrevious()->GetType(); 4670 } 4671 4672 DECLARE_INSTRUCTION(Temporary); 4673 4674 private: 4675 const size_t index_; 4676 DISALLOW_COPY_AND_ASSIGN(HTemporary); 4677}; 4678 4679class HSuspendCheck : public HTemplateInstruction<0> { 4680 public: 4681 explicit HSuspendCheck(uint32_t dex_pc) 4682 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {} 4683 4684 bool NeedsEnvironment() const OVERRIDE { 4685 return true; 4686 } 4687 4688 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; } 4689 SlowPathCode* GetSlowPath() const { return slow_path_; } 4690 4691 DECLARE_INSTRUCTION(SuspendCheck); 4692 4693 private: 4694 // Only used for code generation, in order to share the same slow path between back edges 4695 // of a same loop. 4696 SlowPathCode* slow_path_; 4697 4698 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck); 4699}; 4700 4701/** 4702 * Instruction to load a Class object. 4703 */ 4704class HLoadClass : public HExpression<1> { 4705 public: 4706 HLoadClass(HCurrentMethod* current_method, 4707 uint16_t type_index, 4708 const DexFile& dex_file, 4709 bool is_referrers_class, 4710 uint32_t dex_pc, 4711 bool needs_access_check) 4712 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4713 type_index_(type_index), 4714 dex_file_(dex_file), 4715 is_referrers_class_(is_referrers_class), 4716 generate_clinit_check_(false), 4717 needs_access_check_(needs_access_check), 4718 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 4719 // Referrers class should not need access check. We never inline unverified 4720 // methods so we can't possibly end up in this situation. 4721 DCHECK(!is_referrers_class_ || !needs_access_check_); 4722 SetRawInputAt(0, current_method); 4723 } 4724 4725 bool CanBeMoved() const OVERRIDE { return true; } 4726 4727 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4728 // Note that we don't need to test for generate_clinit_check_. 4729 // Whether or not we need to generate the clinit check is processed in 4730 // prepare_for_register_allocator based on existing HInvokes and HClinitChecks. 4731 return other->AsLoadClass()->type_index_ == type_index_ && 4732 other->AsLoadClass()->needs_access_check_ == needs_access_check_; 4733 } 4734 4735 size_t ComputeHashCode() const OVERRIDE { return type_index_; } 4736 4737 uint16_t GetTypeIndex() const { return type_index_; } 4738 bool IsReferrersClass() const { return is_referrers_class_; } 4739 bool CanBeNull() const OVERRIDE { return false; } 4740 4741 bool NeedsEnvironment() const OVERRIDE { 4742 // Will call runtime and load the class if the class is not loaded yet. 4743 // TODO: finer grain decision. 4744 return !is_referrers_class_; 4745 } 4746 4747 bool MustGenerateClinitCheck() const { 4748 return generate_clinit_check_; 4749 } 4750 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 4751 // The entrypoint the code generator is going to call does not do 4752 // clinit of the class. 4753 DCHECK(!NeedsAccessCheck()); 4754 generate_clinit_check_ = generate_clinit_check; 4755 } 4756 4757 bool CanCallRuntime() const { 4758 return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_; 4759 } 4760 4761 bool NeedsAccessCheck() const { 4762 return needs_access_check_; 4763 } 4764 4765 bool CanThrow() const OVERRIDE { 4766 // May call runtime and and therefore can throw. 4767 // TODO: finer grain decision. 4768 return CanCallRuntime(); 4769 } 4770 4771 ReferenceTypeInfo GetLoadedClassRTI() { 4772 return loaded_class_rti_; 4773 } 4774 4775 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 4776 // Make sure we only set exact types (the loaded class should never be merged). 4777 DCHECK(rti.IsExact()); 4778 loaded_class_rti_ = rti; 4779 } 4780 4781 const DexFile& GetDexFile() { return dex_file_; } 4782 4783 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; } 4784 4785 static SideEffects SideEffectsForArchRuntimeCalls() { 4786 return SideEffects::CanTriggerGC(); 4787 } 4788 4789 DECLARE_INSTRUCTION(LoadClass); 4790 4791 private: 4792 const uint16_t type_index_; 4793 const DexFile& dex_file_; 4794 const bool is_referrers_class_; 4795 // Whether this instruction must generate the initialization check. 4796 // Used for code generation. 4797 bool generate_clinit_check_; 4798 bool needs_access_check_; 4799 4800 ReferenceTypeInfo loaded_class_rti_; 4801 4802 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 4803}; 4804 4805class HLoadString : public HExpression<1> { 4806 public: 4807 HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc) 4808 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4809 string_index_(string_index) { 4810 SetRawInputAt(0, current_method); 4811 } 4812 4813 bool CanBeMoved() const OVERRIDE { return true; } 4814 4815 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4816 return other->AsLoadString()->string_index_ == string_index_; 4817 } 4818 4819 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 4820 4821 uint32_t GetStringIndex() const { return string_index_; } 4822 4823 // TODO: Can we deopt or debug when we resolve a string? 4824 bool NeedsEnvironment() const OVERRIDE { return false; } 4825 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; } 4826 bool CanBeNull() const OVERRIDE { return false; } 4827 4828 static SideEffects SideEffectsForArchRuntimeCalls() { 4829 return SideEffects::CanTriggerGC(); 4830 } 4831 4832 DECLARE_INSTRUCTION(LoadString); 4833 4834 private: 4835 const uint32_t string_index_; 4836 4837 DISALLOW_COPY_AND_ASSIGN(HLoadString); 4838}; 4839 4840/** 4841 * Performs an initialization check on its Class object input. 4842 */ 4843class HClinitCheck : public HExpression<1> { 4844 public: 4845 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 4846 : HExpression( 4847 Primitive::kPrimNot, 4848 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 4849 dex_pc) { 4850 SetRawInputAt(0, constant); 4851 } 4852 4853 bool CanBeMoved() const OVERRIDE { return true; } 4854 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4855 return true; 4856 } 4857 4858 bool NeedsEnvironment() const OVERRIDE { 4859 // May call runtime to initialize the class. 4860 return true; 4861 } 4862 4863 4864 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 4865 4866 DECLARE_INSTRUCTION(ClinitCheck); 4867 4868 private: 4869 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 4870}; 4871 4872class HStaticFieldGet : public HExpression<1> { 4873 public: 4874 HStaticFieldGet(HInstruction* cls, 4875 Primitive::Type field_type, 4876 MemberOffset field_offset, 4877 bool is_volatile, 4878 uint32_t field_idx, 4879 uint16_t declaring_class_def_index, 4880 const DexFile& dex_file, 4881 Handle<mirror::DexCache> dex_cache, 4882 uint32_t dex_pc) 4883 : HExpression(field_type, 4884 SideEffects::FieldReadOfType(field_type, is_volatile), 4885 dex_pc), 4886 field_info_(field_offset, 4887 field_type, 4888 is_volatile, 4889 field_idx, 4890 declaring_class_def_index, 4891 dex_file, 4892 dex_cache) { 4893 SetRawInputAt(0, cls); 4894 } 4895 4896 4897 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4898 4899 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4900 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 4901 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4902 } 4903 4904 size_t ComputeHashCode() const OVERRIDE { 4905 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4906 } 4907 4908 const FieldInfo& GetFieldInfo() const { return field_info_; } 4909 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4910 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4911 bool IsVolatile() const { return field_info_.IsVolatile(); } 4912 4913 DECLARE_INSTRUCTION(StaticFieldGet); 4914 4915 private: 4916 const FieldInfo field_info_; 4917 4918 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 4919}; 4920 4921class HStaticFieldSet : public HTemplateInstruction<2> { 4922 public: 4923 HStaticFieldSet(HInstruction* cls, 4924 HInstruction* value, 4925 Primitive::Type field_type, 4926 MemberOffset field_offset, 4927 bool is_volatile, 4928 uint32_t field_idx, 4929 uint16_t declaring_class_def_index, 4930 const DexFile& dex_file, 4931 Handle<mirror::DexCache> dex_cache, 4932 uint32_t dex_pc) 4933 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 4934 dex_pc), 4935 field_info_(field_offset, 4936 field_type, 4937 is_volatile, 4938 field_idx, 4939 declaring_class_def_index, 4940 dex_file, 4941 dex_cache), 4942 value_can_be_null_(true) { 4943 SetRawInputAt(0, cls); 4944 SetRawInputAt(1, value); 4945 } 4946 4947 const FieldInfo& GetFieldInfo() const { return field_info_; } 4948 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4949 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4950 bool IsVolatile() const { return field_info_.IsVolatile(); } 4951 4952 HInstruction* GetValue() const { return InputAt(1); } 4953 bool GetValueCanBeNull() const { return value_can_be_null_; } 4954 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4955 4956 DECLARE_INSTRUCTION(StaticFieldSet); 4957 4958 private: 4959 const FieldInfo field_info_; 4960 bool value_can_be_null_; 4961 4962 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 4963}; 4964 4965class HUnresolvedInstanceFieldGet : public HExpression<1> { 4966 public: 4967 HUnresolvedInstanceFieldGet(HInstruction* obj, 4968 Primitive::Type field_type, 4969 uint32_t field_index, 4970 uint32_t dex_pc) 4971 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 4972 field_index_(field_index) { 4973 SetRawInputAt(0, obj); 4974 } 4975 4976 bool NeedsEnvironment() const OVERRIDE { return true; } 4977 bool CanThrow() const OVERRIDE { return true; } 4978 4979 Primitive::Type GetFieldType() const { return GetType(); } 4980 uint32_t GetFieldIndex() const { return field_index_; } 4981 4982 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 4983 4984 private: 4985 const uint32_t field_index_; 4986 4987 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 4988}; 4989 4990class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 4991 public: 4992 HUnresolvedInstanceFieldSet(HInstruction* obj, 4993 HInstruction* value, 4994 Primitive::Type field_type, 4995 uint32_t field_index, 4996 uint32_t dex_pc) 4997 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 4998 field_type_(field_type), 4999 field_index_(field_index) { 5000 DCHECK_EQ(field_type, value->GetType()); 5001 SetRawInputAt(0, obj); 5002 SetRawInputAt(1, value); 5003 } 5004 5005 bool NeedsEnvironment() const OVERRIDE { return true; } 5006 bool CanThrow() const OVERRIDE { return true; } 5007 5008 Primitive::Type GetFieldType() const { return field_type_; } 5009 uint32_t GetFieldIndex() const { return field_index_; } 5010 5011 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 5012 5013 private: 5014 const Primitive::Type field_type_; 5015 const uint32_t field_index_; 5016 5017 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 5018}; 5019 5020class HUnresolvedStaticFieldGet : public HExpression<0> { 5021 public: 5022 HUnresolvedStaticFieldGet(Primitive::Type field_type, 5023 uint32_t field_index, 5024 uint32_t dex_pc) 5025 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5026 field_index_(field_index) { 5027 } 5028 5029 bool NeedsEnvironment() const OVERRIDE { return true; } 5030 bool CanThrow() const OVERRIDE { return true; } 5031 5032 Primitive::Type GetFieldType() const { return GetType(); } 5033 uint32_t GetFieldIndex() const { return field_index_; } 5034 5035 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 5036 5037 private: 5038 const uint32_t field_index_; 5039 5040 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 5041}; 5042 5043class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 5044 public: 5045 HUnresolvedStaticFieldSet(HInstruction* value, 5046 Primitive::Type field_type, 5047 uint32_t field_index, 5048 uint32_t dex_pc) 5049 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5050 field_type_(field_type), 5051 field_index_(field_index) { 5052 DCHECK_EQ(field_type, value->GetType()); 5053 SetRawInputAt(0, value); 5054 } 5055 5056 bool NeedsEnvironment() const OVERRIDE { return true; } 5057 bool CanThrow() const OVERRIDE { return true; } 5058 5059 Primitive::Type GetFieldType() const { return field_type_; } 5060 uint32_t GetFieldIndex() const { return field_index_; } 5061 5062 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 5063 5064 private: 5065 const Primitive::Type field_type_; 5066 const uint32_t field_index_; 5067 5068 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 5069}; 5070 5071// Implement the move-exception DEX instruction. 5072class HLoadException : public HExpression<0> { 5073 public: 5074 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 5075 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 5076 5077 bool CanBeNull() const OVERRIDE { return false; } 5078 5079 DECLARE_INSTRUCTION(LoadException); 5080 5081 private: 5082 DISALLOW_COPY_AND_ASSIGN(HLoadException); 5083}; 5084 5085// Implicit part of move-exception which clears thread-local exception storage. 5086// Must not be removed because the runtime expects the TLS to get cleared. 5087class HClearException : public HTemplateInstruction<0> { 5088 public: 5089 explicit HClearException(uint32_t dex_pc = kNoDexPc) 5090 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 5091 5092 DECLARE_INSTRUCTION(ClearException); 5093 5094 private: 5095 DISALLOW_COPY_AND_ASSIGN(HClearException); 5096}; 5097 5098class HThrow : public HTemplateInstruction<1> { 5099 public: 5100 HThrow(HInstruction* exception, uint32_t dex_pc) 5101 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 5102 SetRawInputAt(0, exception); 5103 } 5104 5105 bool IsControlFlow() const OVERRIDE { return true; } 5106 5107 bool NeedsEnvironment() const OVERRIDE { return true; } 5108 5109 bool CanThrow() const OVERRIDE { return true; } 5110 5111 5112 DECLARE_INSTRUCTION(Throw); 5113 5114 private: 5115 DISALLOW_COPY_AND_ASSIGN(HThrow); 5116}; 5117 5118/** 5119 * Implementation strategies for the code generator of a HInstanceOf 5120 * or `HCheckCast`. 5121 */ 5122enum class TypeCheckKind { 5123 kUnresolvedCheck, // Check against an unresolved type. 5124 kExactCheck, // Can do a single class compare. 5125 kClassHierarchyCheck, // Can just walk the super class chain. 5126 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 5127 kInterfaceCheck, // No optimization yet when checking against an interface. 5128 kArrayObjectCheck, // Can just check if the array is not primitive. 5129 kArrayCheck // No optimization yet when checking against a generic array. 5130}; 5131 5132class HInstanceOf : public HExpression<2> { 5133 public: 5134 HInstanceOf(HInstruction* object, 5135 HLoadClass* constant, 5136 TypeCheckKind check_kind, 5137 uint32_t dex_pc) 5138 : HExpression(Primitive::kPrimBoolean, 5139 SideEffectsForArchRuntimeCalls(check_kind), 5140 dex_pc), 5141 check_kind_(check_kind), 5142 must_do_null_check_(true) { 5143 SetRawInputAt(0, object); 5144 SetRawInputAt(1, constant); 5145 } 5146 5147 bool CanBeMoved() const OVERRIDE { return true; } 5148 5149 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5150 return true; 5151 } 5152 5153 bool NeedsEnvironment() const OVERRIDE { 5154 return false; 5155 } 5156 5157 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5158 5159 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5160 5161 // Used only in code generation. 5162 bool MustDoNullCheck() const { return must_do_null_check_; } 5163 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5164 5165 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 5166 return (check_kind == TypeCheckKind::kExactCheck) 5167 ? SideEffects::None() 5168 // Mips currently does runtime calls for any other checks. 5169 : SideEffects::CanTriggerGC(); 5170 } 5171 5172 DECLARE_INSTRUCTION(InstanceOf); 5173 5174 private: 5175 const TypeCheckKind check_kind_; 5176 bool must_do_null_check_; 5177 5178 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 5179}; 5180 5181class HBoundType : public HExpression<1> { 5182 public: 5183 // Constructs an HBoundType with the given upper_bound. 5184 // Ensures that the upper_bound is valid. 5185 HBoundType(HInstruction* input, 5186 ReferenceTypeInfo upper_bound, 5187 bool upper_can_be_null, 5188 uint32_t dex_pc = kNoDexPc) 5189 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 5190 upper_bound_(upper_bound), 5191 upper_can_be_null_(upper_can_be_null), 5192 can_be_null_(upper_can_be_null) { 5193 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 5194 SetRawInputAt(0, input); 5195 SetReferenceTypeInfo(upper_bound_); 5196 } 5197 5198 // GetUpper* should only be used in reference type propagation. 5199 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 5200 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 5201 5202 void SetCanBeNull(bool can_be_null) { 5203 DCHECK(upper_can_be_null_ || !can_be_null); 5204 can_be_null_ = can_be_null; 5205 } 5206 5207 bool CanBeNull() const OVERRIDE { return can_be_null_; } 5208 5209 DECLARE_INSTRUCTION(BoundType); 5210 5211 private: 5212 // Encodes the most upper class that this instruction can have. In other words 5213 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 5214 // It is used to bound the type in cases like: 5215 // if (x instanceof ClassX) { 5216 // // uper_bound_ will be ClassX 5217 // } 5218 const ReferenceTypeInfo upper_bound_; 5219 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5220 // is false then can_be_null_ cannot be true). 5221 const bool upper_can_be_null_; 5222 bool can_be_null_; 5223 5224 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5225}; 5226 5227class HCheckCast : public HTemplateInstruction<2> { 5228 public: 5229 HCheckCast(HInstruction* object, 5230 HLoadClass* constant, 5231 TypeCheckKind check_kind, 5232 uint32_t dex_pc) 5233 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5234 check_kind_(check_kind), 5235 must_do_null_check_(true) { 5236 SetRawInputAt(0, object); 5237 SetRawInputAt(1, constant); 5238 } 5239 5240 bool CanBeMoved() const OVERRIDE { return true; } 5241 5242 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5243 return true; 5244 } 5245 5246 bool NeedsEnvironment() const OVERRIDE { 5247 // Instruction may throw a CheckCastError. 5248 return true; 5249 } 5250 5251 bool CanThrow() const OVERRIDE { return true; } 5252 5253 bool MustDoNullCheck() const { return must_do_null_check_; } 5254 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5255 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5256 5257 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5258 5259 DECLARE_INSTRUCTION(CheckCast); 5260 5261 private: 5262 const TypeCheckKind check_kind_; 5263 bool must_do_null_check_; 5264 5265 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5266}; 5267 5268class HMemoryBarrier : public HTemplateInstruction<0> { 5269 public: 5270 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5271 : HTemplateInstruction( 5272 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 5273 barrier_kind_(barrier_kind) {} 5274 5275 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5276 5277 DECLARE_INSTRUCTION(MemoryBarrier); 5278 5279 private: 5280 const MemBarrierKind barrier_kind_; 5281 5282 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5283}; 5284 5285class HMonitorOperation : public HTemplateInstruction<1> { 5286 public: 5287 enum OperationKind { 5288 kEnter, 5289 kExit, 5290 }; 5291 5292 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5293 : HTemplateInstruction( 5294 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5295 kind_(kind) { 5296 SetRawInputAt(0, object); 5297 } 5298 5299 // Instruction may throw a Java exception, so we need an environment. 5300 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5301 5302 bool CanThrow() const OVERRIDE { 5303 // Verifier guarantees that monitor-exit cannot throw. 5304 // This is important because it allows the HGraphBuilder to remove 5305 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5306 return IsEnter(); 5307 } 5308 5309 5310 bool IsEnter() const { return kind_ == kEnter; } 5311 5312 DECLARE_INSTRUCTION(MonitorOperation); 5313 5314 private: 5315 const OperationKind kind_; 5316 5317 private: 5318 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5319}; 5320 5321/** 5322 * A HInstruction used as a marker for the replacement of new + <init> 5323 * of a String to a call to a StringFactory. Only baseline will see 5324 * the node at code generation, where it will be be treated as null. 5325 * When compiling non-baseline, `HFakeString` instructions are being removed 5326 * in the instruction simplifier. 5327 */ 5328class HFakeString : public HTemplateInstruction<0> { 5329 public: 5330 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5331 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5332 5333 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5334 5335 DECLARE_INSTRUCTION(FakeString); 5336 5337 private: 5338 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5339}; 5340 5341class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5342 public: 5343 MoveOperands(Location source, 5344 Location destination, 5345 Primitive::Type type, 5346 HInstruction* instruction) 5347 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5348 5349 Location GetSource() const { return source_; } 5350 Location GetDestination() const { return destination_; } 5351 5352 void SetSource(Location value) { source_ = value; } 5353 void SetDestination(Location value) { destination_ = value; } 5354 5355 // The parallel move resolver marks moves as "in-progress" by clearing the 5356 // destination (but not the source). 5357 Location MarkPending() { 5358 DCHECK(!IsPending()); 5359 Location dest = destination_; 5360 destination_ = Location::NoLocation(); 5361 return dest; 5362 } 5363 5364 void ClearPending(Location dest) { 5365 DCHECK(IsPending()); 5366 destination_ = dest; 5367 } 5368 5369 bool IsPending() const { 5370 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5371 return destination_.IsInvalid() && !source_.IsInvalid(); 5372 } 5373 5374 // True if this blocks a move from the given location. 5375 bool Blocks(Location loc) const { 5376 return !IsEliminated() && source_.OverlapsWith(loc); 5377 } 5378 5379 // A move is redundant if it's been eliminated, if its source and 5380 // destination are the same, or if its destination is unneeded. 5381 bool IsRedundant() const { 5382 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5383 } 5384 5385 // We clear both operands to indicate move that's been eliminated. 5386 void Eliminate() { 5387 source_ = destination_ = Location::NoLocation(); 5388 } 5389 5390 bool IsEliminated() const { 5391 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5392 return source_.IsInvalid(); 5393 } 5394 5395 Primitive::Type GetType() const { return type_; } 5396 5397 bool Is64BitMove() const { 5398 return Primitive::Is64BitType(type_); 5399 } 5400 5401 HInstruction* GetInstruction() const { return instruction_; } 5402 5403 private: 5404 Location source_; 5405 Location destination_; 5406 // The type this move is for. 5407 Primitive::Type type_; 5408 // The instruction this move is assocatied with. Null when this move is 5409 // for moving an input in the expected locations of user (including a phi user). 5410 // This is only used in debug mode, to ensure we do not connect interval siblings 5411 // in the same parallel move. 5412 HInstruction* instruction_; 5413}; 5414 5415static constexpr size_t kDefaultNumberOfMoves = 4; 5416 5417class HParallelMove : public HTemplateInstruction<0> { 5418 public: 5419 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5420 : HTemplateInstruction(SideEffects::None(), dex_pc), 5421 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5422 moves_.reserve(kDefaultNumberOfMoves); 5423 } 5424 5425 void AddMove(Location source, 5426 Location destination, 5427 Primitive::Type type, 5428 HInstruction* instruction) { 5429 DCHECK(source.IsValid()); 5430 DCHECK(destination.IsValid()); 5431 if (kIsDebugBuild) { 5432 if (instruction != nullptr) { 5433 for (const MoveOperands& move : moves_) { 5434 if (move.GetInstruction() == instruction) { 5435 // Special case the situation where the move is for the spill slot 5436 // of the instruction. 5437 if ((GetPrevious() == instruction) 5438 || ((GetPrevious() == nullptr) 5439 && instruction->IsPhi() 5440 && instruction->GetBlock() == GetBlock())) { 5441 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5442 << "Doing parallel moves for the same instruction."; 5443 } else { 5444 DCHECK(false) << "Doing parallel moves for the same instruction."; 5445 } 5446 } 5447 } 5448 } 5449 for (const MoveOperands& move : moves_) { 5450 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5451 << "Overlapped destination for two moves in a parallel move: " 5452 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5453 << source << " ==> " << destination; 5454 } 5455 } 5456 moves_.emplace_back(source, destination, type, instruction); 5457 } 5458 5459 MoveOperands* MoveOperandsAt(size_t index) { 5460 return &moves_[index]; 5461 } 5462 5463 size_t NumMoves() const { return moves_.size(); } 5464 5465 DECLARE_INSTRUCTION(ParallelMove); 5466 5467 private: 5468 ArenaVector<MoveOperands> moves_; 5469 5470 DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5471}; 5472 5473} // namespace art 5474 5475#ifdef ART_ENABLE_CODEGEN_arm64 5476#include "nodes_arm64.h" 5477#endif 5478#ifdef ART_ENABLE_CODEGEN_x86 5479#include "nodes_x86.h" 5480#endif 5481 5482namespace art { 5483 5484class HGraphVisitor : public ValueObject { 5485 public: 5486 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5487 virtual ~HGraphVisitor() {} 5488 5489 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} 5490 virtual void VisitBasicBlock(HBasicBlock* block); 5491 5492 // Visit the graph following basic block insertion order. 5493 void VisitInsertionOrder(); 5494 5495 // Visit the graph following dominator tree reverse post-order. 5496 void VisitReversePostOrder(); 5497 5498 HGraph* GetGraph() const { return graph_; } 5499 5500 // Visit functions for instruction classes. 5501#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5502 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5503 5504 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5505 5506#undef DECLARE_VISIT_INSTRUCTION 5507 5508 private: 5509 HGraph* const graph_; 5510 5511 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5512}; 5513 5514class HGraphDelegateVisitor : public HGraphVisitor { 5515 public: 5516 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5517 virtual ~HGraphDelegateVisitor() {} 5518 5519 // Visit functions that delegate to to super class. 5520#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5521 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5522 5523 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5524 5525#undef DECLARE_VISIT_INSTRUCTION 5526 5527 private: 5528 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5529}; 5530 5531class HInsertionOrderIterator : public ValueObject { 5532 public: 5533 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5534 5535 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5536 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5537 void Advance() { ++index_; } 5538 5539 private: 5540 const HGraph& graph_; 5541 size_t index_; 5542 5543 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5544}; 5545 5546class HReversePostOrderIterator : public ValueObject { 5547 public: 5548 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5549 // Check that reverse post order of the graph has been built. 5550 DCHECK(!graph.GetReversePostOrder().empty()); 5551 } 5552 5553 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5554 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5555 void Advance() { ++index_; } 5556 5557 private: 5558 const HGraph& graph_; 5559 size_t index_; 5560 5561 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5562}; 5563 5564class HPostOrderIterator : public ValueObject { 5565 public: 5566 explicit HPostOrderIterator(const HGraph& graph) 5567 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5568 // Check that reverse post order of the graph has been built. 5569 DCHECK(!graph.GetReversePostOrder().empty()); 5570 } 5571 5572 bool Done() const { return index_ == 0; } 5573 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5574 void Advance() { --index_; } 5575 5576 private: 5577 const HGraph& graph_; 5578 size_t index_; 5579 5580 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5581}; 5582 5583class HLinearPostOrderIterator : public ValueObject { 5584 public: 5585 explicit HLinearPostOrderIterator(const HGraph& graph) 5586 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5587 5588 bool Done() const { return index_ == 0; } 5589 5590 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5591 5592 void Advance() { 5593 --index_; 5594 DCHECK_GE(index_, 0U); 5595 } 5596 5597 private: 5598 const ArenaVector<HBasicBlock*>& order_; 5599 size_t index_; 5600 5601 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5602}; 5603 5604class HLinearOrderIterator : public ValueObject { 5605 public: 5606 explicit HLinearOrderIterator(const HGraph& graph) 5607 : order_(graph.GetLinearOrder()), index_(0) {} 5608 5609 bool Done() const { return index_ == order_.size(); } 5610 HBasicBlock* Current() const { return order_[index_]; } 5611 void Advance() { ++index_; } 5612 5613 private: 5614 const ArenaVector<HBasicBlock*>& order_; 5615 size_t index_; 5616 5617 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5618}; 5619 5620// Iterator over the blocks that art part of the loop. Includes blocks part 5621// of an inner loop. The order in which the blocks are iterated is on their 5622// block id. 5623class HBlocksInLoopIterator : public ValueObject { 5624 public: 5625 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5626 : blocks_in_loop_(info.GetBlocks()), 5627 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5628 index_(0) { 5629 if (!blocks_in_loop_.IsBitSet(index_)) { 5630 Advance(); 5631 } 5632 } 5633 5634 bool Done() const { return index_ == blocks_.size(); } 5635 HBasicBlock* Current() const { return blocks_[index_]; } 5636 void Advance() { 5637 ++index_; 5638 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5639 if (blocks_in_loop_.IsBitSet(index_)) { 5640 break; 5641 } 5642 } 5643 } 5644 5645 private: 5646 const BitVector& blocks_in_loop_; 5647 const ArenaVector<HBasicBlock*>& blocks_; 5648 size_t index_; 5649 5650 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5651}; 5652 5653// Iterator over the blocks that art part of the loop. Includes blocks part 5654// of an inner loop. The order in which the blocks are iterated is reverse 5655// post order. 5656class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5657 public: 5658 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5659 : blocks_in_loop_(info.GetBlocks()), 5660 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5661 index_(0) { 5662 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5663 Advance(); 5664 } 5665 } 5666 5667 bool Done() const { return index_ == blocks_.size(); } 5668 HBasicBlock* Current() const { return blocks_[index_]; } 5669 void Advance() { 5670 ++index_; 5671 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5672 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5673 break; 5674 } 5675 } 5676 } 5677 5678 private: 5679 const BitVector& blocks_in_loop_; 5680 const ArenaVector<HBasicBlock*>& blocks_; 5681 size_t index_; 5682 5683 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5684}; 5685 5686inline int64_t Int64FromConstant(HConstant* constant) { 5687 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5688 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5689 : constant->AsLongConstant()->GetValue(); 5690} 5691 5692inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5693 // For the purposes of the compiler, the dex files must actually be the same object 5694 // if we want to safely treat them as the same. This is especially important for JIT 5695 // as custom class loaders can open the same underlying file (or memory) multiple 5696 // times and provide different class resolution but no two class loaders should ever 5697 // use the same DexFile object - doing so is an unsupported hack that can lead to 5698 // all sorts of weird failures. 5699 return &lhs == &rhs; 5700} 5701 5702} // namespace art 5703 5704#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5705