nodes.h revision d930929be93798d790c91dd05adf2c038508f1b0
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/stl_util.h"
#include "dex/compiler_enums.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFakeString;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

// Default initial capacities reserved for the per-graph and per-block containers.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// Largest meaningful shift distances: 31 for 32-bit values, 63 for 64-bit values.
static constexpr uint32_t kMaxIntShiftValue = 0x1f;
static constexpr uint64_t kMaxLongShiftValue = 0x3f;

// Sentinel indices used when field/class-def information is not available.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel meaning "no associated dex pc".
static constexpr uint32_t kNoDexPc = -1;

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};

// A list of HInstructions, represented only by pointers to its first and last
// elements (the instructions themselves carry the links).
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice `instruction_list` into this list, after `cursor` / at the end.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  // Try building the SSA form of this graph, with dominance computation and loop
  // recognition. Returns whether it was successful in doing all these steps.
  bool TryBuildingSsa() {
    BuildDominatorTree();
    // The SSA builder requires loops to all be natural. Specifically, the dead phi
    // elimination phase checks the consistency of the graph when doing a post-order
    // visit for eliminating dead phis: a dead phi can only have loop header phi
    // users remaining when being visited.
    if (!AnalyzeNaturalLoops()) return false;
    // Precompute per-block try membership before entering the SSA builder,
    // which needs the information to build catch block phis from values of
    // locals at throwing instructions inside try blocks.
    ComputeTryBlockInformation();
    TransformToSsa();
    in_ssa_form_ = true;
    return true;
  }

  void ComputeDominanceInformation();
  void ClearDominanceInformation();

  void BuildDominatorTree();
  void TransformToSsa();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns false if one
  // loop is not natural, that is the header does not dominate the
  // back edge.
  bool AnalyzeNaturalLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction used to replace the invoke expression or null if the
  // invoke is for a void method.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph.
  void DeleteDeadBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Returns the current instruction id and post-increments the counter.
  // The DCHECK guards against the id counter overflowing.
  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raise (never lower) the recorded maximum number of out vregs.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raise (never lower) the recorded number of temporary vreg slots.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  // Number of vregs that are not parameters (only meaningful before SSA form).
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  // Float/double constants are cached by their raw bit pattern so that e.g.
  // +0.0f and -0.0f (different bits) map to different cache entries.
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  HBasicBlock* FindCommonDominator(HBasicBlock* first, HBasicBlock* second) const;

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

 private:
  void FindBackEdges(ArenaBitVector* visited);
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Looks up `value` in `cache` and returns the cached constant, creating,
  // caching and inserting a fresh instruction when no live one exists.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    // A null GetBlock() means the cached instruction was removed from the graph.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants. Note that float/double caches are keyed by bit pattern
  // (int32_t/int64_t), see GetFloatConstant/GetDoubleConstant.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

// Information attached to a loop: its header block, back edges, suspend check
// and the set of blocks that belong to the loop.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop. Returns whether the loop is a natural loop,
  // that is the header dominates the back edge.
  bool Populate();

  // Reanalyzes the loop by removing loop info from its blocks and re-running
  // Populate(). If there are no back edges left, the loop info is completely
  // removed as well as its SuspendCheck instruction. It must be run on nested
  // inner loops first.
  void Update();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop or any loop nested inside
  // this loop. If must_dominate is set, only definitions that actually dominate the loop
  // header can be invariant. Otherwise, any definition outside the loop, including
  // definitions that appear after the loop, is invariant.
  bool IsLoopInvariant(HInstruction* instruction, bool must_dominate) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  // Add/remove `block` to/from this loop's membership set.
  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  ArenaVector<HBasicBlock*> back_edges_;
  // Bit set of block ids belonging to this loop (growable, see constructor).
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // A catch-all handler is encoded with the kDexNoIndex16 type index.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};

// Sentinels for "no lifetime position" and "no block id assigned yet".
static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a doubly-linked list. Each block knows its predecessors and
// successors.
636 637class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 638 public: 639 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 640 : graph_(graph), 641 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 642 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 643 loop_information_(nullptr), 644 dominator_(nullptr), 645 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 646 block_id_(kInvalidBlockId), 647 dex_pc_(dex_pc), 648 lifetime_start_(kNoLifetime), 649 lifetime_end_(kNoLifetime), 650 try_catch_information_(nullptr) { 651 predecessors_.reserve(kDefaultNumberOfPredecessors); 652 successors_.reserve(kDefaultNumberOfSuccessors); 653 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 654 } 655 656 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 657 return predecessors_; 658 } 659 660 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 661 return successors_; 662 } 663 664 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 665 return ContainsElement(successors_, block, start_from); 666 } 667 668 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 669 return dominated_blocks_; 670 } 671 672 bool IsEntryBlock() const { 673 return graph_->GetEntryBlock() == this; 674 } 675 676 bool IsExitBlock() const { 677 return graph_->GetExitBlock() == this; 678 } 679 680 bool IsSingleGoto() const; 681 bool IsSingleTryBoundary() const; 682 683 // Returns true if this block emits nothing but a jump. 684 bool IsSingleJump() const { 685 HLoopInformation* loop_info = GetLoopInformation(); 686 return (IsSingleGoto() || IsSingleTryBoundary()) 687 // Back edges generate a suspend check. 
688 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 689 } 690 691 void AddBackEdge(HBasicBlock* back_edge) { 692 if (loop_information_ == nullptr) { 693 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 694 } 695 DCHECK_EQ(loop_information_->GetHeader(), this); 696 loop_information_->AddBackEdge(back_edge); 697 } 698 699 HGraph* GetGraph() const { return graph_; } 700 void SetGraph(HGraph* graph) { graph_ = graph; } 701 702 uint32_t GetBlockId() const { return block_id_; } 703 void SetBlockId(int id) { block_id_ = id; } 704 uint32_t GetDexPc() const { return dex_pc_; } 705 706 HBasicBlock* GetDominator() const { return dominator_; } 707 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 708 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 709 710 void RemoveDominatedBlock(HBasicBlock* block) { 711 RemoveElement(dominated_blocks_, block); 712 } 713 714 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 715 ReplaceElement(dominated_blocks_, existing, new_block); 716 } 717 718 void ClearDominanceInformation(); 719 720 int NumberOfBackEdges() const { 721 return IsLoopHeader() ? 
loop_information_->NumberOfBackEdges() : 0; 722 } 723 724 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 725 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 726 const HInstructionList& GetInstructions() const { return instructions_; } 727 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 728 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 729 const HInstructionList& GetPhis() const { return phis_; } 730 731 void AddSuccessor(HBasicBlock* block) { 732 successors_.push_back(block); 733 block->predecessors_.push_back(this); 734 } 735 736 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 737 size_t successor_index = GetSuccessorIndexOf(existing); 738 existing->RemovePredecessor(this); 739 new_block->predecessors_.push_back(this); 740 successors_[successor_index] = new_block; 741 } 742 743 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 744 size_t predecessor_index = GetPredecessorIndexOf(existing); 745 existing->RemoveSuccessor(this); 746 new_block->successors_.push_back(this); 747 predecessors_[predecessor_index] = new_block; 748 } 749 750 // Insert `this` between `predecessor` and `successor. This method 751 // preserves the indicies, and will update the first edge found between 752 // `predecessor` and `successor`. 
753 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 754 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 755 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 756 successor->predecessors_[predecessor_index] = this; 757 predecessor->successors_[successor_index] = this; 758 successors_.push_back(successor); 759 predecessors_.push_back(predecessor); 760 } 761 762 void RemovePredecessor(HBasicBlock* block) { 763 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 764 } 765 766 void RemoveSuccessor(HBasicBlock* block) { 767 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 768 } 769 770 void ClearAllPredecessors() { 771 predecessors_.clear(); 772 } 773 774 void AddPredecessor(HBasicBlock* block) { 775 predecessors_.push_back(block); 776 block->successors_.push_back(this); 777 } 778 779 void SwapPredecessors() { 780 DCHECK_EQ(predecessors_.size(), 2u); 781 std::swap(predecessors_[0], predecessors_[1]); 782 } 783 784 void SwapSuccessors() { 785 DCHECK_EQ(successors_.size(), 2u); 786 std::swap(successors_[0], successors_[1]); 787 } 788 789 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 790 return IndexOfElement(predecessors_, predecessor); 791 } 792 793 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 794 return IndexOfElement(successors_, successor); 795 } 796 797 HBasicBlock* GetSinglePredecessor() const { 798 DCHECK_EQ(GetPredecessors().size(), 1u); 799 return GetPredecessors()[0]; 800 } 801 802 HBasicBlock* GetSingleSuccessor() const { 803 DCHECK_EQ(GetSuccessors().size(), 1u); 804 return GetSuccessors()[0]; 805 } 806 807 // Returns whether the first occurrence of `predecessor` in the list of 808 // predecessors is at index `idx`. 
809 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const { 810 DCHECK_EQ(GetPredecessors()[idx], predecessor); 811 return GetPredecessorIndexOf(predecessor) == idx; 812 } 813 814 // Returns the number of non-exceptional successors. SsaChecker ensures that 815 // these are stored at the beginning of the successor list. 816 size_t NumberOfNormalSuccessors() const { 817 return EndsWithTryBoundary() ? 1 : GetSuccessors().size(); 818 } 819 820 // Create a new block between this block and its predecessors. The new block 821 // is added to the graph, all predecessor edges are relinked to it and an edge 822 // is created to `this`. Returns the new empty block. Reverse post order or 823 // loop and try/catch information are not updated. 824 HBasicBlock* CreateImmediateDominator(); 825 826 // Split the block into two blocks just before `cursor`. Returns the newly 827 // created, latter block. Note that this method will add the block to the 828 // graph, create a Goto at the end of the former block and will create an edge 829 // between the blocks. It will not, however, update the reverse post order or 830 // loop and try/catch information. 831 HBasicBlock* SplitBefore(HInstruction* cursor); 832 833 // Split the block into two blocks just after `cursor`. Returns the newly 834 // created block. Note that this method just updates raw block information, 835 // like predecessors, successors, dominators, and instruction list. It does not 836 // update the graph, reverse post order, loop information, nor make sure the 837 // blocks are consistent (for example ending with a control flow instruction). 838 HBasicBlock* SplitAfter(HInstruction* cursor); 839 840 // Merge `other` at the end of `this`. Successors and dominated blocks of 841 // `other` are changed to be successors and dominated blocks of `this`. 
Note 842 // that this method does not update the graph, reverse post order, loop 843 // information, nor make sure the blocks are consistent (for example ending 844 // with a control flow instruction). 845 void MergeWithInlined(HBasicBlock* other); 846 847 // Replace `this` with `other`. Predecessors, successors, and dominated blocks 848 // of `this` are moved to `other`. 849 // Note that this method does not update the graph, reverse post order, loop 850 // information, nor make sure the blocks are consistent (for example ending 851 // with a control flow instruction). 852 void ReplaceWith(HBasicBlock* other); 853 854 // Merge `other` at the end of `this`. This method updates loops, reverse post 855 // order, links to predecessors, successors, dominators and deletes the block 856 // from the graph. The two blocks must be successive, i.e. `this` the only 857 // predecessor of `other` and vice versa. 858 void MergeWith(HBasicBlock* other); 859 860 // Disconnects `this` from all its predecessors, successors and dominator, 861 // removes it from all loops it is included in and eventually from the graph. 862 // The block must not dominate any other block. Predecessors and successors 863 // are safely updated. 864 void DisconnectAndDelete(); 865 866 void AddInstruction(HInstruction* instruction); 867 // Insert `instruction` before/after an existing instruction `cursor`. 868 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 869 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 870 // Replace instruction `initial` with `replacement` within this block. 871 void ReplaceAndRemoveInstructionWith(HInstruction* initial, 872 HInstruction* replacement); 873 void AddPhi(HPhi* phi); 874 void InsertPhiAfter(HPhi* instruction, HPhi* cursor); 875 // RemoveInstruction and RemovePhi delete a given instruction from the respective 876 // instruction list. 
With 'ensure_safety' set to true, it verifies that the 877 // instruction is not in use and removes it from the use lists of its inputs. 878 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true); 879 void RemovePhi(HPhi* phi, bool ensure_safety = true); 880 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true); 881 882 bool IsLoopHeader() const { 883 return IsInLoop() && (loop_information_->GetHeader() == this); 884 } 885 886 bool IsLoopPreHeaderFirstPredecessor() const { 887 DCHECK(IsLoopHeader()); 888 return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader(); 889 } 890 891 HLoopInformation* GetLoopInformation() const { 892 return loop_information_; 893 } 894 895 // Set the loop_information_ on this block. Overrides the current 896 // loop_information if it is an outer loop of the passed loop information. 897 // Note that this method is called while creating the loop information. 898 void SetInLoop(HLoopInformation* info) { 899 if (IsLoopHeader()) { 900 // Nothing to do. This just means `info` is an outer loop. 901 } else if (!IsInLoop()) { 902 loop_information_ = info; 903 } else if (loop_information_->Contains(*info->GetHeader())) { 904 // Block is currently part of an outer loop. Make it part of this inner loop. 905 // Note that a non loop header having a loop information means this loop information 906 // has already been populated 907 loop_information_ = info; 908 } else { 909 // Block is part of an inner loop. Do not update the loop information. 910 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()` 911 // at this point, because this method is being called while populating `info`. 912 } 913 } 914 915 // Raw update of the loop information. 
916 void SetLoopInformation(HLoopInformation* info) { 917 loop_information_ = info; 918 } 919 920 bool IsInLoop() const { return loop_information_ != nullptr; } 921 922 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; } 923 924 void SetTryCatchInformation(TryCatchInformation* try_catch_information) { 925 try_catch_information_ = try_catch_information; 926 } 927 928 bool IsTryBlock() const { 929 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock(); 930 } 931 932 bool IsCatchBlock() const { 933 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock(); 934 } 935 936 // Returns the try entry that this block's successors should have. They will 937 // be in the same try, unless the block ends in a try boundary. In that case, 938 // the appropriate try entry will be returned. 939 const HTryBoundary* ComputeTryEntryOfSuccessors() const; 940 941 bool HasThrowingInstructions() const; 942 943 // Returns whether this block dominates the blocked passed as parameter. 944 bool Dominates(HBasicBlock* block) const; 945 946 size_t GetLifetimeStart() const { return lifetime_start_; } 947 size_t GetLifetimeEnd() const { return lifetime_end_; } 948 949 void SetLifetimeStart(size_t start) { lifetime_start_ = start; } 950 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; } 951 952 bool EndsWithControlFlowInstruction() const; 953 bool EndsWithIf() const; 954 bool EndsWithTryBoundary() const; 955 bool HasSinglePhi() const; 956 957 private: 958 HGraph* graph_; 959 ArenaVector<HBasicBlock*> predecessors_; 960 ArenaVector<HBasicBlock*> successors_; 961 HInstructionList instructions_; 962 HInstructionList phis_; 963 HLoopInformation* loop_information_; 964 HBasicBlock* dominator_; 965 ArenaVector<HBasicBlock*> dominated_blocks_; 966 uint32_t block_id_; 967 // The dex program counter of the first instruction of this block. 
968 const uint32_t dex_pc_; 969 size_t lifetime_start_; 970 size_t lifetime_end_; 971 TryCatchInformation* try_catch_information_; 972 973 friend class HGraph; 974 friend class HInstruction; 975 976 DISALLOW_COPY_AND_ASSIGN(HBasicBlock); 977}; 978 979// Iterates over the LoopInformation of all loops which contain 'block' 980// from the innermost to the outermost. 981class HLoopInformationOutwardIterator : public ValueObject { 982 public: 983 explicit HLoopInformationOutwardIterator(const HBasicBlock& block) 984 : current_(block.GetLoopInformation()) {} 985 986 bool Done() const { return current_ == nullptr; } 987 988 void Advance() { 989 DCHECK(!Done()); 990 current_ = current_->GetPreHeader()->GetLoopInformation(); 991 } 992 993 HLoopInformation* Current() const { 994 DCHECK(!Done()); 995 return current_; 996 } 997 998 private: 999 HLoopInformation* current_; 1000 1001 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator); 1002}; 1003 1004#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1005 M(Above, Condition) \ 1006 M(AboveOrEqual, Condition) \ 1007 M(Add, BinaryOperation) \ 1008 M(And, BinaryOperation) \ 1009 M(ArrayGet, Instruction) \ 1010 M(ArrayLength, Instruction) \ 1011 M(ArraySet, Instruction) \ 1012 M(Below, Condition) \ 1013 M(BelowOrEqual, Condition) \ 1014 M(BooleanNot, UnaryOperation) \ 1015 M(BoundsCheck, Instruction) \ 1016 M(BoundType, Instruction) \ 1017 M(CheckCast, Instruction) \ 1018 M(ClearException, Instruction) \ 1019 M(ClinitCheck, Instruction) \ 1020 M(Compare, BinaryOperation) \ 1021 M(Condition, BinaryOperation) \ 1022 M(CurrentMethod, Instruction) \ 1023 M(Deoptimize, Instruction) \ 1024 M(Div, BinaryOperation) \ 1025 M(DivZeroCheck, Instruction) \ 1026 M(DoubleConstant, Constant) \ 1027 M(Equal, Condition) \ 1028 M(Exit, Instruction) \ 1029 M(FakeString, Instruction) \ 1030 M(FloatConstant, Constant) \ 1031 M(Goto, Instruction) \ 1032 M(GreaterThan, Condition) \ 1033 M(GreaterThanOrEqual, Condition) \ 1034 M(If, Instruction) \ 
1035 M(InstanceFieldGet, Instruction) \ 1036 M(InstanceFieldSet, Instruction) \ 1037 M(InstanceOf, Instruction) \ 1038 M(IntConstant, Constant) \ 1039 M(InvokeUnresolved, Invoke) \ 1040 M(InvokeInterface, Invoke) \ 1041 M(InvokeStaticOrDirect, Invoke) \ 1042 M(InvokeVirtual, Invoke) \ 1043 M(LessThan, Condition) \ 1044 M(LessThanOrEqual, Condition) \ 1045 M(LoadClass, Instruction) \ 1046 M(LoadException, Instruction) \ 1047 M(LoadLocal, Instruction) \ 1048 M(LoadString, Instruction) \ 1049 M(Local, Instruction) \ 1050 M(LongConstant, Constant) \ 1051 M(MemoryBarrier, Instruction) \ 1052 M(MonitorOperation, Instruction) \ 1053 M(Mul, BinaryOperation) \ 1054 M(Neg, UnaryOperation) \ 1055 M(NewArray, Instruction) \ 1056 M(NewInstance, Instruction) \ 1057 M(Not, UnaryOperation) \ 1058 M(NotEqual, Condition) \ 1059 M(NullConstant, Instruction) \ 1060 M(NullCheck, Instruction) \ 1061 M(Or, BinaryOperation) \ 1062 M(PackedSwitch, Instruction) \ 1063 M(ParallelMove, Instruction) \ 1064 M(ParameterValue, Instruction) \ 1065 M(Phi, Instruction) \ 1066 M(Rem, BinaryOperation) \ 1067 M(Return, Instruction) \ 1068 M(ReturnVoid, Instruction) \ 1069 M(Shl, BinaryOperation) \ 1070 M(Shr, BinaryOperation) \ 1071 M(StaticFieldGet, Instruction) \ 1072 M(StaticFieldSet, Instruction) \ 1073 M(UnresolvedInstanceFieldGet, Instruction) \ 1074 M(UnresolvedInstanceFieldSet, Instruction) \ 1075 M(UnresolvedStaticFieldGet, Instruction) \ 1076 M(UnresolvedStaticFieldSet, Instruction) \ 1077 M(StoreLocal, Instruction) \ 1078 M(Sub, BinaryOperation) \ 1079 M(SuspendCheck, Instruction) \ 1080 M(Temporary, Instruction) \ 1081 M(Throw, Instruction) \ 1082 M(TryBoundary, Instruction) \ 1083 M(TypeConversion, Instruction) \ 1084 M(UShr, BinaryOperation) \ 1085 M(Xor, BinaryOperation) \ 1086 1087#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) 1088 1089#ifndef ART_ENABLE_CODEGEN_arm64 1090#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) 1091#else 1092#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1093 
M(Arm64IntermediateAddress, Instruction) 1094#endif 1095 1096#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) 1097 1098#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) 1099 1100#ifndef ART_ENABLE_CODEGEN_x86 1101#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) 1102#else 1103#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1104 M(X86ComputeBaseMethodAddress, Instruction) \ 1105 M(X86LoadFromConstantTable, Instruction) \ 1106 M(X86PackedSwitch, Instruction) 1107#endif 1108 1109#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1110 1111#define FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1112 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1113 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \ 1114 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1115 FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \ 1116 FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \ 1117 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1118 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1119 1120#define FOR_EACH_INSTRUCTION(M) \ 1121 FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1122 M(Constant, Instruction) \ 1123 M(UnaryOperation, Instruction) \ 1124 M(BinaryOperation, Instruction) \ 1125 M(Invoke, Instruction) 1126 1127#define FORWARD_DECLARATION(type, super) class H##type; 1128FOR_EACH_INSTRUCTION(FORWARD_DECLARATION) 1129#undef FORWARD_DECLARATION 1130 1131#define DECLARE_INSTRUCTION(type) \ 1132 InstructionKind GetKind() const OVERRIDE { return k##type; } \ 1133 const char* DebugName() const OVERRIDE { return #type; } \ 1134 const H##type* As##type() const OVERRIDE { return this; } \ 1135 H##type* As##type() OVERRIDE { return this; } \ 1136 bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \ 1137 return other->Is##type(); \ 1138 } \ 1139 void Accept(HGraphVisitor* visitor) OVERRIDE 1140 1141template <typename T> class HUseList; 1142 1143template <typename T> 1144class HUseListNode : public ArenaObject<kArenaAllocUseListNode> { 1145 public: 1146 HUseListNode* GetPrevious() const { return prev_; } 1147 HUseListNode* GetNext() const { return next_; } 1148 T GetUser() 
const { return user_; } 1149 size_t GetIndex() const { return index_; } 1150 void SetIndex(size_t index) { index_ = index; } 1151 1152 private: 1153 HUseListNode(T user, size_t index) 1154 : user_(user), index_(index), prev_(nullptr), next_(nullptr) {} 1155 1156 T const user_; 1157 size_t index_; 1158 HUseListNode<T>* prev_; 1159 HUseListNode<T>* next_; 1160 1161 friend class HUseList<T>; 1162 1163 DISALLOW_COPY_AND_ASSIGN(HUseListNode); 1164}; 1165 1166template <typename T> 1167class HUseList : public ValueObject { 1168 public: 1169 HUseList() : first_(nullptr) {} 1170 1171 void Clear() { 1172 first_ = nullptr; 1173 } 1174 1175 // Adds a new entry at the beginning of the use list and returns 1176 // the newly created node. 1177 HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) { 1178 HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index); 1179 if (IsEmpty()) { 1180 first_ = new_node; 1181 } else { 1182 first_->prev_ = new_node; 1183 new_node->next_ = first_; 1184 first_ = new_node; 1185 } 1186 return new_node; 1187 } 1188 1189 HUseListNode<T>* GetFirst() const { 1190 return first_; 1191 } 1192 1193 void Remove(HUseListNode<T>* node) { 1194 DCHECK(node != nullptr); 1195 DCHECK(Contains(node)); 1196 1197 if (node->prev_ != nullptr) { 1198 node->prev_->next_ = node->next_; 1199 } 1200 if (node->next_ != nullptr) { 1201 node->next_->prev_ = node->prev_; 1202 } 1203 if (node == first_) { 1204 first_ = node->next_; 1205 } 1206 } 1207 1208 bool Contains(const HUseListNode<T>* node) const { 1209 if (node == nullptr) { 1210 return false; 1211 } 1212 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1213 if (current == node) { 1214 return true; 1215 } 1216 } 1217 return false; 1218 } 1219 1220 bool IsEmpty() const { 1221 return first_ == nullptr; 1222 } 1223 1224 bool HasOnlyOneUse() const { 1225 return first_ != nullptr && first_->next_ == nullptr; 1226 } 1227 1228 size_t SizeSlow() const { 1229 
size_t count = 0; 1230 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1231 ++count; 1232 } 1233 return count; 1234 } 1235 1236 private: 1237 HUseListNode<T>* first_; 1238}; 1239 1240template<typename T> 1241class HUseIterator : public ValueObject { 1242 public: 1243 explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {} 1244 1245 bool Done() const { return current_ == nullptr; } 1246 1247 void Advance() { 1248 DCHECK(!Done()); 1249 current_ = current_->GetNext(); 1250 } 1251 1252 HUseListNode<T>* Current() const { 1253 DCHECK(!Done()); 1254 return current_; 1255 } 1256 1257 private: 1258 HUseListNode<T>* current_; 1259 1260 friend class HValue; 1261}; 1262 1263// This class is used by HEnvironment and HInstruction classes to record the 1264// instructions they use and pointers to the corresponding HUseListNodes kept 1265// by the used instructions. 1266template <typename T> 1267class HUserRecord : public ValueObject { 1268 public: 1269 HUserRecord() : instruction_(nullptr), use_node_(nullptr) {} 1270 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {} 1271 1272 HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node) 1273 : instruction_(old_record.instruction_), use_node_(use_node) { 1274 DCHECK(instruction_ != nullptr); 1275 DCHECK(use_node_ != nullptr); 1276 DCHECK(old_record.use_node_ == nullptr); 1277 } 1278 1279 HInstruction* GetInstruction() const { return instruction_; } 1280 HUseListNode<T>* GetUseNode() const { return use_node_; } 1281 1282 private: 1283 // Instruction used by the user. 1284 HInstruction* instruction_; 1285 1286 // Corresponding entry in the use list kept by 'instruction_'. 1287 HUseListNode<T>* use_node_; 1288}; 1289 1290/** 1291 * Side-effects representation. 1292 * 1293 * For write/read dependences on fields/arrays, the dependence analysis uses 1294 * type disambiguation (e.g. 
a float field write cannot modify the value of an 1295 * integer field read) and the access type (e.g. a reference array write cannot 1296 * modify the value of a reference field read [although it may modify the 1297 * reference fetch prior to reading the field, which is represented by its own 1298 * write/read dependence]). The analysis makes conservative points-to 1299 * assumptions on reference types (e.g. two same typed arrays are assumed to be 1300 * the same, and any reference read depends on any reference read without 1301 * further regard of its type). 1302 * 1303 * The internal representation uses 38-bit and is described in the table below. 1304 * The first line indicates the side effect, and for field/array accesses the 1305 * second line indicates the type of the access (in the order of the 1306 * Primitive::Type enum). 1307 * The two numbered lines below indicate the bit position in the bitfield (read 1308 * vertically). 1309 * 1310 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W | 1311 * +-------------+---------+---------+--------------+---------+---------+ 1312 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL| 1313 * | 3 |333333322|222222221| 1 |111111110|000000000| 1314 * | 7 |654321098|765432109| 8 |765432109|876543210| 1315 * 1316 * Note that, to ease the implementation, 'changes' bits are least significant 1317 * bits, while 'dependency' bits are most significant bits. 
1318 */ 1319class SideEffects : public ValueObject { 1320 public: 1321 SideEffects() : flags_(0) {} 1322 1323 static SideEffects None() { 1324 return SideEffects(0); 1325 } 1326 1327 static SideEffects All() { 1328 return SideEffects(kAllChangeBits | kAllDependOnBits); 1329 } 1330 1331 static SideEffects AllChanges() { 1332 return SideEffects(kAllChangeBits); 1333 } 1334 1335 static SideEffects AllDependencies() { 1336 return SideEffects(kAllDependOnBits); 1337 } 1338 1339 static SideEffects AllExceptGCDependency() { 1340 return AllWritesAndReads().Union(SideEffects::CanTriggerGC()); 1341 } 1342 1343 static SideEffects AllWritesAndReads() { 1344 return SideEffects(kAllWrites | kAllReads); 1345 } 1346 1347 static SideEffects AllWrites() { 1348 return SideEffects(kAllWrites); 1349 } 1350 1351 static SideEffects AllReads() { 1352 return SideEffects(kAllReads); 1353 } 1354 1355 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) { 1356 return is_volatile 1357 ? AllWritesAndReads() 1358 : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset)); 1359 } 1360 1361 static SideEffects ArrayWriteOfType(Primitive::Type type) { 1362 return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset)); 1363 } 1364 1365 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) { 1366 return is_volatile 1367 ? AllWritesAndReads() 1368 : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset)); 1369 } 1370 1371 static SideEffects ArrayReadOfType(Primitive::Type type) { 1372 return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset)); 1373 } 1374 1375 static SideEffects CanTriggerGC() { 1376 return SideEffects(1ULL << kCanTriggerGCBit); 1377 } 1378 1379 static SideEffects DependsOnGC() { 1380 return SideEffects(1ULL << kDependsOnGCBit); 1381 } 1382 1383 // Combines the side-effects of this and the other. 
1384 SideEffects Union(SideEffects other) const { 1385 return SideEffects(flags_ | other.flags_); 1386 } 1387 1388 SideEffects Exclusion(SideEffects other) const { 1389 return SideEffects(flags_ & ~other.flags_); 1390 } 1391 1392 void Add(SideEffects other) { 1393 flags_ |= other.flags_; 1394 } 1395 1396 bool Includes(SideEffects other) const { 1397 return (other.flags_ & flags_) == other.flags_; 1398 } 1399 1400 bool HasSideEffects() const { 1401 return (flags_ & kAllChangeBits); 1402 } 1403 1404 bool HasDependencies() const { 1405 return (flags_ & kAllDependOnBits); 1406 } 1407 1408 // Returns true if there are no side effects or dependencies. 1409 bool DoesNothing() const { 1410 return flags_ == 0; 1411 } 1412 1413 // Returns true if something is written. 1414 bool DoesAnyWrite() const { 1415 return (flags_ & kAllWrites); 1416 } 1417 1418 // Returns true if something is read. 1419 bool DoesAnyRead() const { 1420 return (flags_ & kAllReads); 1421 } 1422 1423 // Returns true if potentially everything is written and read 1424 // (every type and every kind of access). 1425 bool DoesAllReadWrite() const { 1426 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads); 1427 } 1428 1429 bool DoesAll() const { 1430 return flags_ == (kAllChangeBits | kAllDependOnBits); 1431 } 1432 1433 // Returns true if this may read something written by other. 1434 bool MayDependOn(SideEffects other) const { 1435 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits; 1436 return (other.flags_ & depends_on_flags); 1437 } 1438 1439 // Returns string representation of flags (for debugging only). 1440 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL| 1441 std::string ToString() const { 1442 std::string flags = "|"; 1443 for (int s = kLastBit; s >= 0; s--) { 1444 bool current_bit_is_set = ((flags_ >> s) & 1) != 0; 1445 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) { 1446 // This is a bit for the GC side effect. 
1447 if (current_bit_is_set) { 1448 flags += "GC"; 1449 } 1450 flags += "|"; 1451 } else { 1452 // This is a bit for the array/field analysis. 1453 // The underscore character stands for the 'can trigger GC' bit. 1454 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD"; 1455 if (current_bit_is_set) { 1456 flags += kDebug[s]; 1457 } 1458 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) || 1459 (s == kFieldReadOffset) || (s == kArrayReadOffset)) { 1460 flags += "|"; 1461 } 1462 } 1463 } 1464 return flags; 1465 } 1466 1467 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; } 1468 1469 private: 1470 static constexpr int kFieldArrayAnalysisBits = 9; 1471 1472 static constexpr int kFieldWriteOffset = 0; 1473 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits; 1474 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1; 1475 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1; 1476 1477 static constexpr int kChangeBits = kCanTriggerGCBit + 1; 1478 1479 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1; 1480 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits; 1481 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1; 1482 static constexpr int kDependsOnGCBit = kLastBitForReads + 1; 1483 1484 static constexpr int kLastBit = kDependsOnGCBit; 1485 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits; 1486 1487 // Aliases. 
1488 1489 static_assert(kChangeBits == kDependOnBits, 1490 "the 'change' bits should match the 'depend on' bits."); 1491 1492 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1); 1493 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits; 1494 static constexpr uint64_t kAllWrites = 1495 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset; 1496 static constexpr uint64_t kAllReads = 1497 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset; 1498 1499 // Work around the fact that HIR aliases I/F and J/D. 1500 // TODO: remove this interceptor once HIR types are clean 1501 static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) { 1502 switch (type) { 1503 case Primitive::kPrimInt: 1504 case Primitive::kPrimFloat: 1505 return TypeFlag(Primitive::kPrimInt, offset) | 1506 TypeFlag(Primitive::kPrimFloat, offset); 1507 case Primitive::kPrimLong: 1508 case Primitive::kPrimDouble: 1509 return TypeFlag(Primitive::kPrimLong, offset) | 1510 TypeFlag(Primitive::kPrimDouble, offset); 1511 default: 1512 return TypeFlag(type, offset); 1513 } 1514 } 1515 1516 // Translates type to bit flag. 1517 static uint64_t TypeFlag(Primitive::Type type, int offset) { 1518 CHECK_NE(type, Primitive::kPrimVoid); 1519 const uint64_t one = 1; 1520 const int shift = type; // 0-based consecutive enum 1521 DCHECK_LE(kFieldWriteOffset, shift); 1522 DCHECK_LT(shift, kArrayWriteOffset); 1523 return one << (type + offset); 1524 } 1525 1526 // Private constructor on direct flags value. 1527 explicit SideEffects(uint64_t flags) : flags_(flags) {} 1528 1529 uint64_t flags_; 1530}; 1531 1532// A HEnvironment object contains the values of virtual registers at a given location. 
1533class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1534 public: 1535 HEnvironment(ArenaAllocator* arena, 1536 size_t number_of_vregs, 1537 const DexFile& dex_file, 1538 uint32_t method_idx, 1539 uint32_t dex_pc, 1540 InvokeType invoke_type, 1541 HInstruction* holder) 1542 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1543 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1544 parent_(nullptr), 1545 dex_file_(dex_file), 1546 method_idx_(method_idx), 1547 dex_pc_(dex_pc), 1548 invoke_type_(invoke_type), 1549 holder_(holder) { 1550 } 1551 1552 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1553 : HEnvironment(arena, 1554 to_copy.Size(), 1555 to_copy.GetDexFile(), 1556 to_copy.GetMethodIdx(), 1557 to_copy.GetDexPc(), 1558 to_copy.GetInvokeType(), 1559 holder) {} 1560 1561 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1562 if (parent_ != nullptr) { 1563 parent_->SetAndCopyParentChain(allocator, parent); 1564 } else { 1565 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1566 parent_->CopyFrom(parent); 1567 if (parent->GetParent() != nullptr) { 1568 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1569 } 1570 } 1571 } 1572 1573 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1574 void CopyFrom(HEnvironment* environment); 1575 1576 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1577 // input to the loop phi instead. This is for inserting instructions that 1578 // require an environment (like HDeoptimization) in the loop pre-header. 
1579 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1580 1581 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1582 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1583 } 1584 1585 HInstruction* GetInstructionAt(size_t index) const { 1586 return vregs_[index].GetInstruction(); 1587 } 1588 1589 void RemoveAsUserOfInput(size_t index) const; 1590 1591 size_t Size() const { return vregs_.size(); } 1592 1593 HEnvironment* GetParent() const { return parent_; } 1594 1595 void SetLocationAt(size_t index, Location location) { 1596 locations_[index] = location; 1597 } 1598 1599 Location GetLocationAt(size_t index) const { 1600 return locations_[index]; 1601 } 1602 1603 uint32_t GetDexPc() const { 1604 return dex_pc_; 1605 } 1606 1607 uint32_t GetMethodIdx() const { 1608 return method_idx_; 1609 } 1610 1611 InvokeType GetInvokeType() const { 1612 return invoke_type_; 1613 } 1614 1615 const DexFile& GetDexFile() const { 1616 return dex_file_; 1617 } 1618 1619 HInstruction* GetHolder() const { 1620 return holder_; 1621 } 1622 1623 private: 1624 // Record instructions' use entries of this environment for constant-time removal. 1625 // It should only be called by HInstruction when a new environment use is added. 1626 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1627 DCHECK(env_use->GetUser() == this); 1628 size_t index = env_use->GetIndex(); 1629 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1630 } 1631 1632 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1633 ArenaVector<Location> locations_; 1634 HEnvironment* parent_; 1635 const DexFile& dex_file_; 1636 const uint32_t method_idx_; 1637 const uint32_t dex_pc_; 1638 const InvokeType invoke_type_; 1639 1640 // The instruction that holds this environment. 
1641 HInstruction* const holder_; 1642 1643 friend class HInstruction; 1644 1645 DISALLOW_COPY_AND_ASSIGN(HEnvironment); 1646}; 1647 1648class ReferenceTypeInfo : ValueObject { 1649 public: 1650 typedef Handle<mirror::Class> TypeHandle; 1651 1652 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) { 1653 // The constructor will check that the type_handle is valid. 1654 return ReferenceTypeInfo(type_handle, is_exact); 1655 } 1656 1657 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); } 1658 1659 static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) { 1660 return handle.GetReference() != nullptr; 1661 } 1662 1663 bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) { 1664 return IsValidHandle(type_handle_); 1665 } 1666 1667 bool IsExact() const { return is_exact_; } 1668 1669 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1670 DCHECK(IsValid()); 1671 return GetTypeHandle()->IsObjectClass(); 1672 } 1673 1674 bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1675 DCHECK(IsValid()); 1676 return GetTypeHandle()->IsStringClass(); 1677 } 1678 1679 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) { 1680 DCHECK(IsValid()); 1681 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass(); 1682 } 1683 1684 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) { 1685 DCHECK(IsValid()); 1686 return GetTypeHandle()->IsInterface(); 1687 } 1688 1689 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1690 DCHECK(IsValid()); 1691 return GetTypeHandle()->IsArrayClass(); 1692 } 1693 1694 bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1695 DCHECK(IsValid()); 1696 return GetTypeHandle()->IsPrimitiveArray(); 1697 } 1698 1699 bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1700 DCHECK(IsValid()); 1701 return GetTypeHandle()->IsArrayClass() && 
!GetTypeHandle()->IsPrimitiveArray(); 1702 } 1703 1704 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1705 DCHECK(IsValid()); 1706 if (!IsExact()) return false; 1707 if (!IsArrayClass()) return false; 1708 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1709 } 1710 1711 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1712 DCHECK(IsValid()); 1713 if (!IsExact()) return false; 1714 if (!IsArrayClass()) return false; 1715 if (!rti.IsArrayClass()) return false; 1716 return GetTypeHandle()->GetComponentType()->IsAssignableFrom( 1717 rti.GetTypeHandle()->GetComponentType()); 1718 } 1719 1720 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 1721 1722 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1723 DCHECK(IsValid()); 1724 DCHECK(rti.IsValid()); 1725 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1726 } 1727 1728 // Returns true if the type information provide the same amount of details. 1729 // Note that it does not mean that the instructions have the same actual type 1730 // (because the type can be the result of a merge). 1731 bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) { 1732 if (!IsValid() && !rti.IsValid()) { 1733 // Invalid types are equal. 1734 return true; 1735 } 1736 if (!IsValid() || !rti.IsValid()) { 1737 // One is valid, the other not. 1738 return false; 1739 } 1740 return IsExact() == rti.IsExact() 1741 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 1742 } 1743 1744 private: 1745 ReferenceTypeInfo(); 1746 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 1747 1748 // The class of the object. 1749 TypeHandle type_handle_; 1750 // Whether or not the type is exact or a superclass of the actual type. 1751 // Whether or not we have any information about this type. 
1752 bool is_exact_; 1753}; 1754 1755std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 1756 1757class HInstruction : public ArenaObject<kArenaAllocInstruction> { 1758 public: 1759 HInstruction(SideEffects side_effects, uint32_t dex_pc) 1760 : previous_(nullptr), 1761 next_(nullptr), 1762 block_(nullptr), 1763 dex_pc_(dex_pc), 1764 id_(-1), 1765 ssa_index_(-1), 1766 environment_(nullptr), 1767 locations_(nullptr), 1768 live_interval_(nullptr), 1769 lifetime_position_(kNoLifetime), 1770 side_effects_(side_effects), 1771 reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {} 1772 1773 virtual ~HInstruction() {} 1774 1775#define DECLARE_KIND(type, super) k##type, 1776 enum InstructionKind { 1777 FOR_EACH_INSTRUCTION(DECLARE_KIND) 1778 }; 1779#undef DECLARE_KIND 1780 1781 HInstruction* GetNext() const { return next_; } 1782 HInstruction* GetPrevious() const { return previous_; } 1783 1784 HInstruction* GetNextDisregardingMoves() const; 1785 HInstruction* GetPreviousDisregardingMoves() const; 1786 1787 HBasicBlock* GetBlock() const { return block_; } 1788 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); } 1789 void SetBlock(HBasicBlock* block) { block_ = block; } 1790 bool IsInBlock() const { return block_ != nullptr; } 1791 bool IsInLoop() const { return block_->IsInLoop(); } 1792 bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); } 1793 1794 virtual size_t InputCount() const = 0; 1795 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); } 1796 1797 virtual void Accept(HGraphVisitor* visitor) = 0; 1798 virtual const char* DebugName() const = 0; 1799 1800 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1801 void SetRawInputAt(size_t index, HInstruction* input) { 1802 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1803 } 1804 1805 virtual bool NeedsEnvironment() const { return false; } 1806 1807 uint32_t GetDexPc() const { return 
dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  // Returns whether a null check on `obj` can be folded into this
  // instruction. Default is no.
  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  // Only valid for reference-typed instructions.
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Registers `user`, whose input at `index` is `this`, in the use list and
  // links the use node into the user's input record.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  // Registers environment `user`, which holds `this` at `index`, in the
  // environment use list.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  // Removes `this` from the use list of its input at position `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated. The parent chain (for inlined
  // environments) is copied as well.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom, but uses HEnvironment::CopyFromWithLoopPhiAdjustment
  // to adjust the copied values with respect to loop phis of `block`.
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

// Generates Is##type() / As##type() predicates and checked casts for every
// concrete instruction kind listed in FOR_EACH_INSTRUCTION.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(HInstruction* other) const;

  virtual InstructionKind GetKind() const = 0;

  // Hash code combining the instruction kind and the ids of all inputs.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (size_t i = 0, e = InputCount(); i < e; ++i) {
      result = (result * 31) + InputAt(i)->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

 protected:
  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;

 private:
  void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); }

  // Intrusive links for the instruction list of the owning block.
  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo reference_type_info_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);

// Iterates over the fixed inputs of a single instruction.
class HInputIterator : public ValueObject {
 public:
  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}

  bool Done() const { return index_ == instruction_->InputCount(); }
  HInstruction* Current() const { return instruction_->InputAt(index_); }
  void Advance() { index_++; }

 private:
  HInstruction* instruction_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
};

// Forward iterator over an HInstructionList. The next instruction is cached
// before Current() is handed out, presumably so the current instruction can be
// removed from the list during iteration — confirm with callers.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;  // Cached successor of instruction_.

  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
};

// Same as HInstructionIterator, but walks the list from last to first.
class HBackwardInstructionIterator : public ValueObject {
 public:
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;  // Cached predecessor of instruction_.

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// Instruction with a compile-time fixed number of inputs (N), stored inline.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0. Needed because a
// zero-length std::array of input records cannot be indexed.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  // No inputs exist, so the accessors must never be reached.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};

// An instruction that also carries a result type.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE { return type_; }

 protected:
  Primitive::Type type_;
};

// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};

// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};

// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};

// Jumps from one block to another.
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The single target of the unconditional jump.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};

// Base class for compile-time constants. Constants have no side effects
// and can always be moved.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  virtual bool IsMinusOne() const { return false; }
  virtual bool IsZero() const { return false; }
  virtual bool IsOne() const { return false; }

  // Raw 64-bit representation of the constant's value.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

class HNullConstant : public HConstant {
 public:
  // All null constants carry the same (empty) data.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }

  size_t ComputeHashCode() const OVERRIDE { return 0; }

  DECLARE_INSTRUCTION(NullConstant);

 private:
  // Constructor is private: only HGraph (friend below) creates instances,
  // presumably to keep the null constant canonical per graph — confirm in HGraph.
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
};

// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit value (note: no sign extension).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructors are private: instances are created through HGraph (friend below).
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Constructor is private: instances are created through HGraph (friend below).
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition input is true (index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition input is false (index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Iterator over exception handlers of a given HTryBoundary, i.e. over
// exceptional successors of its basic block.
class HExceptionHandlerIterator : public ValueObject {
 public:
  // Starts right after the normal-flow successors of the boundary's block.
  explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary)
    : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {}

  bool Done() const { return index_ == block_.GetSuccessors().size(); }
  HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; }
  size_t CurrentSuccessorIndex() const { return index_; }
  void Advance() { ++index_; }

 private:
  const HBasicBlock& block_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value of the first table entry.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Base class for operations with a single input and a typed result.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;

  DECLARE_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Base class for operations with two inputs and a typed result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if inputs are identical, or already in canonical order.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. The mixed int/long overloads
  // default to "not defined" and return null.
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED,
                              HLongConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (int, long) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
    return nullptr;
  }

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
};

// Base class for boolean-valued comparisons that can feed an HIf.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
        needs_materialization_(true),
        bias_(ComparisonBias::kNoBias) {}

  bool NeedsMaterialization() const { return needs_materialization_; }
  void ClearNeedsMaterialization() { needs_materialization_ = false; }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  // The condition that holds exactly when this one does not.
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }

  void SetBias(ComparisonBias bias) { bias_ = bias; }

  // Two conditions of the same kind are equal iff their NaN bias matches.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return bias_ == other->AsCondition()->bias_;
  }

  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE);
  }

  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ);
  }

 private:
  // For register allocation purposes, returns whether this instruction needs to be
  // materialized (that is, not just be in the processor flags).
  bool needs_materialization_;

  // Needed if we merge a HCompare into a HCondition.
  ComparisonBias bias_;

  DISALLOW_COPY_AND_ASSIGN(HCondition);
};

// Instruction to check if two inputs are equal to each other.
class HEqual : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};

class HNotEqual : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

class HLessThan : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> bool Compute(T x, T
y) const { return x >= y; } 2862 2863 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2864}; 2865 2866class HBelow : public HCondition { 2867 public: 2868 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2869 : HCondition(first, second, dex_pc) {} 2870 2871 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2872 return GetBlock()->GetGraph()->GetIntConstant( 2873 Compute(static_cast<uint32_t>(x->GetValue()), 2874 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2875 } 2876 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2877 return GetBlock()->GetGraph()->GetIntConstant( 2878 Compute(static_cast<uint64_t>(x->GetValue()), 2879 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2880 } 2881 2882 DECLARE_INSTRUCTION(Below); 2883 2884 IfCondition GetCondition() const OVERRIDE { 2885 return kCondB; 2886 } 2887 2888 IfCondition GetOppositeCondition() const OVERRIDE { 2889 return kCondAE; 2890 } 2891 2892 private: 2893 template <typename T> bool Compute(T x, T y) const { return x < y; } 2894 2895 DISALLOW_COPY_AND_ASSIGN(HBelow); 2896}; 2897 2898class HBelowOrEqual : public HCondition { 2899 public: 2900 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2901 : HCondition(first, second, dex_pc) {} 2902 2903 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2904 return GetBlock()->GetGraph()->GetIntConstant( 2905 Compute(static_cast<uint32_t>(x->GetValue()), 2906 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2907 } 2908 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2909 return GetBlock()->GetGraph()->GetIntConstant( 2910 Compute(static_cast<uint64_t>(x->GetValue()), 2911 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2912 } 2913 2914 DECLARE_INSTRUCTION(BelowOrEqual); 2915 2916 IfCondition GetCondition() const OVERRIDE { 2917 return kCondBE; 2918 } 2919 2920 IfCondition GetOppositeCondition() const OVERRIDE 
{ 2921 return kCondA; 2922 } 2923 2924 private: 2925 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2926 2927 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2928}; 2929 2930class HAbove : public HCondition { 2931 public: 2932 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2933 : HCondition(first, second, dex_pc) {} 2934 2935 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2936 return GetBlock()->GetGraph()->GetIntConstant( 2937 Compute(static_cast<uint32_t>(x->GetValue()), 2938 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2939 } 2940 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2941 return GetBlock()->GetGraph()->GetIntConstant( 2942 Compute(static_cast<uint64_t>(x->GetValue()), 2943 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2944 } 2945 2946 DECLARE_INSTRUCTION(Above); 2947 2948 IfCondition GetCondition() const OVERRIDE { 2949 return kCondA; 2950 } 2951 2952 IfCondition GetOppositeCondition() const OVERRIDE { 2953 return kCondBE; 2954 } 2955 2956 private: 2957 template <typename T> bool Compute(T x, T y) const { return x > y; } 2958 2959 DISALLOW_COPY_AND_ASSIGN(HAbove); 2960}; 2961 2962class HAboveOrEqual : public HCondition { 2963 public: 2964 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2965 : HCondition(first, second, dex_pc) {} 2966 2967 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2968 return GetBlock()->GetGraph()->GetIntConstant( 2969 Compute(static_cast<uint32_t>(x->GetValue()), 2970 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2971 } 2972 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2973 return GetBlock()->GetGraph()->GetIntConstant( 2974 Compute(static_cast<uint64_t>(x->GetValue()), 2975 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2976 } 2977 2978 DECLARE_INSTRUCTION(AboveOrEqual); 2979 2980 IfCondition GetCondition() const OVERRIDE 
{ 2981 return kCondAE; 2982 } 2983 2984 IfCondition GetOppositeCondition() const OVERRIDE { 2985 return kCondB; 2986 } 2987 2988 private: 2989 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2990 2991 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 2992}; 2993 2994// Instruction to check how two inputs compare to each other. 2995// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 2996class HCompare : public HBinaryOperation { 2997 public: 2998 HCompare(Primitive::Type type, 2999 HInstruction* first, 3000 HInstruction* second, 3001 ComparisonBias bias, 3002 uint32_t dex_pc) 3003 : HBinaryOperation(Primitive::kPrimInt, 3004 first, 3005 second, 3006 SideEffectsForArchRuntimeCalls(type), 3007 dex_pc), 3008 bias_(bias) { 3009 DCHECK_EQ(type, first->GetType()); 3010 DCHECK_EQ(type, second->GetType()); 3011 } 3012 3013 template <typename T> 3014 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 3015 3016 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3017 return GetBlock()->GetGraph()->GetIntConstant( 3018 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3019 } 3020 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3021 return GetBlock()->GetGraph()->GetIntConstant( 3022 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3023 } 3024 3025 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3026 return bias_ == other->AsCompare()->bias_; 3027 } 3028 3029 ComparisonBias GetBias() const { return bias_; } 3030 3031 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3032 3033 3034 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3035 // MIPS64 uses a runtime call for FP comparisons. 3036 return Primitive::IsFloatingPointType(type) ? 
SideEffects::CanTriggerGC() : SideEffects::None(); 3037 } 3038 3039 DECLARE_INSTRUCTION(Compare); 3040 3041 private: 3042 const ComparisonBias bias_; 3043 3044 DISALLOW_COPY_AND_ASSIGN(HCompare); 3045}; 3046 3047// A local in the graph. Corresponds to a Dex register. 3048class HLocal : public HTemplateInstruction<0> { 3049 public: 3050 explicit HLocal(uint16_t reg_number) 3051 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3052 3053 DECLARE_INSTRUCTION(Local); 3054 3055 uint16_t GetRegNumber() const { return reg_number_; } 3056 3057 private: 3058 // The Dex register number. 3059 const uint16_t reg_number_; 3060 3061 DISALLOW_COPY_AND_ASSIGN(HLocal); 3062}; 3063 3064// Load a given local. The local is an input of this instruction. 3065class HLoadLocal : public HExpression<1> { 3066 public: 3067 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3068 : HExpression(type, SideEffects::None(), dex_pc) { 3069 SetRawInputAt(0, local); 3070 } 3071 3072 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3073 3074 DECLARE_INSTRUCTION(LoadLocal); 3075 3076 private: 3077 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3078}; 3079 3080// Store a value in a given local. This instruction has two inputs: the value 3081// and the local. 
class HStoreLocal : public HTemplateInstruction<2> {
 public:
  // Input 0 is the destination local, input 1 is the value to store.
  HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
    SetRawInputAt(1, value);
  }

  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(StoreLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HStoreLocal);
};

// A 32-bit floating-point constant. Equality/hashing are done on the raw bit
// pattern so that e.g. -0.0f and 0.0f, or different NaN payloads, are
// distinguished where required.
class HFloatConstant : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant());
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Bit-pattern comparison: true only for exactly -1.0f.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Value comparison: true for both +0.0f and -0.0f.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0f;
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs from the raw 32-bit representation.
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};

// A 64-bit floating-point constant. Same bit-pattern equality rules as
// HFloatConstant.
class HDoubleConstant : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant());
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Bit-pattern comparison: true only for exactly -1.0.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Value comparison: true for both +0.0 and -0.0.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0;
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs from the raw 64-bit representation.
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};

// Enumeration of all recognized intrinsics, generated from intrinsics_list.h,
// plus kNone for non-intrinsic invokes.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);

enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};

// Base class for all method invocation instructions. Inputs are the call
// arguments, possibly followed by extra non-argument inputs (see subclasses).
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return return_type_; }


  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache);

  // True if this invoke comes from a method that was inlined into another one
  // (it then has a parent environment).
  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->GetParent() != nullptr;
  }

  bool CanThrow() const OVERRIDE { return true; }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_INSTRUCTION(Invoke);

 protected:
  // `number_of_other_inputs` counts the trailing non-argument inputs (e.g. the
  // current method or a clinit check) that subclasses append after the
  // arguments.
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      return_type_(return_type),
      dex_method_index_(dex_method_index),
      original_invoke_type_(original_invoke_type),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const Primitive::Type return_type_;
  const uint32_t dex_method_index_;
  const InvokeType original_invoke_type_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};

// Invoke whose target method could not be resolved at compile time; dispatch
// is left entirely to the runtime.
class HInvokeUnresolved : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};

class HInvokeStaticOrDirect : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Use ArtMethod* at an address that will be known at link time, embed the direct
    // address in the code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kDirectAddressWithFixup,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use PC-relative call instruction patched at link time.
    // Used for calls within an oat file, boot->boot or app->app.
    kCallPCRelative,

    // Call to a known target address, embed the direct address in code.
    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
    kCallDirect,

    // Call to a target address that will be known at link time, embed the direct
    // address in code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kCallDirectWithFixup,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
    uint64_t direct_code_ptr;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and one other
                // if the method is a string factory.
                1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u)
                   + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        invoke_type_(invoke_type),
        clinit_check_requirement_(clinit_check_requirement),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {}

  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    dispatch_info_ = dispatch_info;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return return_type_ == Primitive::kPrimNot && !IsStringInit();
  }

  InvokeType GetInvokeType() const { return invoke_type_; }
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  // The HCurrentMethod input directly follows the arguments.
  uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }

  // The following three accessors all read dispatch_info_.method_load_data,
  // whose interpretation depends on the method load kind (see DispatchInfo).
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  // Remove the art::HLoadClass instruction set as last input by
  // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of
  // the initial art::HClinitCheck instruction (only relevant for
  // static calls with explicit clinit check).
  void RemoveLoadClassAsLastInput() {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    clinit_check_requirement_ = ClinitCheckRequirement::kImplicit;
    DCHECK(IsStaticWithImplicitClinitCheck());
  }

  bool IsStringFactoryFor(HFakeString* str) const {
    if (!IsStringInit()) return false;
    // +1 for the current method.
    if (InputCount() == (number_of_arguments_ + 1)) return false;
    return InputAt(InputCount() - 1)->AsFakeString() == str;
  }

  void RemoveFakeStringArgumentAsLastInput() {
    DCHECK(IsStringInit());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsFakeString()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }

 private:
  const InvokeType invoke_type_;
  ClinitCheckRequirement clinit_check_requirement_;
  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};

// Virtual method call, dispatched through the receiver's vtable.
class HInvokeVirtual : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 uint32_t vtable_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
        vtable_index_(vtable_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};

// Interface method call, dispatched through the interface method table.
class HInvokeInterface : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   uint32_t imt_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
3584 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3585 } 3586 3587 uint32_t GetImtIndex() const { return imt_index_; } 3588 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3589 3590 DECLARE_INSTRUCTION(InvokeInterface); 3591 3592 private: 3593 const uint32_t imt_index_; 3594 3595 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3596}; 3597 3598class HNewInstance : public HExpression<1> { 3599 public: 3600 HNewInstance(HCurrentMethod* current_method, 3601 uint32_t dex_pc, 3602 uint16_t type_index, 3603 const DexFile& dex_file, 3604 QuickEntrypointEnum entrypoint) 3605 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3606 type_index_(type_index), 3607 dex_file_(dex_file), 3608 entrypoint_(entrypoint) { 3609 SetRawInputAt(0, current_method); 3610 } 3611 3612 uint16_t GetTypeIndex() const { return type_index_; } 3613 const DexFile& GetDexFile() const { return dex_file_; } 3614 3615 // Calls runtime so needs an environment. 3616 bool NeedsEnvironment() const OVERRIDE { return true; } 3617 // It may throw when called on: 3618 // - interfaces 3619 // - abstract/innaccessible/unknown classes 3620 // TODO: optimize when possible. 
3621 bool CanThrow() const OVERRIDE { return true; } 3622 3623 bool CanBeNull() const OVERRIDE { return false; } 3624 3625 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3626 3627 DECLARE_INSTRUCTION(NewInstance); 3628 3629 private: 3630 const uint16_t type_index_; 3631 const DexFile& dex_file_; 3632 const QuickEntrypointEnum entrypoint_; 3633 3634 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3635}; 3636 3637class HNeg : public HUnaryOperation { 3638 public: 3639 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3640 : HUnaryOperation(result_type, input, dex_pc) {} 3641 3642 template <typename T> T Compute(T x) const { return -x; } 3643 3644 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3645 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3646 } 3647 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3648 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3649 } 3650 3651 DECLARE_INSTRUCTION(Neg); 3652 3653 private: 3654 DISALLOW_COPY_AND_ASSIGN(HNeg); 3655}; 3656 3657class HNewArray : public HExpression<2> { 3658 public: 3659 HNewArray(HInstruction* length, 3660 HCurrentMethod* current_method, 3661 uint32_t dex_pc, 3662 uint16_t type_index, 3663 const DexFile& dex_file, 3664 QuickEntrypointEnum entrypoint) 3665 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3666 type_index_(type_index), 3667 dex_file_(dex_file), 3668 entrypoint_(entrypoint) { 3669 SetRawInputAt(0, length); 3670 SetRawInputAt(1, current_method); 3671 } 3672 3673 uint16_t GetTypeIndex() const { return type_index_; } 3674 const DexFile& GetDexFile() const { return dex_file_; } 3675 3676 // Calls runtime so needs an environment. 3677 bool NeedsEnvironment() const OVERRIDE { return true; } 3678 3679 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 
3680 bool CanThrow() const OVERRIDE { return true; } 3681 3682 bool CanBeNull() const OVERRIDE { return false; } 3683 3684 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3685 3686 DECLARE_INSTRUCTION(NewArray); 3687 3688 private: 3689 const uint16_t type_index_; 3690 const DexFile& dex_file_; 3691 const QuickEntrypointEnum entrypoint_; 3692 3693 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3694}; 3695 3696class HAdd : public HBinaryOperation { 3697 public: 3698 HAdd(Primitive::Type result_type, 3699 HInstruction* left, 3700 HInstruction* right, 3701 uint32_t dex_pc = kNoDexPc) 3702 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3703 3704 bool IsCommutative() const OVERRIDE { return true; } 3705 3706 template <typename T> T Compute(T x, T y) const { return x + y; } 3707 3708 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3709 return GetBlock()->GetGraph()->GetIntConstant( 3710 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3711 } 3712 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3713 return GetBlock()->GetGraph()->GetLongConstant( 3714 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3715 } 3716 3717 DECLARE_INSTRUCTION(Add); 3718 3719 private: 3720 DISALLOW_COPY_AND_ASSIGN(HAdd); 3721}; 3722 3723class HSub : public HBinaryOperation { 3724 public: 3725 HSub(Primitive::Type result_type, 3726 HInstruction* left, 3727 HInstruction* right, 3728 uint32_t dex_pc = kNoDexPc) 3729 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3730 3731 template <typename T> T Compute(T x, T y) const { return x - y; } 3732 3733 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3734 return GetBlock()->GetGraph()->GetIntConstant( 3735 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3736 } 3737 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3738 return GetBlock()->GetGraph()->GetLongConstant( 3739 
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Multiplication. Commutative; constant-folded for int and long operands.
class HMul : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Division (`left / right`). Carries GC side effects because some
// architectures implement it with a runtime call (see
// SideEffectsForArchRuntimeCalls).
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Remainder (`left % right`). Like HDiv, may be implemented as a runtime call.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ?
        0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Guards a division/remainder: throws at runtime when its input is zero.
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check passes its input through, so it reports the input's type.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment to unwind from.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Left shift (`x << (y & max_shift_value)`); the shift distance is masked.
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Signed right shift (`x >> (y & max_shift_value)`); contrast HUShr, which
// shifts an unsigned copy of the value.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Unsigned (zero-filling) right shift: the value is reinterpreted as
// unsigned before shifting (see Compute).
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    // Shift an unsigned copy of `x` so the vacated high bits are zero-filled.
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND. Commutative; mixed int/long operands fold to a long constant.
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR. Commutative; mixed int/long operands fold to a long constant.
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction*
      right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x | y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};

// Bitwise XOR. Commutative; mixed int/long operands fold to a long constant.
class HXor : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};

// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index),
        is_this_(is_this),
        can_be_null_(!is_this) {}

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return is_this_; }

  // `this` is never null; other reference parameters default to maybe-null
  // until an analysis narrows this via SetCanBeNull.
  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  // Whether or not the parameter value corresponds to 'this' argument.
  const bool is_this_;

  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement (`~x`); constant-folded for int and long inputs.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean-typed input (its value must be 0 or 1).
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> bool Compute(T x) const {
    // The input must encode a boolean, i.e. fit in one bit.
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // A conversion to the same type would be a no-op and is not expected here.
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

static constexpr uint32_t kNoRegNumber = -1;

class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(type),
        is_live_(false),
        can_be_null_(true) {
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      // Sub-word integral types are widened to int in phis.
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type type) { type_ = type; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return
      is_live_; }

  // Returns true if `other` is a phi merging the same dex register in the
  // same block.
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Checks its input for null; throws when it is null, otherwise passes the
// (now known non-null) value through.
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment to unwind from.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // The result of a null check is by definition non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Value-object bundle describing a field access: offset, type, volatility,
// and the dex-file metadata identifying the field.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Reads an instance field from the object in input 0.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  bool
  CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An implicit (fault-handler based) null check only covers field offsets
  // within the first page.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Stores input 1 into an instance field of the object in input 0.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // Same first-page restriction as HInstanceFieldGet.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  // Whether the stored value may be null; cleared when an analysis proves
  // it cannot be (see ClearValueCanBeNull).
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Reads the element at `index` from `array`.
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Overwrites the recorded result type. NOTE(review): presumably used when a
  // later pass refines the component type — confirm with callers.
  void SetType(Primitive::Type type) { type_ = type; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

// Stores `value` into `array` at `index`.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc),
        expected_component_type_(expected_component_type),
        // Only reference stores need a (runtime) type check.
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    needs_type_check_ = false;
  }

  void ClearValueCanBeNull() {
    value_can_be_null_ = false;
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    static_type_of_array_is_object_array_ = true;
  }

  bool GetValueCanBeNull() const { return value_can_be_null_; }
  bool NeedsTypeCheck() const { return needs_type_check_; }
  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : expected_component_type_;
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return expected_component_type_;
  }

  // Storing a reference may go through a runtime helper, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  const Primitive::Type expected_component_type_;
  bool needs_type_check_;
  bool value_can_be_null_;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  bool static_type_of_array_is_object_array_;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

// Loads the length of `array` as an int.
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // The null check on the array can be folded into the length load itself.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};

// Checks that `index` is within `[0, length)`; throws otherwise.
class HBoundsCheck : public HExpression<2> {
 public:
  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
      : HExpression(index->GetType(), SideEffects::None(), dex_pc) {
    DCHECK(index->GetType() == Primitive::kPrimInt);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment to unwind from.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};

/**
 * Some DEX instructions are folded into multiple HInstructions that need
 * to stay live until the last HInstruction. This class
 * is used as a marker for the baseline compiler to ensure its preceding
 * HInstruction stays live.
`index` represents the stack location index of the
 * instruction (the actual offset is computed as index * vreg_size).
 */
class HTemporary : public HTemplateInstruction<0> {
 public:
  explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {}

  // Stack slot index of this temporary.
  size_t GetIndex() const { return index_; }

  Primitive::Type GetType() const OVERRIDE {
    // The previous instruction is the one that will be stored in the temporary location.
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Point at which the runtime is allowed to suspend the executing thread;
// may trigger GC, hence the CanTriggerGC side effect.
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
           other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  // A successfully loaded Class object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    // Will call runtime and load the class if the class is not loaded yet.
    // TODO: finer grain decision.
    return !is_referrers_class_;
  }

  bool MustGenerateClinitCheck() const {
    return generate_clinit_check_;
  }
  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    generate_clinit_check_ = generate_clinit_check;
  }

  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_;
  }

  bool NeedsAccessCheck() const {
    return needs_access_check_;
  }

  bool CanThrow() const OVERRIDE {
    // May call runtime and therefore can throw.
    // TODO: finer grain decision.
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool is_referrers_class_;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  bool generate_clinit_check_;
  bool needs_access_check_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};

// Loads a String object identified by `string_index` in the dex file of the
// given referrer method (hence NeedsDexCacheOfDeclaringClass).
class HLoadString : public HExpression<1> {
 public:
  HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        string_index_(string_index) {
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two loads of the same string index are interchangeable.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return other->AsLoadString()->string_index_ == string_index_;
  }

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  uint32_t GetStringIndex() const { return string_index_; }

  // TODO: Can we deopt or debug when we resolve a string?
  bool NeedsEnvironment() const OVERRIDE { return false; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; }
  bool CanBeNull() const OVERRIDE { return false; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  const uint32_t string_index_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }


  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Reads a resolved static field described by `field_info_`; input 0 is the
// class holding the field.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Only non-volatile gets may be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};

// Writes input 1 into a resolved static field of the class given as input 0.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  // Called when analysis proves the stored value is never null.
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};

// Access to an instance field that was not resolved at compile time. The
// access needs an environment and can throw, consistent with going through
// a runtime entrypoint.
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};

// Store (input 1) into an unresolved instance field of input 0.
class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
};

// Load of an unresolved static field; takes no inputs.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};

// Store of its single input into an unresolved static field.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};

// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // The pending exception delivered to a catch block is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};

// Throws the exception object given as input 0; ends the basic block.
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};

/**
 * Implementation strategies for the code generator of a HInstanceOf
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck             // No optimization yet when checking against a generic array.
};

// instanceof: evaluates to true iff input 0 is a non-null instance of the
// class loaded by input 1.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return false;
  }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return (check_kind == TypeCheckKind::kExactCheck)
        ? SideEffects::None()
        // Mips currently does runtime calls for any other checks.
        : SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

class HBoundType : public HExpression<1> {
 public:
  // Constructs an HBoundType with the given upper_bound.
  // Ensures that the upper_bound is valid.
  HBoundType(HInstruction* input,
             ReferenceTypeInfo upper_bound,
             bool upper_can_be_null,
             uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(upper_bound),
        upper_can_be_null_(upper_can_be_null),
        can_be_null_(upper_can_be_null) {
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
    SetReferenceTypeInfo(upper_bound_);
  }

  // GetUpper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return upper_can_be_null_; }

  void SetCanBeNull(bool can_be_null) {
    // Narrowing only: cannot claim more nullability than the upper bound allows.
    DCHECK(upper_can_be_null_ || !can_be_null);
    can_be_null_ = can_be_null;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  // if (x instanceof ClassX) {
  //   // upper_bound_ will be ClassX
  // }
  const ReferenceTypeInfo upper_bound_;
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then can_be_null_ cannot be true).
  const bool upper_can_be_null_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// check-cast: type-checks input 0 against the class loaded by input 1 and
// throws on failure.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
5275 barrier_kind_(barrier_kind) {} 5276 5277 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5278 5279 DECLARE_INSTRUCTION(MemoryBarrier); 5280 5281 private: 5282 const MemBarrierKind barrier_kind_; 5283 5284 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5285}; 5286 5287class HMonitorOperation : public HTemplateInstruction<1> { 5288 public: 5289 enum OperationKind { 5290 kEnter, 5291 kExit, 5292 }; 5293 5294 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5295 : HTemplateInstruction( 5296 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5297 kind_(kind) { 5298 SetRawInputAt(0, object); 5299 } 5300 5301 // Instruction may throw a Java exception, so we need an environment. 5302 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5303 5304 bool CanThrow() const OVERRIDE { 5305 // Verifier guarantees that monitor-exit cannot throw. 5306 // This is important because it allows the HGraphBuilder to remove 5307 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5308 return IsEnter(); 5309 } 5310 5311 5312 bool IsEnter() const { return kind_ == kEnter; } 5313 5314 DECLARE_INSTRUCTION(MonitorOperation); 5315 5316 private: 5317 const OperationKind kind_; 5318 5319 private: 5320 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5321}; 5322 5323/** 5324 * A HInstruction used as a marker for the replacement of new + <init> 5325 * of a String to a call to a StringFactory. Only baseline will see 5326 * the node at code generation, where it will be be treated as null. 5327 * When compiling non-baseline, `HFakeString` instructions are being removed 5328 * in the instruction simplifier. 
 */
class HFakeString : public HTemplateInstruction<0> {
 public:
  explicit HFakeString(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; }

  DECLARE_INSTRUCTION(FakeString);

 private:
  DISALLOW_COPY_AND_ASSIGN(HFakeString);
};

// One source->destination move within an HParallelMove. Not an HInstruction;
// arena-allocated and owned by the parallel move.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return destination_.IsInvalid() && !source_.IsInvalid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must appear to execute simultaneously (used by register
// allocation / resolution). Debug builds verify no two moves clobber each
// other's destination.
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#ifdef ART_ENABLE_CODEGEN_arm64
#include "nodes_arm64.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base visitor over the instructions of a graph. Subclasses override the
// per-instruction Visit methods they care about; the default implementation
// forwards to VisitInstruction.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};

// Visitor whose per-instruction methods delegate to the instruction's super
// class handler instead of directly to VisitInstruction.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};

// Iterates over basic blocks in the order they were added to the graph.
class HInsertionOrderIterator : public ValueObject {
 public:
  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}

  bool Done() const { return index_ == graph_.GetBlocks().size(); }
  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
};

class HReversePostOrderIterator : public ValueObject {
 public:
  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
};

// Walks the reverse-post-order list backwards, i.e. yields blocks in post order.
class HPostOrderIterator : public ValueObject {
 public:
  explicit HPostOrderIterator(const HGraph& graph)
      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == 0; }
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
  void Advance() { --index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
};

// Walks the linear order backwards.
class HLinearPostOrderIterator : public ValueObject {
 public:
  explicit HLinearPostOrderIterator(const HGraph& graph)
      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}

  bool Done() const { return index_ == 0; }

  HBasicBlock* Current() const { return order_[index_ - 1u]; }

  void Advance() {
    --index_;
    // NOTE(review): index_ is unsigned, so this check is vacuously true; it
    // does not catch advancing past Done().
    DCHECK_GE(index_, 0U);
  }

 private:
  const ArenaVector<HBasicBlock*>& order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
};

class HLinearOrderIterator : public ValueObject {
 public:
  explicit HLinearOrderIterator(const HGraph& graph)
      : order_(graph.GetLinearOrder()), index_(0) {}

  bool Done() const { return index_ == order_.size(); }
  HBasicBlock* Current() const { return order_[index_]; }
  void Advance() { ++index_; }

 private:
  const ArenaVector<HBasicBlock*>& order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip ahead to the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead to the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Returns the constant's value widened to int64_t. Only valid for int and
// long constants.
inline int64_t Int64FromConstant(HConstant* constant) {
  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
  return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                   : constant->AsLongConstant()->GetValue();
}

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_NODES_H_