nodes.h revision e6dbf48d7a549e58a3d798bbbdc391e4d091b432
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/stl_util.h"
#include "dex/compiler_enums.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
#include "utils/arena_bit_vector.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFakeString;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

// Initial reserve() sizes for the arena vectors below; purely a
// memory-allocation heuristic, not a hard limit.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// Maximum meaningful shift distances for 32-bit and 64-bit shifts
// (the Dex spec masks shift amounts to these values).
static constexpr uint32_t kMaxIntShiftValue = 0x1f;
static constexpr uint64_t kMaxLongShiftValue = 0x3f;

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions that have no associated dex location.
static constexpr uint32_t kNoDexPc = -1;

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};

// Intrusive doubly-linked list of HInstructions; used by HBasicBlock for
// both its phi list and its instruction list.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops the list's pointers only; does not touch the instructions themselves.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  // Try building the SSA form of this graph, with dominance computation and loop
  // recognition. Returns whether it was successful in doing all these steps.
  bool TryBuildingSsa() {
    BuildDominatorTree();
    // The SSA builder requires loops to all be natural. Specifically, the dead phi
    // elimination phase checks the consistency of the graph when doing a post-order
    // visit for eliminating dead phis: a dead phi can only have loop header phi
    // users remaining when being visited.
    if (!AnalyzeNaturalLoops()) return false;
    // Precompute per-block try membership before entering the SSA builder,
    // which needs the information to build catch block phis from values of
    // locals at throwing instructions inside try blocks.
    ComputeTryBlockInformation();
    TransformToSsa();
    in_ssa_form_ = true;
    return true;
  }

  void ComputeDominanceInformation();
  void ClearDominanceInformation();

  void BuildDominatorTree();
  void TransformToSsa();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns false if one
  // loop is not natural, that is the header does not dominate the
  // back edge.
  bool AnalyzeNaturalLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction used to replace the invoke expression or null if the
  // invoke is for a void method.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph.
  void DeleteDeadBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Hands out a fresh, monotonically increasing instruction id. Aborts (in
  // debug builds) on overflow rather than silently reusing ids.
  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  // Float/double constants are cached under their bit patterns so that e.g.
  // +0.0f and -0.0f map to distinct cache entries.
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  HBasicBlock* FindCommonDominator(HBasicBlock* first, HBasicBlock* second) const;

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

 private:
  void FindBackEdges(ArenaBitVector* visited);
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Shared lookup-or-create helper behind the typed Get*Constant methods above.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

// Per-loop metadata: the header block, the back edges, the set of member
// blocks, and the loop's HSuspendCheck.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop. Returns whether the loop is a natural loop,
  // that is the header dominates the back edge.
  bool Populate();

  // Reanalyzes the loop by removing loop info from its blocks and re-running
  // Populate(). If there are no back edges left, the loop info is completely
  // removed as well as its SuspendCheck instruction. It must be run on nested
  // inner loops first.
  void Update();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  ArenaVector<HBasicBlock*> back_edges_;
  // Bit vector of block ids belonging to this loop.
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // A catch-all handler is encoded with kDexNoIndex16 as its type index.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};

// Sentinel values for unset lifetime positions and unassigned block ids.
static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Returns whether `block` appears in the successor list at or after
  // index `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` with this block's loop information, creating the
  // HLoopInformation lazily on first use. Must only be called on the header.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  // Appends an edge this -> block, updating both adjacency lists.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes only this side of the edge; the counterpart successor list of
  // `block` is not updated here.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Returns the number of non-exceptional successors. SsaChecker ensures that
  // these are stored at the beginning of the successor list.
  size_t NumberOfNormalSuccessors() const {
    return EndsWithTryBoundary() ? 1 : GetSuccessors().size();
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the blocked passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    // The pre-header lies outside the current loop, so its loop information
    // is the next-outer loop (or null at the outermost level).
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(Condition, BinaryOperation)                                         \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

// Architecture-specific instruction lists. Each is empty unless the
// corresponding code generator is compiled in.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64IntermediateAddress, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions: the common list plus per-architecture lists.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All instructions: the concrete ones plus the abstract base kinds.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                      \
  M(Invoke, Instruction)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides that every concrete instruction class must emit:
// kind, debug name, downcast accessors, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKind() const OVERRIDE { return k##type; }          \
  const char* DebugName() const OVERRIDE { return #type; }              \
  const H##type* As##type() const OVERRIDE { return this; }             \
  H##type* As##type() OVERRIDE { return this; }                         \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {     \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

template <typename T> class HUseList;

// A node of the intrusive doubly-linked use list kept by an instruction.
// T is the user type: HInstruction* for regular uses, HEnvironment* for
// environment uses.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Constructor is private: nodes are created only through HUseList::AddUse.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Arena-allocated doubly-linked list of uses. The list does not own its
// nodes; they live in the arena and are never individually freed.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list in O(1). `node` must be in the list.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear membership scan; only meant for debug checks.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n): walks the whole list, hence the name.
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over an HUseList. Invalidated by removals.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Completes `old_record` (which must not yet have a use node) with `use_node`.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g.
a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All writes/reads plus the 'can trigger GC' bit (but not 'depends on GC').
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses act as full read/write barriers, hence the conservative
  // AllWritesAndReads() for the is_volatile case (same below for reads).
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this minus the effects present in other.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if all of other's effects are present in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if this may read something written by other.
  bool MayDependOn(SideEffects other) const {
    // Align the 'depends on' bits with the 'changes' bits before intersecting.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk the bits from most significant to least, mirroring the table in
    // the class comment.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // One bit per Primitive::Type for each field/array access group.
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Shallow copy: duplicates the shape (size, dex file, pc, ...) of `to_copy`
  // but not its vreg values; use CopyFrom for those.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a deep copy of `parent` (and its parent chain) at the end of this
  // environment's own parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw write of a vreg slot; does not update any use lists.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  // Environment of the outer (calling) method when this one was inlined.
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Compile-time reference type information: the class of a reference-typed
// value and whether that class is exact or only an upper bound.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Only exact array types can answer this precisely; inexact types return false.
  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo();
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Base class of all IR instructions. Instructions live in arena memory and
// are linked into the instruction list of their containing HBasicBlock.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// One enum value (kFoo) per instruction listed in FOR_EACH_INSTRUCTION.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  // Raw write: does not update the use list of `input` (see AddUseAt).
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Records that `user` uses this instruction as input number `index`, and
  // stores the resulting use node in `user`'s input record.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom, but loop phis of `block` are replaced by their
  // first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

// Virtual Is/As type-check and downcast helpers, overridden per concrete
// instruction by DECLARE_INSTRUCTION.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
1946 bool Equals(HInstruction* other) const; 1947 1948 virtual InstructionKind GetKind() const = 0; 1949 1950 virtual size_t ComputeHashCode() const { 1951 size_t result = GetKind(); 1952 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1953 result = (result * 31) + InputAt(i)->GetId(); 1954 } 1955 return result; 1956 } 1957 1958 SideEffects GetSideEffects() const { return side_effects_; } 1959 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 1960 1961 size_t GetLifetimePosition() const { return lifetime_position_; } 1962 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1963 LiveInterval* GetLiveInterval() const { return live_interval_; } 1964 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1965 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1966 1967 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 1968 1969 // Returns whether the code generation of the instruction will require to have access 1970 // to the current method. Such instructions are: 1971 // (1): Instructions that require an environment, as calling the runtime requires 1972 // to walk the stack and have the current method stored at a specific stack address. 1973 // (2): Object literals like classes and strings, that are loaded from the dex cache 1974 // fields of the current method. 1975 bool NeedsCurrentMethod() const { 1976 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 1977 } 1978 1979 virtual bool NeedsDexCache() const { return false; } 1980 1981 // Does this instruction have any use in an environment before 1982 // control flow hits 'other'? 1983 bool HasAnyEnvironmentUseBefore(HInstruction* other); 1984 1985 // Remove all references to environment uses of this instruction. 1986 // The caller must ensure that this is safe to do. 
1987 void RemoveEnvironmentUsers(); 1988 1989 protected: 1990 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 1991 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 1992 1993 private: 1994 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 1995 1996 HInstruction* previous_; 1997 HInstruction* next_; 1998 HBasicBlock* block_; 1999 const uint32_t dex_pc_; 2000 2001 // An instruction gets an id when it is added to the graph. 2002 // It reflects creation order. A negative id means the instruction 2003 // has not been added to the graph. 2004 int id_; 2005 2006 // When doing liveness analysis, instructions that have uses get an SSA index. 2007 int ssa_index_; 2008 2009 // List of instructions that have this instruction as input. 2010 HUseList<HInstruction*> uses_; 2011 2012 // List of environments that contain this instruction. 2013 HUseList<HEnvironment*> env_uses_; 2014 2015 // The environment associated with this instruction. Not null if the instruction 2016 // might jump out of the method. 2017 HEnvironment* environment_; 2018 2019 // Set by the code generator. 2020 LocationSummary* locations_; 2021 2022 // Set by the liveness analysis. 2023 LiveInterval* live_interval_; 2024 2025 // Set by the liveness analysis, this is the position in a linear 2026 // order of blocks where this instruction's live interval start. 2027 size_t lifetime_position_; 2028 2029 SideEffects side_effects_; 2030 2031 // TODO: for primitive types this should be marked as invalid. 
  // Reference type information. Only meaningful when GetType() is kPrimNot
  // (see GetReferenceTypeInfo(), which DCHECKs that).
  ReferenceTypeInfo reference_type_info_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};

// Prints an instruction's kind, e.g. for debugging and graph dumps.
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);

// Iterator over the inputs of an instruction, in input-index order.
class HInputIterator : public ValueObject {
 public:
  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}

  bool Done() const { return index_ == instruction_->InputCount(); }
  HInstruction* Current() const { return instruction_->InputAt(index_); }
  void Advance() { index_++; }

 private:
  HInstruction* instruction_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
};

// Forward iterator over an HInstructionList. The next instruction is cached
// before Current() is handed out, so the current instruction can be removed
// or replaced while iterating.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
};

// Backward iterator over an HInstructionList. Like HInstructionIterator, the
// predecessor is cached so the current instruction may be removed safely.
class HBackwardInstructionIterator : public ValueObject {
 public:
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// An HInstruction with a compile-time-fixed number of inputs (N), stored
// inline in the instruction itself.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0. A zero-length std::array is
// avoided; input accessors are unreachable by construction.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};

// A template instruction that produces a value of primitive type `type_`.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE { return type_; }

 protected:
  Primitive::Type type_;
};

// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};

// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};

// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};

// Jumps from one block to another.
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The jump target: the single successor of the block ending with this goto.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};

// Base class of all compile-time constants. Constants take no inputs and
// have no side effects, so they can freely be moved (e.g. by GVN/code sinking).
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Identity tests used by arithmetic simplifications; subclasses override
  // the ones that apply to their value domain.
  virtual bool IsMinusOne() const { return false; }
  virtual bool IsZero() const { return false; }
  virtual bool IsOne() const { return false; }

  // Raw 64-bit representation of the constant's value.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

// The `null` reference constant. The constructor is private; instances are
// created through HGraph (a friend), so the graph can cache a single one.
class HNullConstant : public HConstant {
 public:
  // All null constants are interchangeable.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }

  size_t ComputeHashCode() const OVERRIDE { return 0; }

  DECLARE_INSTRUCTION(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
};

// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend the 32-bit pattern; avoids sign-extension surprises for
  // negative values.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructors are private; instances are created/cached through HGraph.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  // Booleans are represented as the ints 1/0.
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Private; instances are created/cached through HGraph.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition input is true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition input is false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  // Whether this marks the entry (as opposed to the exit) of a try block.
  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Iterator over exception handlers of a given HTryBoundary, i.e. over
// exceptional successors of its basic block.
class HExceptionHandlerIterator : public ValueObject {
 public:
  explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary)
    : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {}

  bool Done() const { return index_ == block_.GetSuccessors().size(); }
  HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; }
  size_t CurrentSuccessorIndex() const { return index_; }
  void Advance() { ++index_; }

 private:
  const HBasicBlock& block_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  // Deoptimization calls into the runtime, hence needs an environment and
  // counts as throwing.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value of the first table entry; entry i matches start_value_ + i.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Base class of operations with a single input producing a value.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;

  DECLARE_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Base class of operations with two inputs producing a value.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // Mixed int/long cases are optional; the default is "not constant-foldable".
  virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED,
                              HLongConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (int, long) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
    return nullptr;
  }

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
};

// Base class of all comparison instructions producing a boolean.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
        needs_materialization_(true),
        bias_(ComparisonBias::kNoBias) {}

  bool NeedsMaterialization() const { return needs_materialization_; }
  void ClearNeedsMaterialization() { needs_materialization_ = false; }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  // The condition that yields the logical negation of this one.
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }

  void SetBias(ComparisonBias bias) { bias_ = bias; }

  // Two conditions of the same kind are equal iff their NaN bias matches.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return bias_ == other->AsCondition()->bias_;
  }

  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE);
  }

  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ);
  }

 private:
  // For register allocation purposes, returns whether this instruction needs to be
  // materialized (that is, not just be in the processor flags).
  bool needs_materialization_;

  // Needed if we merge a HCompare into a HCondition.
  ComparisonBias bias_;

  DISALLOW_COPY_AND_ASSIGN(HCondition);
};

// Instruction to check if two inputs are equal to each other.
class HEqual : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};

// Instruction to check if two inputs are not equal to each other.
class HNotEqual : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

// Signed less-than comparison.
class HLessThan : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

// Signed less-than-or-equal comparison.
class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

// Signed greater-than comparison.
class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

// Signed greater-than-or-equal comparison.
class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};

// Unsigned less-than ("below") comparison; inputs are reinterpreted as
// unsigned before comparing.
class HBelow : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint32_t>(x->GetValue()),
                static_cast<uint32_t>(y->GetValue())), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint64_t>(x->GetValue()),
                static_cast<uint64_t>(y->GetValue())), GetDexPc());
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const OVERRIDE {
    return kCondB;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondAE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HBelow);
};

// Unsigned less-than-or-equal ("below or equal") comparison; inputs are
// reinterpreted as unsigned before comparing.
class HBelowOrEqual : public HCondition {
 public:
  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint32_t>(x->GetValue()),
                static_cast<uint32_t>(y->GetValue())), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint64_t>(x->GetValue()),
                static_cast<uint64_t>(y->GetValue())), GetDexPc());
  }

  DECLARE_INSTRUCTION(BelowOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondBE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE
{ 2909 return kCondA; 2910 } 2911 2912 private: 2913 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2914 2915 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2916}; 2917 2918class HAbove : public HCondition { 2919 public: 2920 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2921 : HCondition(first, second, dex_pc) {} 2922 2923 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2924 return GetBlock()->GetGraph()->GetIntConstant( 2925 Compute(static_cast<uint32_t>(x->GetValue()), 2926 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2927 } 2928 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2929 return GetBlock()->GetGraph()->GetIntConstant( 2930 Compute(static_cast<uint64_t>(x->GetValue()), 2931 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2932 } 2933 2934 DECLARE_INSTRUCTION(Above); 2935 2936 IfCondition GetCondition() const OVERRIDE { 2937 return kCondA; 2938 } 2939 2940 IfCondition GetOppositeCondition() const OVERRIDE { 2941 return kCondBE; 2942 } 2943 2944 private: 2945 template <typename T> bool Compute(T x, T y) const { return x > y; } 2946 2947 DISALLOW_COPY_AND_ASSIGN(HAbove); 2948}; 2949 2950class HAboveOrEqual : public HCondition { 2951 public: 2952 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2953 : HCondition(first, second, dex_pc) {} 2954 2955 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2956 return GetBlock()->GetGraph()->GetIntConstant( 2957 Compute(static_cast<uint32_t>(x->GetValue()), 2958 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2959 } 2960 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2961 return GetBlock()->GetGraph()->GetIntConstant( 2962 Compute(static_cast<uint64_t>(x->GetValue()), 2963 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2964 } 2965 2966 DECLARE_INSTRUCTION(AboveOrEqual); 2967 2968 IfCondition GetCondition() const OVERRIDE 
{ 2969 return kCondAE; 2970 } 2971 2972 IfCondition GetOppositeCondition() const OVERRIDE { 2973 return kCondB; 2974 } 2975 2976 private: 2977 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2978 2979 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 2980}; 2981 2982// Instruction to check how two inputs compare to each other. 2983// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 2984class HCompare : public HBinaryOperation { 2985 public: 2986 HCompare(Primitive::Type type, 2987 HInstruction* first, 2988 HInstruction* second, 2989 ComparisonBias bias, 2990 uint32_t dex_pc) 2991 : HBinaryOperation(Primitive::kPrimInt, 2992 first, 2993 second, 2994 SideEffectsForArchRuntimeCalls(type), 2995 dex_pc), 2996 bias_(bias) { 2997 DCHECK_EQ(type, first->GetType()); 2998 DCHECK_EQ(type, second->GetType()); 2999 } 3000 3001 template <typename T> 3002 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 3003 3004 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3005 return GetBlock()->GetGraph()->GetIntConstant( 3006 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3007 } 3008 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3009 return GetBlock()->GetGraph()->GetIntConstant( 3010 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3011 } 3012 3013 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3014 return bias_ == other->AsCompare()->bias_; 3015 } 3016 3017 ComparisonBias GetBias() const { return bias_; } 3018 3019 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3020 3021 3022 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3023 // MIPS64 uses a runtime call for FP comparisons. 3024 return Primitive::IsFloatingPointType(type) ? 
SideEffects::CanTriggerGC() : SideEffects::None(); 3025 } 3026 3027 DECLARE_INSTRUCTION(Compare); 3028 3029 private: 3030 const ComparisonBias bias_; 3031 3032 DISALLOW_COPY_AND_ASSIGN(HCompare); 3033}; 3034 3035// A local in the graph. Corresponds to a Dex register. 3036class HLocal : public HTemplateInstruction<0> { 3037 public: 3038 explicit HLocal(uint16_t reg_number) 3039 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3040 3041 DECLARE_INSTRUCTION(Local); 3042 3043 uint16_t GetRegNumber() const { return reg_number_; } 3044 3045 private: 3046 // The Dex register number. 3047 const uint16_t reg_number_; 3048 3049 DISALLOW_COPY_AND_ASSIGN(HLocal); 3050}; 3051 3052// Load a given local. The local is an input of this instruction. 3053class HLoadLocal : public HExpression<1> { 3054 public: 3055 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3056 : HExpression(type, SideEffects::None(), dex_pc) { 3057 SetRawInputAt(0, local); 3058 } 3059 3060 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3061 3062 DECLARE_INSTRUCTION(LoadLocal); 3063 3064 private: 3065 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3066}; 3067 3068// Store a value in a given local. This instruction has two inputs: the value 3069// and the local. 
3070class HStoreLocal : public HTemplateInstruction<2> { 3071 public: 3072 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3073 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3074 SetRawInputAt(0, local); 3075 SetRawInputAt(1, value); 3076 } 3077 3078 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3079 3080 DECLARE_INSTRUCTION(StoreLocal); 3081 3082 private: 3083 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3084}; 3085 3086class HFloatConstant : public HConstant { 3087 public: 3088 float GetValue() const { return value_; } 3089 3090 uint64_t GetValueAsUint64() const OVERRIDE { 3091 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3092 } 3093 3094 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3095 DCHECK(other->IsFloatConstant()); 3096 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3097 } 3098 3099 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3100 3101 bool IsMinusOne() const OVERRIDE { 3102 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3103 } 3104 bool IsZero() const OVERRIDE { 3105 return value_ == 0.0f; 3106 } 3107 bool IsOne() const OVERRIDE { 3108 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3109 } 3110 bool IsNaN() const { 3111 return std::isnan(value_); 3112 } 3113 3114 DECLARE_INSTRUCTION(FloatConstant); 3115 3116 private: 3117 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3118 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3119 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3120 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3121 3122 const float value_; 3123 3124 // Only the SsaBuilder and HGraph can create floating-point constants. 
3125 friend class SsaBuilder; 3126 friend class HGraph; 3127 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3128}; 3129 3130class HDoubleConstant : public HConstant { 3131 public: 3132 double GetValue() const { return value_; } 3133 3134 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3135 3136 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3137 DCHECK(other->IsDoubleConstant()); 3138 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3139 } 3140 3141 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3142 3143 bool IsMinusOne() const OVERRIDE { 3144 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3145 } 3146 bool IsZero() const OVERRIDE { 3147 return value_ == 0.0; 3148 } 3149 bool IsOne() const OVERRIDE { 3150 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3151 } 3152 bool IsNaN() const { 3153 return std::isnan(value_); 3154 } 3155 3156 DECLARE_INSTRUCTION(DoubleConstant); 3157 3158 private: 3159 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3160 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3161 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3162 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3163 3164 const double value_; 3165 3166 // Only the SsaBuilder and HGraph can create floating-point constants. 
3167 friend class SsaBuilder; 3168 friend class HGraph; 3169 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3170}; 3171 3172enum class Intrinsics { 3173#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name, 3174#include "intrinsics_list.h" 3175 kNone, 3176 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3177#undef INTRINSICS_LIST 3178#undef OPTIMIZING_INTRINSICS 3179}; 3180std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3181 3182enum IntrinsicNeedsEnvironmentOrCache { 3183 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3184 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3185}; 3186 3187class HInvoke : public HInstruction { 3188 public: 3189 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3190 3191 bool NeedsEnvironment() const OVERRIDE; 3192 3193 void SetArgumentAt(size_t index, HInstruction* argument) { 3194 SetRawInputAt(index, argument); 3195 } 3196 3197 // Return the number of arguments. This number can be lower than 3198 // the number of inputs returned by InputCount(), as some invoke 3199 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3200 // inputs at the end of their list of inputs. 
3201 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3202 3203 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3204 3205 3206 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3207 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3208 3209 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3210 3211 Intrinsics GetIntrinsic() const { 3212 return intrinsic_; 3213 } 3214 3215 void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache); 3216 3217 bool IsFromInlinedInvoke() const { 3218 return GetEnvironment()->GetParent() != nullptr; 3219 } 3220 3221 bool CanThrow() const OVERRIDE { return true; } 3222 3223 uint32_t* GetIntrinsicOptimizations() { 3224 return &intrinsic_optimizations_; 3225 } 3226 3227 const uint32_t* GetIntrinsicOptimizations() const { 3228 return &intrinsic_optimizations_; 3229 } 3230 3231 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3232 3233 DECLARE_INSTRUCTION(Invoke); 3234 3235 protected: 3236 HInvoke(ArenaAllocator* arena, 3237 uint32_t number_of_arguments, 3238 uint32_t number_of_other_inputs, 3239 Primitive::Type return_type, 3240 uint32_t dex_pc, 3241 uint32_t dex_method_index, 3242 InvokeType original_invoke_type) 3243 : HInstruction( 3244 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 
3245 number_of_arguments_(number_of_arguments), 3246 inputs_(number_of_arguments + number_of_other_inputs, 3247 arena->Adapter(kArenaAllocInvokeInputs)), 3248 return_type_(return_type), 3249 dex_method_index_(dex_method_index), 3250 original_invoke_type_(original_invoke_type), 3251 intrinsic_(Intrinsics::kNone), 3252 intrinsic_optimizations_(0) { 3253 } 3254 3255 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3256 return inputs_[index]; 3257 } 3258 3259 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3260 inputs_[index] = input; 3261 } 3262 3263 uint32_t number_of_arguments_; 3264 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3265 const Primitive::Type return_type_; 3266 const uint32_t dex_method_index_; 3267 const InvokeType original_invoke_type_; 3268 Intrinsics intrinsic_; 3269 3270 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3271 uint32_t intrinsic_optimizations_; 3272 3273 private: 3274 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3275}; 3276 3277class HInvokeUnresolved : public HInvoke { 3278 public: 3279 HInvokeUnresolved(ArenaAllocator* arena, 3280 uint32_t number_of_arguments, 3281 Primitive::Type return_type, 3282 uint32_t dex_pc, 3283 uint32_t dex_method_index, 3284 InvokeType invoke_type) 3285 : HInvoke(arena, 3286 number_of_arguments, 3287 0u /* number_of_other_inputs */, 3288 return_type, 3289 dex_pc, 3290 dex_method_index, 3291 invoke_type) { 3292 } 3293 3294 DECLARE_INSTRUCTION(InvokeUnresolved); 3295 3296 private: 3297 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3298}; 3299 3300class HInvokeStaticOrDirect : public HInvoke { 3301 public: 3302 // Requirements of this method call regarding the class 3303 // initialization (clinit) check of its declaring class. 3304 enum class ClinitCheckRequirement { 3305 kNone, // Class already initialized. 3306 kExplicit, // Static call having explicit clinit check as last input. 
3307 kImplicit, // Static call implicitly requiring a clinit check. 3308 }; 3309 3310 // Determines how to load the target ArtMethod*. 3311 enum class MethodLoadKind { 3312 // Use a String init ArtMethod* loaded from Thread entrypoints. 3313 kStringInit, 3314 3315 // Use the method's own ArtMethod* loaded by the register allocator. 3316 kRecursive, 3317 3318 // Use ArtMethod* at a known address, embed the direct address in the code. 3319 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3320 kDirectAddress, 3321 3322 // Use ArtMethod* at an address that will be known at link time, embed the direct 3323 // address in the code. If the image is relocatable, emit .patch_oat entry. 3324 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3325 // the image relocatable or not. 3326 kDirectAddressWithFixup, 3327 3328 // Load from resoved methods array in the dex cache using a PC-relative load. 3329 // Used when we need to use the dex cache, for example for invoke-static that 3330 // may cause class initialization (the entry may point to a resolution method), 3331 // and we know that we can access the dex cache arrays using a PC-relative load. 3332 kDexCachePcRelative, 3333 3334 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3335 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3336 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3337 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3338 kDexCacheViaMethod, 3339 }; 3340 3341 // Determines the location of the code pointer. 3342 enum class CodePtrLocation { 3343 // Recursive call, use local PC-relative call instruction. 3344 kCallSelf, 3345 3346 // Use PC-relative call instruction patched at link time. 3347 // Used for calls within an oat file, boot->boot or app->app. 
3348 kCallPCRelative, 3349 3350 // Call to a known target address, embed the direct address in code. 3351 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3352 kCallDirect, 3353 3354 // Call to a target address that will be known at link time, embed the direct 3355 // address in code. If the image is relocatable, emit .patch_oat entry. 3356 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3357 // the image relocatable or not. 3358 kCallDirectWithFixup, 3359 3360 // Use code pointer from the ArtMethod*. 3361 // Used when we don't know the target code. This is also the last-resort-kind used when 3362 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3363 kCallArtMethod, 3364 }; 3365 3366 struct DispatchInfo { 3367 const MethodLoadKind method_load_kind; 3368 const CodePtrLocation code_ptr_location; 3369 // The method load data holds 3370 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3371 // Note that there are multiple string init methods, each having its own offset. 3372 // - the method address for kDirectAddress 3373 // - the dex cache arrays offset for kDexCachePcRel. 3374 const uint64_t method_load_data; 3375 const uint64_t direct_code_ptr; 3376 }; 3377 3378 HInvokeStaticOrDirect(ArenaAllocator* arena, 3379 uint32_t number_of_arguments, 3380 Primitive::Type return_type, 3381 uint32_t dex_pc, 3382 uint32_t method_index, 3383 MethodReference target_method, 3384 DispatchInfo dispatch_info, 3385 InvokeType original_invoke_type, 3386 InvokeType invoke_type, 3387 ClinitCheckRequirement clinit_check_requirement) 3388 : HInvoke(arena, 3389 number_of_arguments, 3390 // There is one extra argument for the HCurrentMethod node, and 3391 // potentially one other if the clinit check is explicit, and one other 3392 // if the method is a string factory. 3393 1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 
1u : 0u) 3394 + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3395 return_type, 3396 dex_pc, 3397 method_index, 3398 original_invoke_type), 3399 invoke_type_(invoke_type), 3400 clinit_check_requirement_(clinit_check_requirement), 3401 target_method_(target_method), 3402 dispatch_info_(dispatch_info) {} 3403 3404 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3405 // We access the method via the dex cache so we can't do an implicit null check. 3406 // TODO: for intrinsics we can generate implicit null checks. 3407 return false; 3408 } 3409 3410 bool CanBeNull() const OVERRIDE { 3411 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3412 } 3413 3414 InvokeType GetInvokeType() const { return invoke_type_; } 3415 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3416 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3417 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3418 bool NeedsDexCache() const OVERRIDE; 3419 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3420 uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); } 3421 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3422 bool HasPcRelDexCache() const { return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; } 3423 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3424 MethodReference GetTargetMethod() const { return target_method_; } 3425 3426 int32_t GetStringInitOffset() const { 3427 DCHECK(IsStringInit()); 3428 return dispatch_info_.method_load_data; 3429 } 3430 3431 uint64_t GetMethodAddress() const { 3432 DCHECK(HasMethodAddress()); 3433 return dispatch_info_.method_load_data; 3434 } 3435 3436 uint32_t GetDexCacheArrayOffset() const { 3437 
DCHECK(HasPcRelDexCache()); 3438 return dispatch_info_.method_load_data; 3439 } 3440 3441 uint64_t GetDirectCodePtr() const { 3442 DCHECK(HasDirectCodePtr()); 3443 return dispatch_info_.direct_code_ptr; 3444 } 3445 3446 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3447 3448 // Is this instruction a call to a static method? 3449 bool IsStatic() const { 3450 return GetInvokeType() == kStatic; 3451 } 3452 3453 // Remove the art::HLoadClass instruction set as last input by 3454 // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of 3455 // the initial art::HClinitCheck instruction (only relevant for 3456 // static calls with explicit clinit check). 3457 void RemoveLoadClassAsLastInput() { 3458 DCHECK(IsStaticWithExplicitClinitCheck()); 3459 size_t last_input_index = InputCount() - 1; 3460 HInstruction* last_input = InputAt(last_input_index); 3461 DCHECK(last_input != nullptr); 3462 DCHECK(last_input->IsLoadClass()) << last_input->DebugName(); 3463 RemoveAsUserOfInput(last_input_index); 3464 inputs_.pop_back(); 3465 clinit_check_requirement_ = ClinitCheckRequirement::kImplicit; 3466 DCHECK(IsStaticWithImplicitClinitCheck()); 3467 } 3468 3469 bool IsStringFactoryFor(HFakeString* str) const { 3470 if (!IsStringInit()) return false; 3471 // +1 for the current method. 3472 if (InputCount() == (number_of_arguments_ + 1)) return false; 3473 return InputAt(InputCount() - 1)->AsFakeString() == str; 3474 } 3475 3476 void RemoveFakeStringArgumentAsLastInput() { 3477 DCHECK(IsStringInit()); 3478 size_t last_input_index = InputCount() - 1; 3479 HInstruction* last_input = InputAt(last_input_index); 3480 DCHECK(last_input != nullptr); 3481 DCHECK(last_input->IsFakeString()) << last_input->DebugName(); 3482 RemoveAsUserOfInput(last_input_index); 3483 inputs_.pop_back(); 3484 } 3485 3486 // Is this a call to a static method whose declaring class has an 3487 // explicit intialization check in the graph? 
3488 bool IsStaticWithExplicitClinitCheck() const { 3489 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3490 } 3491 3492 // Is this a call to a static method whose declaring class has an 3493 // implicit intialization check requirement? 3494 bool IsStaticWithImplicitClinitCheck() const { 3495 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3496 } 3497 3498 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3499 3500 protected: 3501 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3502 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3503 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3504 HInstruction* input = input_record.GetInstruction(); 3505 // `input` is the last input of a static invoke marked as having 3506 // an explicit clinit check. It must either be: 3507 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3508 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3509 DCHECK(input != nullptr); 3510 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3511 } 3512 return input_record; 3513 } 3514 3515 private: 3516 const InvokeType invoke_type_; 3517 ClinitCheckRequirement clinit_check_requirement_; 3518 // The target method may refer to different dex file or method index than the original 3519 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3520 // in derived class to increase visibility. 
3521 MethodReference target_method_; 3522 DispatchInfo dispatch_info_; 3523 3524 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3525}; 3526 3527class HInvokeVirtual : public HInvoke { 3528 public: 3529 HInvokeVirtual(ArenaAllocator* arena, 3530 uint32_t number_of_arguments, 3531 Primitive::Type return_type, 3532 uint32_t dex_pc, 3533 uint32_t dex_method_index, 3534 uint32_t vtable_index) 3535 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3536 vtable_index_(vtable_index) {} 3537 3538 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3539 // TODO: Add implicit null checks in intrinsics. 3540 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3541 } 3542 3543 uint32_t GetVTableIndex() const { return vtable_index_; } 3544 3545 DECLARE_INSTRUCTION(InvokeVirtual); 3546 3547 private: 3548 const uint32_t vtable_index_; 3549 3550 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3551}; 3552 3553class HInvokeInterface : public HInvoke { 3554 public: 3555 HInvokeInterface(ArenaAllocator* arena, 3556 uint32_t number_of_arguments, 3557 Primitive::Type return_type, 3558 uint32_t dex_pc, 3559 uint32_t dex_method_index, 3560 uint32_t imt_index) 3561 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3562 imt_index_(imt_index) {} 3563 3564 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3565 // TODO: Add implicit null checks in intrinsics. 
3566 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3567 } 3568 3569 uint32_t GetImtIndex() const { return imt_index_; } 3570 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3571 3572 DECLARE_INSTRUCTION(InvokeInterface); 3573 3574 private: 3575 const uint32_t imt_index_; 3576 3577 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3578}; 3579 3580class HNewInstance : public HExpression<1> { 3581 public: 3582 HNewInstance(HCurrentMethod* current_method, 3583 uint32_t dex_pc, 3584 uint16_t type_index, 3585 const DexFile& dex_file, 3586 QuickEntrypointEnum entrypoint) 3587 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3588 type_index_(type_index), 3589 dex_file_(dex_file), 3590 entrypoint_(entrypoint) { 3591 SetRawInputAt(0, current_method); 3592 } 3593 3594 uint16_t GetTypeIndex() const { return type_index_; } 3595 const DexFile& GetDexFile() const { return dex_file_; } 3596 3597 // Calls runtime so needs an environment. 3598 bool NeedsEnvironment() const OVERRIDE { return true; } 3599 // It may throw when called on: 3600 // - interfaces 3601 // - abstract/innaccessible/unknown classes 3602 // TODO: optimize when possible. 
3603 bool CanThrow() const OVERRIDE { return true; } 3604 3605 bool CanBeNull() const OVERRIDE { return false; } 3606 3607 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3608 3609 DECLARE_INSTRUCTION(NewInstance); 3610 3611 private: 3612 const uint16_t type_index_; 3613 const DexFile& dex_file_; 3614 const QuickEntrypointEnum entrypoint_; 3615 3616 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3617}; 3618 3619class HNeg : public HUnaryOperation { 3620 public: 3621 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3622 : HUnaryOperation(result_type, input, dex_pc) {} 3623 3624 template <typename T> T Compute(T x) const { return -x; } 3625 3626 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3627 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3628 } 3629 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3630 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3631 } 3632 3633 DECLARE_INSTRUCTION(Neg); 3634 3635 private: 3636 DISALLOW_COPY_AND_ASSIGN(HNeg); 3637}; 3638 3639class HNewArray : public HExpression<2> { 3640 public: 3641 HNewArray(HInstruction* length, 3642 HCurrentMethod* current_method, 3643 uint32_t dex_pc, 3644 uint16_t type_index, 3645 const DexFile& dex_file, 3646 QuickEntrypointEnum entrypoint) 3647 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3648 type_index_(type_index), 3649 dex_file_(dex_file), 3650 entrypoint_(entrypoint) { 3651 SetRawInputAt(0, length); 3652 SetRawInputAt(1, current_method); 3653 } 3654 3655 uint16_t GetTypeIndex() const { return type_index_; } 3656 const DexFile& GetDexFile() const { return dex_file_; } 3657 3658 // Calls runtime so needs an environment. 3659 bool NeedsEnvironment() const OVERRIDE { return true; } 3660 3661 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 
3662 bool CanThrow() const OVERRIDE { return true; } 3663 3664 bool CanBeNull() const OVERRIDE { return false; } 3665 3666 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3667 3668 DECLARE_INSTRUCTION(NewArray); 3669 3670 private: 3671 const uint16_t type_index_; 3672 const DexFile& dex_file_; 3673 const QuickEntrypointEnum entrypoint_; 3674 3675 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3676}; 3677 3678class HAdd : public HBinaryOperation { 3679 public: 3680 HAdd(Primitive::Type result_type, 3681 HInstruction* left, 3682 HInstruction* right, 3683 uint32_t dex_pc = kNoDexPc) 3684 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3685 3686 bool IsCommutative() const OVERRIDE { return true; } 3687 3688 template <typename T> T Compute(T x, T y) const { return x + y; } 3689 3690 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3691 return GetBlock()->GetGraph()->GetIntConstant( 3692 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3693 } 3694 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3695 return GetBlock()->GetGraph()->GetLongConstant( 3696 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3697 } 3698 3699 DECLARE_INSTRUCTION(Add); 3700 3701 private: 3702 DISALLOW_COPY_AND_ASSIGN(HAdd); 3703}; 3704 3705class HSub : public HBinaryOperation { 3706 public: 3707 HSub(Primitive::Type result_type, 3708 HInstruction* left, 3709 HInstruction* right, 3710 uint32_t dex_pc = kNoDexPc) 3711 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3712 3713 template <typename T> T Compute(T x, T y) const { return x - y; } 3714 3715 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3716 return GetBlock()->GetGraph()->GetIntConstant( 3717 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3718 } 3719 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3720 return GetBlock()->GetGraph()->GetLongConstant( 3721 
Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3722 } 3723 3724 DECLARE_INSTRUCTION(Sub); 3725 3726 private: 3727 DISALLOW_COPY_AND_ASSIGN(HSub); 3728}; 3729 3730class HMul : public HBinaryOperation { 3731 public: 3732 HMul(Primitive::Type result_type, 3733 HInstruction* left, 3734 HInstruction* right, 3735 uint32_t dex_pc = kNoDexPc) 3736 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3737 3738 bool IsCommutative() const OVERRIDE { return true; } 3739 3740 template <typename T> T Compute(T x, T y) const { return x * y; } 3741 3742 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3743 return GetBlock()->GetGraph()->GetIntConstant( 3744 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3745 } 3746 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3747 return GetBlock()->GetGraph()->GetLongConstant( 3748 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3749 } 3750 3751 DECLARE_INSTRUCTION(Mul); 3752 3753 private: 3754 DISALLOW_COPY_AND_ASSIGN(HMul); 3755}; 3756 3757class HDiv : public HBinaryOperation { 3758 public: 3759 HDiv(Primitive::Type result_type, 3760 HInstruction* left, 3761 HInstruction* right, 3762 uint32_t dex_pc) 3763 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3764 3765 template <typename T> 3766 T Compute(T x, T y) const { 3767 // Our graph structure ensures we never have 0 for `y` during 3768 // constant folding. 3769 DCHECK_NE(y, 0); 3770 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3771 return (y == -1) ? 
-x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Remainder node (`left % right`). Like HDiv, codegen may emit a runtime
// call, hence the CanTriggerGC() side effect and the mandatory dex_pc.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ?
0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }


  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Guard inserted before a division/remainder whose divisor may be zero.
// Forwards its input value (GetType() is the input's type); throws at
// runtime, hence CanThrow()/NeedsEnvironment().
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is transparent: its type is that of the guarded value.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Left-shift node. The shift distance is masked with the type's maximum
// shift value (kMaxIntShiftValue / kMaxLongShiftValue) before shifting.
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Signed (arithmetic) right-shift node; shift distance is masked with the
// type's maximum shift value. Compare HUShr, which shifts the unsigned
// representation instead.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Unsigned (logical) right-shift node: the value is reinterpreted as
// unsigned before shifting, so vacated high bits are zero-filled.
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    // Shift on the unsigned representation to get zero-extension.
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND node. Commutative; mixed int/long constant operands fold to
// a long result, see the Evaluate() overloads.
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // The trailing return type lets mixed int/long operands promote naturally.
  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR node; structured exactly like HAnd.
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction*
right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // The trailing return type lets mixed int/long operands promote naturally.
  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x | y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};

// Bitwise XOR node; structured exactly like HAnd.
class HXor : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // The trailing return type lets mixed int/long operands promote naturally.
  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()),
GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};

// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        index_(index),
        is_this_(is_this),
        // The implicit `this` argument can never be null.
        can_be_null_(!is_this) {}

  uint8_t GetIndex() const { return index_; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  bool IsThis() const { return is_this_; }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  // Whether or not the parameter value corresponds to 'this' argument.
const bool is_this_;

  // Starts as !is_this_; mutable via SetCanBeNull().
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement node (`~x`) for int/long values.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean (0/1) value; undefined for long inputs.
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> bool Compute(T x) const {
    // Inputs must already be canonical booleans (0 or 1).
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

// Conversion of a value between two distinct primitive types.
class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate
a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // A conversion to the same type would be a no-op and must not be created.
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86 and ARM code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

// Sentinel register number (-1 converted to uint32_t, i.e. 0xFFFFFFFF).
static constexpr uint32_t kNoRegNumber = -1;

// SSA phi: merges one value per predecessor of its block into a single
// definition for the dex register `reg_number`.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(type),
        // Phis start dead and must be explicitly marked live (SetLive()).
        is_live_(false),
        can_be_null_(true) {
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      // Sub-int types are widened to int when flowing through a phi.
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type type) { type_ = type; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return
is_live_; }

  // Returns whether this phi and `other` are phis for the same dex register
  // in the same block.
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Null check node: throws at runtime if its input is null, and forwards the
// value, which is known non-null afterwards (CanBeNull() is false).
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Immutable value object bundling the metadata of a field access: offset,
// type, volatility, dex field index/file and the dex cache handle. Shared
// by the field get/set instruction classes.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Read of an instance field: input 0 is the object reference.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(
            field_type,
            SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool
CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // The access must fall within the first page for a fault-based implicit
    // null check to cover it.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Write of an instance field: input 0 is the object, input 1 the value.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(
            SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Same first-page constraint as HInstanceFieldGet.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void
ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  // Whether the stored value may be null; cleared by optimizations that
  // can prove non-nullness.
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Read of an array element: inputs are the array reference and the index.
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
return false;
  }

  void SetType(Primitive::Type type) { type_ = type; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

// Write of an array element: inputs are the array, the index and the value.
// Reference stores may require a runtime type check and can then throw.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc),
        expected_component_type_(expected_component_type),
        // Only reference stores need a runtime type check.
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
return false;
  }

  void ClearNeedsTypeCheck() {
    needs_type_check_ = false;
  }

  void ClearValueCanBeNull() {
    value_can_be_null_ = false;
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    static_type_of_array_is_object_array_ = true;
  }

  bool GetValueCanBeNull() const { return value_can_be_null_; }
  bool NeedsTypeCheck() const { return needs_type_check_; }
  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : expected_component_type_;
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return expected_component_type_;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    // Reference stores may call into the runtime, which can trigger GC.
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  const Primitive::Type expected_component_type_;
  bool needs_type_check_;
  bool value_can_be_null_;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
bool static_type_of_array_is_object_array_;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

// Reads the length of an array. Arrays never change length, so this has no
// side effects and can be freely moved and deduplicated.
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // The length load itself can serve as the implicit null check on the
    // array input.
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};

// Bounds check guard: input 0 is the (int) index, input 1 the length.
// Forwards the index value; throws at runtime on failure.
class HBoundsCheck : public HExpression<2> {
 public:
  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
      : HExpression(index->GetType(), SideEffects::None(), dex_pc) {
    DCHECK(index->GetType() == Primitive::kPrimInt);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};

/**
 * Some DEX instructions are folded into multiple HInstructions that need
 * to stay live until the last HInstruction. This class
 * is used as a marker for the baseline compiler to ensure its preceding
 * HInstruction stays live.
`index` represents the stack location index of the
 * instruction (the actual offset is computed as index * vreg_size).
 */
class HTemporary : public HTemplateInstruction<0> {
 public:
  explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {}

  size_t GetIndex() const { return index_; }

  Primitive::Type GetType() const OVERRIDE {
    // The previous instruction is the one that will be stored in the temporary location.
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Safepoint at which the thread may service a pending suspension request;
// carries the CanTriggerGC() side effect and always needs an environment.
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

/**
 * Instruction to load a Class object.
*/
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  // A successfully loaded class is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    // Will call runtime and load the class if the class is not loaded yet.
    // TODO: finer grain decision.
4703 return !is_referrers_class_; 4704 } 4705 4706 bool MustGenerateClinitCheck() const { 4707 return generate_clinit_check_; 4708 } 4709 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 4710 generate_clinit_check_ = generate_clinit_check; 4711 } 4712 4713 bool CanCallRuntime() const { 4714 return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_; 4715 } 4716 4717 bool NeedsAccessCheck() const { 4718 return needs_access_check_; 4719 } 4720 4721 bool CanThrow() const OVERRIDE { 4722 // May call runtime and and therefore can throw. 4723 // TODO: finer grain decision. 4724 return CanCallRuntime(); 4725 } 4726 4727 ReferenceTypeInfo GetLoadedClassRTI() { 4728 return loaded_class_rti_; 4729 } 4730 4731 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 4732 // Make sure we only set exact types (the loaded class should never be merged). 4733 DCHECK(rti.IsExact()); 4734 loaded_class_rti_ = rti; 4735 } 4736 4737 const DexFile& GetDexFile() { return dex_file_; } 4738 4739 bool NeedsDexCache() const OVERRIDE { return !is_referrers_class_; } 4740 4741 static SideEffects SideEffectsForArchRuntimeCalls() { 4742 return SideEffects::CanTriggerGC(); 4743 } 4744 4745 DECLARE_INSTRUCTION(LoadClass); 4746 4747 private: 4748 const uint16_t type_index_; 4749 const DexFile& dex_file_; 4750 const bool is_referrers_class_; 4751 // Whether this instruction must generate the initialization check. 4752 // Used for code generation. 
4753 bool generate_clinit_check_; 4754 bool needs_access_check_; 4755 4756 ReferenceTypeInfo loaded_class_rti_; 4757 4758 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 4759}; 4760 4761class HLoadString : public HExpression<1> { 4762 public: 4763 HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc) 4764 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4765 string_index_(string_index) { 4766 SetRawInputAt(0, current_method); 4767 } 4768 4769 bool CanBeMoved() const OVERRIDE { return true; } 4770 4771 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4772 return other->AsLoadString()->string_index_ == string_index_; 4773 } 4774 4775 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 4776 4777 uint32_t GetStringIndex() const { return string_index_; } 4778 4779 // TODO: Can we deopt or debug when we resolve a string? 4780 bool NeedsEnvironment() const OVERRIDE { return false; } 4781 bool NeedsDexCache() const OVERRIDE { return true; } 4782 bool CanBeNull() const OVERRIDE { return false; } 4783 4784 static SideEffects SideEffectsForArchRuntimeCalls() { 4785 return SideEffects::CanTriggerGC(); 4786 } 4787 4788 DECLARE_INSTRUCTION(LoadString); 4789 4790 private: 4791 const uint32_t string_index_; 4792 4793 DISALLOW_COPY_AND_ASSIGN(HLoadString); 4794}; 4795 4796/** 4797 * Performs an initialization check on its Class object input. 4798 */ 4799class HClinitCheck : public HExpression<1> { 4800 public: 4801 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 4802 : HExpression( 4803 Primitive::kPrimNot, 4804 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 
4805 dex_pc) { 4806 SetRawInputAt(0, constant); 4807 } 4808 4809 bool CanBeMoved() const OVERRIDE { return true; } 4810 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4811 return true; 4812 } 4813 4814 bool NeedsEnvironment() const OVERRIDE { 4815 // May call runtime to initialize the class. 4816 return true; 4817 } 4818 4819 4820 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 4821 4822 DECLARE_INSTRUCTION(ClinitCheck); 4823 4824 private: 4825 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 4826}; 4827 4828class HStaticFieldGet : public HExpression<1> { 4829 public: 4830 HStaticFieldGet(HInstruction* cls, 4831 Primitive::Type field_type, 4832 MemberOffset field_offset, 4833 bool is_volatile, 4834 uint32_t field_idx, 4835 const DexFile& dex_file, 4836 Handle<mirror::DexCache> dex_cache, 4837 uint32_t dex_pc) 4838 : HExpression( 4839 field_type, 4840 SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc), 4841 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache) { 4842 SetRawInputAt(0, cls); 4843 } 4844 4845 4846 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4847 4848 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4849 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 4850 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4851 } 4852 4853 size_t ComputeHashCode() const OVERRIDE { 4854 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4855 } 4856 4857 const FieldInfo& GetFieldInfo() const { return field_info_; } 4858 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4859 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4860 bool IsVolatile() const { return field_info_.IsVolatile(); } 4861 4862 DECLARE_INSTRUCTION(StaticFieldGet); 4863 4864 private: 4865 const FieldInfo field_info_; 4866 4867 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 
4868}; 4869 4870class HStaticFieldSet : public HTemplateInstruction<2> { 4871 public: 4872 HStaticFieldSet(HInstruction* cls, 4873 HInstruction* value, 4874 Primitive::Type field_type, 4875 MemberOffset field_offset, 4876 bool is_volatile, 4877 uint32_t field_idx, 4878 const DexFile& dex_file, 4879 Handle<mirror::DexCache> dex_cache, 4880 uint32_t dex_pc) 4881 : HTemplateInstruction( 4882 SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc), 4883 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache), 4884 value_can_be_null_(true) { 4885 SetRawInputAt(0, cls); 4886 SetRawInputAt(1, value); 4887 } 4888 4889 const FieldInfo& GetFieldInfo() const { return field_info_; } 4890 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4891 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4892 bool IsVolatile() const { return field_info_.IsVolatile(); } 4893 4894 HInstruction* GetValue() const { return InputAt(1); } 4895 bool GetValueCanBeNull() const { return value_can_be_null_; } 4896 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4897 4898 DECLARE_INSTRUCTION(StaticFieldSet); 4899 4900 private: 4901 const FieldInfo field_info_; 4902 bool value_can_be_null_; 4903 4904 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 4905}; 4906 4907class HUnresolvedInstanceFieldGet : public HExpression<1> { 4908 public: 4909 HUnresolvedInstanceFieldGet(HInstruction* obj, 4910 Primitive::Type field_type, 4911 uint32_t field_index, 4912 uint32_t dex_pc) 4913 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 4914 field_index_(field_index) { 4915 SetRawInputAt(0, obj); 4916 } 4917 4918 bool NeedsEnvironment() const OVERRIDE { return true; } 4919 bool CanThrow() const OVERRIDE { return true; } 4920 4921 Primitive::Type GetFieldType() const { return GetType(); } 4922 uint32_t GetFieldIndex() const { return field_index_; } 4923 4924 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 
4925 4926 private: 4927 const uint32_t field_index_; 4928 4929 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 4930}; 4931 4932class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 4933 public: 4934 HUnresolvedInstanceFieldSet(HInstruction* obj, 4935 HInstruction* value, 4936 Primitive::Type field_type, 4937 uint32_t field_index, 4938 uint32_t dex_pc) 4939 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 4940 field_type_(field_type), 4941 field_index_(field_index) { 4942 DCHECK_EQ(field_type, value->GetType()); 4943 SetRawInputAt(0, obj); 4944 SetRawInputAt(1, value); 4945 } 4946 4947 bool NeedsEnvironment() const OVERRIDE { return true; } 4948 bool CanThrow() const OVERRIDE { return true; } 4949 4950 Primitive::Type GetFieldType() const { return field_type_; } 4951 uint32_t GetFieldIndex() const { return field_index_; } 4952 4953 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 4954 4955 private: 4956 const Primitive::Type field_type_; 4957 const uint32_t field_index_; 4958 4959 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 4960}; 4961 4962class HUnresolvedStaticFieldGet : public HExpression<0> { 4963 public: 4964 HUnresolvedStaticFieldGet(Primitive::Type field_type, 4965 uint32_t field_index, 4966 uint32_t dex_pc) 4967 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 4968 field_index_(field_index) { 4969 } 4970 4971 bool NeedsEnvironment() const OVERRIDE { return true; } 4972 bool CanThrow() const OVERRIDE { return true; } 4973 4974 Primitive::Type GetFieldType() const { return GetType(); } 4975 uint32_t GetFieldIndex() const { return field_index_; } 4976 4977 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 4978 4979 private: 4980 const uint32_t field_index_; 4981 4982 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 4983}; 4984 4985class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 4986 public: 4987 HUnresolvedStaticFieldSet(HInstruction* value, 4988 Primitive::Type 
field_type, 4989 uint32_t field_index, 4990 uint32_t dex_pc) 4991 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 4992 field_type_(field_type), 4993 field_index_(field_index) { 4994 DCHECK_EQ(field_type, value->GetType()); 4995 SetRawInputAt(0, value); 4996 } 4997 4998 bool NeedsEnvironment() const OVERRIDE { return true; } 4999 bool CanThrow() const OVERRIDE { return true; } 5000 5001 Primitive::Type GetFieldType() const { return field_type_; } 5002 uint32_t GetFieldIndex() const { return field_index_; } 5003 5004 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 5005 5006 private: 5007 const Primitive::Type field_type_; 5008 const uint32_t field_index_; 5009 5010 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 5011}; 5012 5013// Implement the move-exception DEX instruction. 5014class HLoadException : public HExpression<0> { 5015 public: 5016 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 5017 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 5018 5019 bool CanBeNull() const OVERRIDE { return false; } 5020 5021 DECLARE_INSTRUCTION(LoadException); 5022 5023 private: 5024 DISALLOW_COPY_AND_ASSIGN(HLoadException); 5025}; 5026 5027// Implicit part of move-exception which clears thread-local exception storage. 5028// Must not be removed because the runtime expects the TLS to get cleared. 
5029class HClearException : public HTemplateInstruction<0> { 5030 public: 5031 explicit HClearException(uint32_t dex_pc = kNoDexPc) 5032 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 5033 5034 DECLARE_INSTRUCTION(ClearException); 5035 5036 private: 5037 DISALLOW_COPY_AND_ASSIGN(HClearException); 5038}; 5039 5040class HThrow : public HTemplateInstruction<1> { 5041 public: 5042 HThrow(HInstruction* exception, uint32_t dex_pc) 5043 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 5044 SetRawInputAt(0, exception); 5045 } 5046 5047 bool IsControlFlow() const OVERRIDE { return true; } 5048 5049 bool NeedsEnvironment() const OVERRIDE { return true; } 5050 5051 bool CanThrow() const OVERRIDE { return true; } 5052 5053 5054 DECLARE_INSTRUCTION(Throw); 5055 5056 private: 5057 DISALLOW_COPY_AND_ASSIGN(HThrow); 5058}; 5059 5060/** 5061 * Implementation strategies for the code generator of a HInstanceOf 5062 * or `HCheckCast`. 5063 */ 5064enum class TypeCheckKind { 5065 kUnresolvedCheck, // Check against an unresolved type. 5066 kExactCheck, // Can do a single class compare. 5067 kClassHierarchyCheck, // Can just walk the super class chain. 5068 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 5069 kInterfaceCheck, // No optimization yet when checking against an interface. 5070 kArrayObjectCheck, // Can just check if the array is not primitive. 5071 kArrayCheck // No optimization yet when checking against a generic array. 
5072}; 5073 5074class HInstanceOf : public HExpression<2> { 5075 public: 5076 HInstanceOf(HInstruction* object, 5077 HLoadClass* constant, 5078 TypeCheckKind check_kind, 5079 uint32_t dex_pc) 5080 : HExpression(Primitive::kPrimBoolean, 5081 SideEffectsForArchRuntimeCalls(check_kind), 5082 dex_pc), 5083 check_kind_(check_kind), 5084 must_do_null_check_(true) { 5085 SetRawInputAt(0, object); 5086 SetRawInputAt(1, constant); 5087 } 5088 5089 bool CanBeMoved() const OVERRIDE { return true; } 5090 5091 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5092 return true; 5093 } 5094 5095 bool NeedsEnvironment() const OVERRIDE { 5096 return false; 5097 } 5098 5099 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5100 5101 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5102 5103 // Used only in code generation. 5104 bool MustDoNullCheck() const { return must_do_null_check_; } 5105 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5106 5107 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 5108 return (check_kind == TypeCheckKind::kExactCheck) 5109 ? SideEffects::None() 5110 // Mips currently does runtime calls for any other checks. 5111 : SideEffects::CanTriggerGC(); 5112 } 5113 5114 DECLARE_INSTRUCTION(InstanceOf); 5115 5116 private: 5117 const TypeCheckKind check_kind_; 5118 bool must_do_null_check_; 5119 5120 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 5121}; 5122 5123class HBoundType : public HExpression<1> { 5124 public: 5125 // Constructs an HBoundType with the given upper_bound. 5126 // Ensures that the upper_bound is valid. 
5127 HBoundType(HInstruction* input, 5128 ReferenceTypeInfo upper_bound, 5129 bool upper_can_be_null, 5130 uint32_t dex_pc = kNoDexPc) 5131 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 5132 upper_bound_(upper_bound), 5133 upper_can_be_null_(upper_can_be_null), 5134 can_be_null_(upper_can_be_null) { 5135 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 5136 SetRawInputAt(0, input); 5137 SetReferenceTypeInfo(upper_bound_); 5138 } 5139 5140 // GetUpper* should only be used in reference type propagation. 5141 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 5142 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 5143 5144 void SetCanBeNull(bool can_be_null) { 5145 DCHECK(upper_can_be_null_ || !can_be_null); 5146 can_be_null_ = can_be_null; 5147 } 5148 5149 bool CanBeNull() const OVERRIDE { return can_be_null_; } 5150 5151 DECLARE_INSTRUCTION(BoundType); 5152 5153 private: 5154 // Encodes the most upper class that this instruction can have. In other words 5155 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 5156 // It is used to bound the type in cases like: 5157 // if (x instanceof ClassX) { 5158 // // uper_bound_ will be ClassX 5159 // } 5160 const ReferenceTypeInfo upper_bound_; 5161 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5162 // is false then can_be_null_ cannot be true). 
5163 const bool upper_can_be_null_; 5164 bool can_be_null_; 5165 5166 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5167}; 5168 5169class HCheckCast : public HTemplateInstruction<2> { 5170 public: 5171 HCheckCast(HInstruction* object, 5172 HLoadClass* constant, 5173 TypeCheckKind check_kind, 5174 uint32_t dex_pc) 5175 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5176 check_kind_(check_kind), 5177 must_do_null_check_(true) { 5178 SetRawInputAt(0, object); 5179 SetRawInputAt(1, constant); 5180 } 5181 5182 bool CanBeMoved() const OVERRIDE { return true; } 5183 5184 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5185 return true; 5186 } 5187 5188 bool NeedsEnvironment() const OVERRIDE { 5189 // Instruction may throw a CheckCastError. 5190 return true; 5191 } 5192 5193 bool CanThrow() const OVERRIDE { return true; } 5194 5195 bool MustDoNullCheck() const { return must_do_null_check_; } 5196 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5197 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5198 5199 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5200 5201 DECLARE_INSTRUCTION(CheckCast); 5202 5203 private: 5204 const TypeCheckKind check_kind_; 5205 bool must_do_null_check_; 5206 5207 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5208}; 5209 5210class HMemoryBarrier : public HTemplateInstruction<0> { 5211 public: 5212 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5213 : HTemplateInstruction( 5214 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 
5215 barrier_kind_(barrier_kind) {} 5216 5217 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5218 5219 DECLARE_INSTRUCTION(MemoryBarrier); 5220 5221 private: 5222 const MemBarrierKind barrier_kind_; 5223 5224 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5225}; 5226 5227class HMonitorOperation : public HTemplateInstruction<1> { 5228 public: 5229 enum OperationKind { 5230 kEnter, 5231 kExit, 5232 }; 5233 5234 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5235 : HTemplateInstruction( 5236 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5237 kind_(kind) { 5238 SetRawInputAt(0, object); 5239 } 5240 5241 // Instruction may throw a Java exception, so we need an environment. 5242 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5243 5244 bool CanThrow() const OVERRIDE { 5245 // Verifier guarantees that monitor-exit cannot throw. 5246 // This is important because it allows the HGraphBuilder to remove 5247 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5248 return IsEnter(); 5249 } 5250 5251 5252 bool IsEnter() const { return kind_ == kEnter; } 5253 5254 DECLARE_INSTRUCTION(MonitorOperation); 5255 5256 private: 5257 const OperationKind kind_; 5258 5259 private: 5260 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5261}; 5262 5263/** 5264 * A HInstruction used as a marker for the replacement of new + <init> 5265 * of a String to a call to a StringFactory. Only baseline will see 5266 * the node at code generation, where it will be be treated as null. 5267 * When compiling non-baseline, `HFakeString` instructions are being removed 5268 * in the instruction simplifier. 
5269 */ 5270class HFakeString : public HTemplateInstruction<0> { 5271 public: 5272 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5273 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5274 5275 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5276 5277 DECLARE_INSTRUCTION(FakeString); 5278 5279 private: 5280 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5281}; 5282 5283class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5284 public: 5285 MoveOperands(Location source, 5286 Location destination, 5287 Primitive::Type type, 5288 HInstruction* instruction) 5289 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5290 5291 Location GetSource() const { return source_; } 5292 Location GetDestination() const { return destination_; } 5293 5294 void SetSource(Location value) { source_ = value; } 5295 void SetDestination(Location value) { destination_ = value; } 5296 5297 // The parallel move resolver marks moves as "in-progress" by clearing the 5298 // destination (but not the source). 5299 Location MarkPending() { 5300 DCHECK(!IsPending()); 5301 Location dest = destination_; 5302 destination_ = Location::NoLocation(); 5303 return dest; 5304 } 5305 5306 void ClearPending(Location dest) { 5307 DCHECK(IsPending()); 5308 destination_ = dest; 5309 } 5310 5311 bool IsPending() const { 5312 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5313 return destination_.IsInvalid() && !source_.IsInvalid(); 5314 } 5315 5316 // True if this blocks a move from the given location. 5317 bool Blocks(Location loc) const { 5318 return !IsEliminated() && source_.OverlapsWith(loc); 5319 } 5320 5321 // A move is redundant if it's been eliminated, if its source and 5322 // destination are the same, or if its destination is unneeded. 
5323 bool IsRedundant() const { 5324 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5325 } 5326 5327 // We clear both operands to indicate move that's been eliminated. 5328 void Eliminate() { 5329 source_ = destination_ = Location::NoLocation(); 5330 } 5331 5332 bool IsEliminated() const { 5333 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5334 return source_.IsInvalid(); 5335 } 5336 5337 Primitive::Type GetType() const { return type_; } 5338 5339 bool Is64BitMove() const { 5340 return Primitive::Is64BitType(type_); 5341 } 5342 5343 HInstruction* GetInstruction() const { return instruction_; } 5344 5345 private: 5346 Location source_; 5347 Location destination_; 5348 // The type this move is for. 5349 Primitive::Type type_; 5350 // The instruction this move is assocatied with. Null when this move is 5351 // for moving an input in the expected locations of user (including a phi user). 5352 // This is only used in debug mode, to ensure we do not connect interval siblings 5353 // in the same parallel move. 5354 HInstruction* instruction_; 5355}; 5356 5357static constexpr size_t kDefaultNumberOfMoves = 4; 5358 5359class HParallelMove : public HTemplateInstruction<0> { 5360 public: 5361 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5362 : HTemplateInstruction(SideEffects::None(), dex_pc), 5363 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5364 moves_.reserve(kDefaultNumberOfMoves); 5365 } 5366 5367 void AddMove(Location source, 5368 Location destination, 5369 Primitive::Type type, 5370 HInstruction* instruction) { 5371 DCHECK(source.IsValid()); 5372 DCHECK(destination.IsValid()); 5373 if (kIsDebugBuild) { 5374 if (instruction != nullptr) { 5375 for (const MoveOperands& move : moves_) { 5376 if (move.GetInstruction() == instruction) { 5377 // Special case the situation where the move is for the spill slot 5378 // of the instruction. 
5379 if ((GetPrevious() == instruction) 5380 || ((GetPrevious() == nullptr) 5381 && instruction->IsPhi() 5382 && instruction->GetBlock() == GetBlock())) { 5383 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5384 << "Doing parallel moves for the same instruction."; 5385 } else { 5386 DCHECK(false) << "Doing parallel moves for the same instruction."; 5387 } 5388 } 5389 } 5390 } 5391 for (const MoveOperands& move : moves_) { 5392 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5393 << "Overlapped destination for two moves in a parallel move: " 5394 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5395 << source << " ==> " << destination; 5396 } 5397 } 5398 moves_.emplace_back(source, destination, type, instruction); 5399 } 5400 5401 MoveOperands* MoveOperandsAt(size_t index) { 5402 return &moves_[index]; 5403 } 5404 5405 size_t NumMoves() const { return moves_.size(); } 5406 5407 DECLARE_INSTRUCTION(ParallelMove); 5408 5409 private: 5410 ArenaVector<MoveOperands> moves_; 5411 5412 DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5413}; 5414 5415} // namespace art 5416 5417#ifdef ART_ENABLE_CODEGEN_arm64 5418#include "nodes_arm64.h" 5419#endif 5420#ifdef ART_ENABLE_CODEGEN_x86 5421#include "nodes_x86.h" 5422#endif 5423 5424namespace art { 5425 5426class HGraphVisitor : public ValueObject { 5427 public: 5428 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5429 virtual ~HGraphVisitor() {} 5430 5431 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} 5432 virtual void VisitBasicBlock(HBasicBlock* block); 5433 5434 // Visit the graph following basic block insertion order. 5435 void VisitInsertionOrder(); 5436 5437 // Visit the graph following dominator tree reverse post-order. 5438 void VisitReversePostOrder(); 5439 5440 HGraph* GetGraph() const { return graph_; } 5441 5442 // Visit functions for instruction classes. 
5443#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5444 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5445 5446 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5447 5448#undef DECLARE_VISIT_INSTRUCTION 5449 5450 private: 5451 HGraph* const graph_; 5452 5453 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5454}; 5455 5456class HGraphDelegateVisitor : public HGraphVisitor { 5457 public: 5458 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5459 virtual ~HGraphDelegateVisitor() {} 5460 5461 // Visit functions that delegate to to super class. 5462#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5463 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5464 5465 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5466 5467#undef DECLARE_VISIT_INSTRUCTION 5468 5469 private: 5470 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5471}; 5472 5473class HInsertionOrderIterator : public ValueObject { 5474 public: 5475 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5476 5477 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5478 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5479 void Advance() { ++index_; } 5480 5481 private: 5482 const HGraph& graph_; 5483 size_t index_; 5484 5485 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5486}; 5487 5488class HReversePostOrderIterator : public ValueObject { 5489 public: 5490 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5491 // Check that reverse post order of the graph has been built. 
5492 DCHECK(!graph.GetReversePostOrder().empty()); 5493 } 5494 5495 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5496 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5497 void Advance() { ++index_; } 5498 5499 private: 5500 const HGraph& graph_; 5501 size_t index_; 5502 5503 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5504}; 5505 5506class HPostOrderIterator : public ValueObject { 5507 public: 5508 explicit HPostOrderIterator(const HGraph& graph) 5509 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5510 // Check that reverse post order of the graph has been built. 5511 DCHECK(!graph.GetReversePostOrder().empty()); 5512 } 5513 5514 bool Done() const { return index_ == 0; } 5515 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5516 void Advance() { --index_; } 5517 5518 private: 5519 const HGraph& graph_; 5520 size_t index_; 5521 5522 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5523}; 5524 5525class HLinearPostOrderIterator : public ValueObject { 5526 public: 5527 explicit HLinearPostOrderIterator(const HGraph& graph) 5528 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5529 5530 bool Done() const { return index_ == 0; } 5531 5532 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5533 5534 void Advance() { 5535 --index_; 5536 DCHECK_GE(index_, 0U); 5537 } 5538 5539 private: 5540 const ArenaVector<HBasicBlock*>& order_; 5541 size_t index_; 5542 5543 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5544}; 5545 5546class HLinearOrderIterator : public ValueObject { 5547 public: 5548 explicit HLinearOrderIterator(const HGraph& graph) 5549 : order_(graph.GetLinearOrder()), index_(0) {} 5550 5551 bool Done() const { return index_ == order_.size(); } 5552 HBasicBlock* Current() const { return order_[index_]; } 5553 void Advance() { ++index_; } 5554 5555 private: 5556 const ArenaVector<HBasicBlock*>& order_; 5557 
size_t index_; 5558 5559 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5560}; 5561 5562// Iterator over the blocks that art part of the loop. Includes blocks part 5563// of an inner loop. The order in which the blocks are iterated is on their 5564// block id. 5565class HBlocksInLoopIterator : public ValueObject { 5566 public: 5567 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5568 : blocks_in_loop_(info.GetBlocks()), 5569 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5570 index_(0) { 5571 if (!blocks_in_loop_.IsBitSet(index_)) { 5572 Advance(); 5573 } 5574 } 5575 5576 bool Done() const { return index_ == blocks_.size(); } 5577 HBasicBlock* Current() const { return blocks_[index_]; } 5578 void Advance() { 5579 ++index_; 5580 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5581 if (blocks_in_loop_.IsBitSet(index_)) { 5582 break; 5583 } 5584 } 5585 } 5586 5587 private: 5588 const BitVector& blocks_in_loop_; 5589 const ArenaVector<HBasicBlock*>& blocks_; 5590 size_t index_; 5591 5592 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5593}; 5594 5595// Iterator over the blocks that art part of the loop. Includes blocks part 5596// of an inner loop. The order in which the blocks are iterated is reverse 5597// post order. 
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead to the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Returns the value of an int or long constant widened to 64 bits.
inline int64_t Int64FromConstant(HConstant* constant) {
  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
  return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                   : constant->AsLongConstant()->GetValue();
}

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_NODES_H_