nodes.h revision 418318f4d50e0cfc2d54330d7623ee030d4d727d
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/stl_util.h"
#include "dex/compiler_enums.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
#include "utils/array_ref.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFakeString;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

// Default initial capacities used to pre-reserve the arena-backed
// containers below, avoiding early re-allocations for typical methods.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// Largest meaningful shift distances for 32-bit and 64-bit shifts
// (shift amounts are masked to these values).
static constexpr uint32_t kMaxIntShiftValue = 0x1f;
static constexpr uint64_t kMaxLongShiftValue = 0x3f;

// Sentinels for field accesses whose resolved field/class is unknown.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Marker for instructions that carry no dex pc.
static constexpr uint32_t kNoDexPc = -1;

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};

// A list of instructions, tracked by its first and last elements.
// Used by HBasicBlock for both its instruction and its phi lists.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops the list's view of its elements; does not free them (arena-allocated).
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  // Try building the SSA form of this graph, with dominance computation and loop
  // recognition. Returns whether it was successful in doing all these steps.
  bool TryBuildingSsa() {
    BuildDominatorTree();
    // The SSA builder requires loops to all be natural. Specifically, the dead phi
    // elimination phase checks the consistency of the graph when doing a post-order
    // visit for eliminating dead phis: a dead phi can only have loop header phi
    // users remaining when being visited.
    if (!AnalyzeNaturalLoops()) return false;
    // Precompute per-block try membership before entering the SSA builder,
    // which needs the information to build catch block phis from values of
    // locals at throwing instructions inside try blocks.
    ComputeTryBlockInformation();
    TransformToSsa();
    in_ssa_form_ = true;
    return true;
  }

  void ComputeDominanceInformation();
  void ClearDominanceInformation();

  void BuildDominatorTree();
  void TransformToSsa();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns false if one
  // loop is not natural, that is the header does not dominate the
  // back edge.
  bool AnalyzeNaturalLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction used to replace the invoke expression or null if the
  // invoke is for a void method.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Returns a fresh instruction id and advances the counter. Aborts (debug
  // builds) if the id space is exhausted.
  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raises (never lowers) the recorded maximum of out vregs.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raises (never lowers) the recorded number of temporary vreg slots.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  // Number of vregs that are not parameters. Only meaningful before SSA form.
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  // Floating-point constants are cached by their bit pattern, so that e.g.
  // +0.0f and -0.0f map to distinct cache entries.
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

 private:
  void FindBackEdges(ArenaBitVector* visited);
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Looks up `value` in `cache`; on a miss (or if the cached instruction was
  // removed from the graph) allocates a new constant, caches it and inserts
  // it into the graph.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants. Float/double caches are keyed by bit pattern.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

// Loop metadata attached to a loop-header block: the header, the set of
// back edges, the blocks in the loop, and the loop's HSuspendCheck.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop. Returns whether the loop is a natural loop,
  // that is the header dominates the back edge.
  bool Populate();

  // Reanalyzes the loop by removing loop info from its blocks and re-running
  // Populate(). If there are no back edges left, the loop info is completely
  // removed as well as its SuspendCheck instruction. It must be run on nested
  // inner loops first.
  void Update();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop or any loop nested inside
  // this loop. If must_dominate is set, only definitions that actually dominate the loop
  // header can be invariant. Otherwise, any definition outside the loop, including
  // definitions that appear after the loop, is invariant.
  bool IsLoopInvariant(HInstruction* instruction, bool must_dominate) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // True for a catch-all handler, encoded with the no-index sentinel.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};

// Sentinel lifetime position / block id values.
static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.
636 637class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 638 public: 639 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 640 : graph_(graph), 641 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 642 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 643 loop_information_(nullptr), 644 dominator_(nullptr), 645 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 646 block_id_(kInvalidBlockId), 647 dex_pc_(dex_pc), 648 lifetime_start_(kNoLifetime), 649 lifetime_end_(kNoLifetime), 650 try_catch_information_(nullptr) { 651 predecessors_.reserve(kDefaultNumberOfPredecessors); 652 successors_.reserve(kDefaultNumberOfSuccessors); 653 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 654 } 655 656 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 657 return predecessors_; 658 } 659 660 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 661 return successors_; 662 } 663 664 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const; 665 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const; 666 667 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 668 return ContainsElement(successors_, block, start_from); 669 } 670 671 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 672 return dominated_blocks_; 673 } 674 675 bool IsEntryBlock() const { 676 return graph_->GetEntryBlock() == this; 677 } 678 679 bool IsExitBlock() const { 680 return graph_->GetExitBlock() == this; 681 } 682 683 bool IsSingleGoto() const; 684 bool IsSingleTryBoundary() const; 685 686 // Returns true if this block emits nothing but a jump. 687 bool IsSingleJump() const { 688 HLoopInformation* loop_info = GetLoopInformation(); 689 return (IsSingleGoto() || IsSingleTryBoundary()) 690 // Back edges generate a suspend check. 
691 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 692 } 693 694 void AddBackEdge(HBasicBlock* back_edge) { 695 if (loop_information_ == nullptr) { 696 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 697 } 698 DCHECK_EQ(loop_information_->GetHeader(), this); 699 loop_information_->AddBackEdge(back_edge); 700 } 701 702 HGraph* GetGraph() const { return graph_; } 703 void SetGraph(HGraph* graph) { graph_ = graph; } 704 705 uint32_t GetBlockId() const { return block_id_; } 706 void SetBlockId(int id) { block_id_ = id; } 707 uint32_t GetDexPc() const { return dex_pc_; } 708 709 HBasicBlock* GetDominator() const { return dominator_; } 710 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 711 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 712 713 void RemoveDominatedBlock(HBasicBlock* block) { 714 RemoveElement(dominated_blocks_, block); 715 } 716 717 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 718 ReplaceElement(dominated_blocks_, existing, new_block); 719 } 720 721 void ClearDominanceInformation(); 722 723 int NumberOfBackEdges() const { 724 return IsLoopHeader() ? 
loop_information_->NumberOfBackEdges() : 0; 725 } 726 727 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 728 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 729 const HInstructionList& GetInstructions() const { return instructions_; } 730 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 731 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 732 const HInstructionList& GetPhis() const { return phis_; } 733 734 void AddSuccessor(HBasicBlock* block) { 735 successors_.push_back(block); 736 block->predecessors_.push_back(this); 737 } 738 739 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 740 size_t successor_index = GetSuccessorIndexOf(existing); 741 existing->RemovePredecessor(this); 742 new_block->predecessors_.push_back(this); 743 successors_[successor_index] = new_block; 744 } 745 746 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 747 size_t predecessor_index = GetPredecessorIndexOf(existing); 748 existing->RemoveSuccessor(this); 749 new_block->successors_.push_back(this); 750 predecessors_[predecessor_index] = new_block; 751 } 752 753 // Insert `this` between `predecessor` and `successor. This method 754 // preserves the indicies, and will update the first edge found between 755 // `predecessor` and `successor`. 
756 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 757 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 758 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 759 successor->predecessors_[predecessor_index] = this; 760 predecessor->successors_[successor_index] = this; 761 successors_.push_back(successor); 762 predecessors_.push_back(predecessor); 763 } 764 765 void RemovePredecessor(HBasicBlock* block) { 766 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 767 } 768 769 void RemoveSuccessor(HBasicBlock* block) { 770 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 771 } 772 773 void ClearAllPredecessors() { 774 predecessors_.clear(); 775 } 776 777 void AddPredecessor(HBasicBlock* block) { 778 predecessors_.push_back(block); 779 block->successors_.push_back(this); 780 } 781 782 void SwapPredecessors() { 783 DCHECK_EQ(predecessors_.size(), 2u); 784 std::swap(predecessors_[0], predecessors_[1]); 785 } 786 787 void SwapSuccessors() { 788 DCHECK_EQ(successors_.size(), 2u); 789 std::swap(successors_[0], successors_[1]); 790 } 791 792 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 793 return IndexOfElement(predecessors_, predecessor); 794 } 795 796 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 797 return IndexOfElement(successors_, successor); 798 } 799 800 HBasicBlock* GetSinglePredecessor() const { 801 DCHECK_EQ(GetPredecessors().size(), 1u); 802 return GetPredecessors()[0]; 803 } 804 805 HBasicBlock* GetSingleSuccessor() const { 806 DCHECK_EQ(GetSuccessors().size(), 1u); 807 return GetSuccessors()[0]; 808 } 809 810 // Returns whether the first occurrence of `predecessor` in the list of 811 // predecessors is at index `idx`. 
  // Returns whether `idx` is the index GetPredecessorIndexOf reports for
  // `predecessor` (i.e. its first occurrence; a block may appear more than
  // once in the predecessor list).
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Split catch block into two blocks after the original move-exception bytecode
  // instruction, or at the beginning if not present. Returns the newly created,
  // latter block, or nullptr if such block could not be created (must be dead
  // in that case). Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitCatchBlockAfterMoveException();

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  // Appends `instruction` to the (non-phi) instruction list of this block.
  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  // Appends `phi` to the phi list of this block.
  void AddPhi(HPhi* phi);
  // Inserts `instruction` into the phi list right after `cursor`.
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  // Dispatches to RemoveInstruction or RemovePhi depending on whether
  // `instruction` is a phi.
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // Returns whether this block is the header of the (innermost) loop it
  // belongs to.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Only valid on loop headers: whether predecessor 0 is the loop pre-header
  // (as opposed to a back edge).
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Returns the innermost loop containing this block, or nullptr when the
  // block is not inside any loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  // Whether this block is covered by a try region.
  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  // Whether this block is the entry of an exception handler.
  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  // Returns whether any instruction in this block can throw.
  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions, set and used by liveness analysis/register allocation.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;                               // The graph this block belongs to.
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;               // Non-phi instructions, in order.
  HInstructionList phis_;                       // Phi instructions of this block.
  HLoopInformation* loop_information_;          // Innermost enclosing loop, or nullptr.
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Nullptr when the block is in neither a try region nor a catch handler.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  // True once the outermost enclosing loop has been passed.
  bool Done() const { return current_ == nullptr; }

  // Moves to the next-outer loop by querying the loop information of the
  // current loop's pre-header.
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// X-macro listing every concrete instruction shared by all architectures.
// Each entry is M(type, super) where `type` names the H##type class and
// `super` its base class in the instruction hierarchy.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(Condition, BinaryOperation)                                         \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

// Architecture-specific instructions, only non-empty when the corresponding
// code generator is compiled in.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64IntermediateAddress, Instruction)                              \
  M(Arm64MultiplyAccumulate, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions: the common set plus any enabled per-arch sets.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All instructions: the concrete ones plus the abstract base categories.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Generates the per-instruction boilerplate overrides: kind, debug name,
// checked downcasts, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKind() const OVERRIDE { return k##type; }          \
  const char* DebugName() const OVERRIDE { return #type; }              \
  const H##type* As##type() const OVERRIDE { return this; }             \
  H##type* As##type() OVERRIDE { return this; }                         \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

template <typename T> class HUseList;

// One node of a use list: records a single use of an instruction. `T` is the
// user type (HInstruction* for regular uses, HEnvironment* for environment
// uses).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Private: only HUseList (a friend) creates nodes.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Doubly-linked list of uses of an instruction, with arena-allocated nodes.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  // Drops all entries. Nodes are not freed (they live in the arena).
  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from this list; `node` must be an element of the list.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Membership test by linear scan -- O(n).
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // Counts the entries by walking the list -- O(n), hence "Slow".
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over a HUseList.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Completes `old_record` with its use-list node. May only be done once:
  // `old_record` must not already carry a node.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38 bits and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 * |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 * +-------------+---------+---------+--------------+---------+---------+
 * |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 * |      3      |333333322|222222221|       1      |111111110|000000000|
 * |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every 'change' and every 'depend on' bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All writes/reads plus 'can trigger GC', but not 'depends on GC'.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are modeled conservatively as all writes and reads
  // (they act as a memory barrier).
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set of effects minus those in `other`.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if `other`'s effects are a subset of this set's.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift the 'depend on' half down so it lines up with `other`'s
    // 'change' half, then intersect.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk bits from most significant (kLastBit) down to 0.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of type bits tracked per field/array access (one per
  // Primitive::Type access type, as drawn in the class comment table).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // Layout of the 'change' half of the bitfield.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // Layout of the 'depend on' half; mirrors the 'change' half, shifted up.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // "Shape" copy: allocates an environment with the same size and method
  // information as `to_copy`; the vreg values themselves are not copied
  // (callers use CopyFrom for that).
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Clones `parent` (and, recursively, its own parents) and attaches the
  // copy at the outermost end of this environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw update of vreg `index`; does not maintain use lists.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers recorded in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  // One record per virtual register: the instruction currently holding its
  // value plus that instruction's corresponding use-list node.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  // Environment of the caller frame, for inlined code; nullptr otherwise.
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Statically-known type information for a reference value: a class handle
// plus whether that class is exact.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() &&
        !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this type can hold values of type `rti`.
  // Conservatively false when this type is inexact or not an array.
  bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this type can hold the elements of an array of type
  // `rti` (component-type assignability). Conservatively false when this type
  // is inexact or either type is not an array.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Like IsSupertypeOf, but false when the two classes are the same.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo();
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Base class of all nodes (instructions) of the HIR graph.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// Generates the InstructionKind enum: one k##type value per instruction.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return
      InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  // The value type produced by this instruction; void when it produces none.
  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  // Raw update of input `index`; does not maintain use lists.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Registers `user` (whose input `index` is this instruction) in this
  // instruction's use list, and stores the resulting use node back into the
  // user's input record.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  // Same as AddUseAt, but for an environment use at vreg `index`.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  // Removes this instruction from the use list of its `input`-th input.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Same as CopyEnvironmentFrom, but values that are loop phis of `block`'s
  // loop header are replaced by the phi's first input in the copy.
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method.
It could be more in case of inlining. 1926 size_t EnvironmentSize() const; 1927 1928 LocationSummary* GetLocations() const { return locations_; } 1929 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1930 1931 void ReplaceWith(HInstruction* instruction); 1932 void ReplaceInput(HInstruction* replacement, size_t index); 1933 1934 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1935 // uses of this instruction by `other` are *not* updated. 1936 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1937 ReplaceWith(other); 1938 other->ReplaceInput(this, use_index); 1939 } 1940 1941 // Move `this` instruction before `cursor`. 1942 void MoveBefore(HInstruction* cursor); 1943 1944#define INSTRUCTION_TYPE_CHECK(type, super) \ 1945 bool Is##type() const { return (As##type() != nullptr); } \ 1946 virtual const H##type* As##type() const { return nullptr; } \ 1947 virtual H##type* As##type() { return nullptr; } 1948 1949 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1950#undef INSTRUCTION_TYPE_CHECK 1951 1952 // Returns whether the instruction can be moved within the graph. 1953 virtual bool CanBeMoved() const { return false; } 1954 1955 // Returns whether the two instructions are of the same kind. 1956 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1957 return false; 1958 } 1959 1960 // Returns whether any data encoded in the two instructions is equal. 1961 // This method does not look at the inputs. Both instructions must be 1962 // of the same type, otherwise the method has undefined behavior. 1963 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1964 return false; 1965 } 1966 1967 // Returns whether two instructions are equal, that is: 1968 // 1) They have the same type and contain the same data (InstructionDataEquals). 1969 // 2) Their inputs are identical. 
1970 bool Equals(HInstruction* other) const; 1971 1972 virtual InstructionKind GetKind() const = 0; 1973 1974 virtual size_t ComputeHashCode() const { 1975 size_t result = GetKind(); 1976 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1977 result = (result * 31) + InputAt(i)->GetId(); 1978 } 1979 return result; 1980 } 1981 1982 SideEffects GetSideEffects() const { return side_effects_; } 1983 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 1984 1985 size_t GetLifetimePosition() const { return lifetime_position_; } 1986 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1987 LiveInterval* GetLiveInterval() const { return live_interval_; } 1988 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1989 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1990 1991 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 1992 1993 // Returns whether the code generation of the instruction will require to have access 1994 // to the current method. Such instructions are: 1995 // (1): Instructions that require an environment, as calling the runtime requires 1996 // to walk the stack and have the current method stored at a specific stack address. 1997 // (2): Object literals like classes and strings, that are loaded from the dex cache 1998 // fields of the current method. 1999 bool NeedsCurrentMethod() const { 2000 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 2001 } 2002 2003 // Returns whether the code generation of the instruction will require to have access 2004 // to the dex cache of the current method's declaring class via the current method. 2005 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 2006 2007 // Does this instruction have any use in an environment before 2008 // control flow hits 'other'? 
2009 bool HasAnyEnvironmentUseBefore(HInstruction* other); 2010 2011 // Remove all references to environment uses of this instruction. 2012 // The caller must ensure that this is safe to do. 2013 void RemoveEnvironmentUsers(); 2014 2015 protected: 2016 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2017 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2018 2019 private: 2020 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2021 2022 HInstruction* previous_; 2023 HInstruction* next_; 2024 HBasicBlock* block_; 2025 const uint32_t dex_pc_; 2026 2027 // An instruction gets an id when it is added to the graph. 2028 // It reflects creation order. A negative id means the instruction 2029 // has not been added to the graph. 2030 int id_; 2031 2032 // When doing liveness analysis, instructions that have uses get an SSA index. 2033 int ssa_index_; 2034 2035 // List of instructions that have this instruction as input. 2036 HUseList<HInstruction*> uses_; 2037 2038 // List of environments that contain this instruction. 2039 HUseList<HEnvironment*> env_uses_; 2040 2041 // The environment associated with this instruction. Not null if the instruction 2042 // might jump out of the method. 2043 HEnvironment* environment_; 2044 2045 // Set by the code generator. 2046 LocationSummary* locations_; 2047 2048 // Set by the liveness analysis. 2049 LiveInterval* live_interval_; 2050 2051 // Set by the liveness analysis, this is the position in a linear 2052 // order of blocks where this instruction's live interval start. 2053 size_t lifetime_position_; 2054 2055 SideEffects side_effects_; 2056 2057 // TODO: for primitive types this should be marked as invalid. 
  // Reference type information, set by the reference type propagation pass.
  ReferenceTypeInfo reference_type_info_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);

// Iterates over the inputs of `instruction` in input-index order.
class HInputIterator : public ValueObject {
 public:
  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}

  bool Done() const { return index_ == instruction_->InputCount(); }
  HInstruction* Current() const { return instruction_->InputAt(index_); }
  void Advance() { index_++; }

 private:
  HInstruction* instruction_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
};

// Forward iterator over a basic block's instruction list. The next pointer is
// cached before Current() is handed out, so the current instruction can be
// removed from the list while iterating.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
};

// Backward counterpart of HInstructionIterator, with the same removal-safe
// caching of the next (i.e. previous) element.
class HBackwardInstructionIterator : public ValueObject {
 public:
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// An HInstruction with a fixed number N of input slots, stored inline.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  // No inputs exist; accessing any input slot is a programming error.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};

// An HTemplateInstruction that also produces a value of primitive type `type_`.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
2185 virtual ~HExpression() {} 2186 2187 Primitive::Type GetType() const OVERRIDE { return type_; } 2188 2189 protected: 2190 Primitive::Type type_; 2191}; 2192 2193// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2194// instruction that branches to the exit block. 2195class HReturnVoid : public HTemplateInstruction<0> { 2196 public: 2197 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2198 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2199 2200 bool IsControlFlow() const OVERRIDE { return true; } 2201 2202 DECLARE_INSTRUCTION(ReturnVoid); 2203 2204 private: 2205 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2206}; 2207 2208// Represents dex's RETURN opcodes. A HReturn is a control flow 2209// instruction that branches to the exit block. 2210class HReturn : public HTemplateInstruction<1> { 2211 public: 2212 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2213 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2214 SetRawInputAt(0, value); 2215 } 2216 2217 bool IsControlFlow() const OVERRIDE { return true; } 2218 2219 DECLARE_INSTRUCTION(Return); 2220 2221 private: 2222 DISALLOW_COPY_AND_ASSIGN(HReturn); 2223}; 2224 2225// The exit instruction is the only instruction of the exit block. 2226// Instructions aborting the method (HThrow and HReturn) must branch to the 2227// exit block. 2228class HExit : public HTemplateInstruction<0> { 2229 public: 2230 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2231 2232 bool IsControlFlow() const OVERRIDE { return true; } 2233 2234 DECLARE_INSTRUCTION(Exit); 2235 2236 private: 2237 DISALLOW_COPY_AND_ASSIGN(HExit); 2238}; 2239 2240// Jumps from one block to another. 
2241class HGoto : public HTemplateInstruction<0> { 2242 public: 2243 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2244 2245 bool IsControlFlow() const OVERRIDE { return true; } 2246 2247 HBasicBlock* GetSuccessor() const { 2248 return GetBlock()->GetSingleSuccessor(); 2249 } 2250 2251 DECLARE_INSTRUCTION(Goto); 2252 2253 private: 2254 DISALLOW_COPY_AND_ASSIGN(HGoto); 2255}; 2256 2257class HConstant : public HExpression<0> { 2258 public: 2259 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2260 : HExpression(type, SideEffects::None(), dex_pc) {} 2261 2262 bool CanBeMoved() const OVERRIDE { return true; } 2263 2264 virtual bool IsMinusOne() const { return false; } 2265 virtual bool IsZero() const { return false; } 2266 virtual bool IsOne() const { return false; } 2267 2268 virtual uint64_t GetValueAsUint64() const = 0; 2269 2270 DECLARE_INSTRUCTION(Constant); 2271 2272 private: 2273 DISALLOW_COPY_AND_ASSIGN(HConstant); 2274}; 2275 2276class HNullConstant : public HConstant { 2277 public: 2278 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2279 return true; 2280 } 2281 2282 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2283 2284 size_t ComputeHashCode() const OVERRIDE { return 0; } 2285 2286 DECLARE_INSTRUCTION(NullConstant); 2287 2288 private: 2289 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2290 2291 friend class HGraph; 2292 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2293}; 2294 2295// Constants of the type int. Those can be from Dex instructions, or 2296// synthesized (for example with the if-eqz instruction). 
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend through uint32_t so the sign bit is not smeared into the
  // upper half of the 64-bit representation.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructed only by HGraph (a friend), which canonicalizes constants.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Constructed only by HGraph (a friend), which canonicalizes constants.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor index 0 is taken when the condition is true.
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor index 1 is taken when the condition is false.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
2405 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; } 2406 2407 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const { 2408 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u); 2409 } 2410 2411 // Returns whether `handler` is among its exception handlers (non-zero index 2412 // successors). 2413 bool HasExceptionHandler(const HBasicBlock& handler) const { 2414 DCHECK(handler.IsCatchBlock()); 2415 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2416 } 2417 2418 // If not present already, adds `handler` to its block's list of exception 2419 // handlers. 2420 void AddExceptionHandler(HBasicBlock* handler) { 2421 if (!HasExceptionHandler(*handler)) { 2422 GetBlock()->AddSuccessor(handler); 2423 } 2424 } 2425 2426 bool IsEntry() const { return kind_ == BoundaryKind::kEntry; } 2427 2428 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2429 2430 DECLARE_INSTRUCTION(TryBoundary); 2431 2432 private: 2433 const BoundaryKind kind_; 2434 2435 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2436}; 2437 2438// Deoptimize to interpreter, upon checking a condition. 2439class HDeoptimize : public HTemplateInstruction<1> { 2440 public: 2441 explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc) 2442 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2443 SetRawInputAt(0, cond); 2444 } 2445 2446 bool NeedsEnvironment() const OVERRIDE { return true; } 2447 bool CanThrow() const OVERRIDE { return true; } 2448 2449 DECLARE_INSTRUCTION(Deoptimize); 2450 2451 private: 2452 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 2453}; 2454 2455// Represents the ArtMethod that was passed as a first argument to 2456// the method. It is used by instructions that depend on it, like 2457// instructions that work with the dex cache. 
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        start_value_(start_value),
        num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value that corresponds to the first table entry.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Base class for operations with a single input that produce a value.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;

  DECLARE_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Base class for operations with two inputs that produce a value.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or the only constant is on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // Mixed-type and null cases are optional; by default they fold to nothing.
  virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED,
                              HLongConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (int, long) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (null, null) case.";
    return nullptr;
  }

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
2618 HInstruction* GetLeastConstantLeft() const; 2619 2620 DECLARE_INSTRUCTION(BinaryOperation); 2621 2622 private: 2623 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2624}; 2625 2626// The comparison bias applies for floating point operations and indicates how NaN 2627// comparisons are treated: 2628enum class ComparisonBias { 2629 kNoBias, // bias is not applicable (i.e. for long operation) 2630 kGtBias, // return 1 for NaN comparisons 2631 kLtBias, // return -1 for NaN comparisons 2632}; 2633 2634class HCondition : public HBinaryOperation { 2635 public: 2636 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2637 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2638 needs_materialization_(true), 2639 bias_(ComparisonBias::kNoBias) {} 2640 2641 bool NeedsMaterialization() const { return needs_materialization_; } 2642 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2643 2644 // For code generation purposes, returns whether this instruction is just before 2645 // `instruction`, and disregard moves in between. 2646 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2647 2648 DECLARE_INSTRUCTION(Condition); 2649 2650 virtual IfCondition GetCondition() const = 0; 2651 2652 virtual IfCondition GetOppositeCondition() const = 0; 2653 2654 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2655 2656 void SetBias(ComparisonBias bias) { bias_ = bias; } 2657 2658 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2659 return bias_ == other->AsCondition()->bias_; 2660 } 2661 2662 bool IsFPConditionTrueIfNaN() const { 2663 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2664 IfCondition if_cond = GetCondition(); 2665 return IsGtBias() ? 
((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2666 } 2667 2668 bool IsFPConditionFalseIfNaN() const { 2669 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2670 IfCondition if_cond = GetCondition(); 2671 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2672 } 2673 2674 private: 2675 // For register allocation purposes, returns whether this instruction needs to be 2676 // materialized (that is, not just be in the processor flags). 2677 bool needs_materialization_; 2678 2679 // Needed if we merge a HCompare into a HCondition. 2680 ComparisonBias bias_; 2681 2682 DISALLOW_COPY_AND_ASSIGN(HCondition); 2683}; 2684 2685// Instruction to check if two inputs are equal to each other. 2686class HEqual : public HCondition { 2687 public: 2688 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2689 : HCondition(first, second, dex_pc) {} 2690 2691 bool IsCommutative() const OVERRIDE { return true; } 2692 2693 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2694 return GetBlock()->GetGraph()->GetIntConstant( 2695 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2696 } 2697 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2698 return GetBlock()->GetGraph()->GetIntConstant( 2699 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2700 } 2701 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2702 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2703 return GetBlock()->GetGraph()->GetIntConstant(1); 2704 } 2705 2706 DECLARE_INSTRUCTION(Equal); 2707 2708 IfCondition GetCondition() const OVERRIDE { 2709 return kCondEQ; 2710 } 2711 2712 IfCondition GetOppositeCondition() const OVERRIDE { 2713 return kCondNE; 2714 } 2715 2716 private: 2717 template <typename T> bool Compute(T x, T y) const { return x == y; } 2718 2719 DISALLOW_COPY_AND_ASSIGN(HEqual); 2720}; 2721 2722class HNotEqual : public HCondition { 2723 public: 2724 
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  // x != y is symmetric, so the optimizer may swap the inputs.
  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Comparisons always produce an int result, hence GetIntConstant for long inputs.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // null != null is always false; fold to the constant 0.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(0);
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

// Condition instruction implementing the signed comparison x < y.
class HLessThan : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  // The negation of "<" is ">=" (used when inverting a branch condition).
  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

// Condition instruction implementing the signed comparison x <= y.
class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

// Condition instruction implementing the signed comparison x > y.
class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

// Condition instruction implementing the signed comparison x >= y.
class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};

// Condition instruction implementing the unsigned comparison x < y ("below").
class HBelow : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  // Inputs are reinterpreted as unsigned before comparing.
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint32_t>(x->GetValue()),
                static_cast<uint32_t>(y->GetValue())), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint64_t>(x->GetValue()),
                static_cast<uint64_t>(y->GetValue())), GetDexPc());
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const OVERRIDE {
    return kCondB;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondAE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HBelow);
};
2909 2910class HBelowOrEqual : public HCondition { 2911 public: 2912 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2913 : HCondition(first, second, dex_pc) {} 2914 2915 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2916 return GetBlock()->GetGraph()->GetIntConstant( 2917 Compute(static_cast<uint32_t>(x->GetValue()), 2918 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2919 } 2920 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2921 return GetBlock()->GetGraph()->GetIntConstant( 2922 Compute(static_cast<uint64_t>(x->GetValue()), 2923 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2924 } 2925 2926 DECLARE_INSTRUCTION(BelowOrEqual); 2927 2928 IfCondition GetCondition() const OVERRIDE { 2929 return kCondBE; 2930 } 2931 2932 IfCondition GetOppositeCondition() const OVERRIDE { 2933 return kCondA; 2934 } 2935 2936 private: 2937 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2938 2939 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2940}; 2941 2942class HAbove : public HCondition { 2943 public: 2944 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2945 : HCondition(first, second, dex_pc) {} 2946 2947 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2948 return GetBlock()->GetGraph()->GetIntConstant( 2949 Compute(static_cast<uint32_t>(x->GetValue()), 2950 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2951 } 2952 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2953 return GetBlock()->GetGraph()->GetIntConstant( 2954 Compute(static_cast<uint64_t>(x->GetValue()), 2955 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2956 } 2957 2958 DECLARE_INSTRUCTION(Above); 2959 2960 IfCondition GetCondition() const OVERRIDE { 2961 return kCondA; 2962 } 2963 2964 IfCondition GetOppositeCondition() const OVERRIDE { 2965 return kCondBE; 2966 } 2967 2968 private: 2969 template <typename T> bool 
Compute(T x, T y) const { return x > y; } 2970 2971 DISALLOW_COPY_AND_ASSIGN(HAbove); 2972}; 2973 2974class HAboveOrEqual : public HCondition { 2975 public: 2976 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2977 : HCondition(first, second, dex_pc) {} 2978 2979 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2980 return GetBlock()->GetGraph()->GetIntConstant( 2981 Compute(static_cast<uint32_t>(x->GetValue()), 2982 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2983 } 2984 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2985 return GetBlock()->GetGraph()->GetIntConstant( 2986 Compute(static_cast<uint64_t>(x->GetValue()), 2987 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2988 } 2989 2990 DECLARE_INSTRUCTION(AboveOrEqual); 2991 2992 IfCondition GetCondition() const OVERRIDE { 2993 return kCondAE; 2994 } 2995 2996 IfCondition GetOppositeCondition() const OVERRIDE { 2997 return kCondB; 2998 } 2999 3000 private: 3001 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3002 3003 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3004}; 3005 3006// Instruction to check how two inputs compare to each other. 3007// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3008class HCompare : public HBinaryOperation { 3009 public: 3010 HCompare(Primitive::Type type, 3011 HInstruction* first, 3012 HInstruction* second, 3013 ComparisonBias bias, 3014 uint32_t dex_pc) 3015 : HBinaryOperation(Primitive::kPrimInt, 3016 first, 3017 second, 3018 SideEffectsForArchRuntimeCalls(type), 3019 dex_pc), 3020 bias_(bias) { 3021 DCHECK_EQ(type, first->GetType()); 3022 DCHECK_EQ(type, second->GetType()); 3023 } 3024 3025 template <typename T> 3026 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 
1 : -1; } 3027 3028 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3029 return GetBlock()->GetGraph()->GetIntConstant( 3030 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3031 } 3032 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3033 return GetBlock()->GetGraph()->GetIntConstant( 3034 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3035 } 3036 3037 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3038 return bias_ == other->AsCompare()->bias_; 3039 } 3040 3041 ComparisonBias GetBias() const { return bias_; } 3042 3043 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3044 3045 3046 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3047 // MIPS64 uses a runtime call for FP comparisons. 3048 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3049 } 3050 3051 DECLARE_INSTRUCTION(Compare); 3052 3053 private: 3054 const ComparisonBias bias_; 3055 3056 DISALLOW_COPY_AND_ASSIGN(HCompare); 3057}; 3058 3059// A local in the graph. Corresponds to a Dex register. 3060class HLocal : public HTemplateInstruction<0> { 3061 public: 3062 explicit HLocal(uint16_t reg_number) 3063 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3064 3065 DECLARE_INSTRUCTION(Local); 3066 3067 uint16_t GetRegNumber() const { return reg_number_; } 3068 3069 private: 3070 // The Dex register number. 3071 const uint16_t reg_number_; 3072 3073 DISALLOW_COPY_AND_ASSIGN(HLocal); 3074}; 3075 3076// Load a given local. The local is an input of this instruction. 
class HLoadLocal : public HExpression<1> {
 public:
  HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
  }

  // Input 0 is always the HLocal being read.
  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(LoadLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadLocal);
};

// Store a value in a given local. This instruction has two inputs: the value
// and the local.
class HStoreLocal : public HTemplateInstruction<2> {
 public:
  HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
    SetRawInputAt(1, value);
  }

  // Input 0 is the destination HLocal; input 1 is the stored value.
  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(StoreLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HStoreLocal);
};

// Constant holding a 32-bit floating-point value.
class HFloatConstant : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  // Compare by bit pattern so NaNs with identical bits are considered equal
  // and +0.0f / -0.0f are considered different.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant());
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Note: value comparison, so both +0.0f and -0.0f report true here.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0f;
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs from a raw 32-bit pattern (e.g. straight from dex bytecode).
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};

// Constant holding a 64-bit floating-point value.
class HDoubleConstant : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  // Compare by bit pattern so NaNs with identical bits are considered equal
  // and +0.0 / -0.0 are considered different.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant());
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Note: value comparison, so both +0.0 and -0.0 report true here.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0;
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs from a raw 64-bit pattern (e.g. straight from dex bytecode).
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};

// Enumeration of all recognized intrinsics, generated from intrinsics_list.h,
// plus kNone for non-intrinsic invokes.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);

enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};

// Base class for all method-invocation instructions.
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments. This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return return_type_; }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache);

  // An invoke is inlined if its environment has a parent environment.
  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->GetParent() != nullptr;
  }

  // Invokes are conservatively treated as throwing.
  bool CanThrow() const OVERRIDE { return true; }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_INSTRUCTION(Invoke);

 protected:
  // `number_of_other_inputs` counts trailing non-argument inputs (e.g. a
  // current-method pointer or an explicit clinit check) appended by subclasses.
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      return_type_(return_type),
      dex_method_index_(dex_method_index),
      original_invoke_type_(original_invoke_type),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const Primitive::Type return_type_;
  const uint32_t dex_method_index_;
  const InvokeType original_invoke_type_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};

// Invoke of a method that could not be resolved at compile time;
// dispatch is deferred entirely to the runtime.
class HInvokeUnresolved : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};

// Invoke of a statically-bound method (invoke-static or invoke-direct).
class HInvokeStaticOrDirect : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,       // Class already initialized.
    kExplicit,   // Static call having explicit clinit check as last input.
    kImplicit,   // Static call implicitly requiring a clinit check.
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Use ArtMethod* at an address that will be known at link time, embed the direct
    // address in the code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // or not the image is relocatable.
    kDirectAddressWithFixup,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use PC-relative call instruction patched at link time.
    // Used for calls within an oat file, boot->boot or app->app.
    kCallPCRelative,

    // Call to a known target address, embed the direct address in code.
    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
    kCallDirect,

    // Call to a target address that will be known at link time, embed the direct
    // address in code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // or not the image is relocatable.
    kCallDirectWithFixup,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
    uint64_t direct_code_ptr;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) +
                    (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        invoke_type_(invoke_type),
        clinit_check_requirement_(clinit_check_requirement),
        target_method_(target_method),
        dispatch_info_(dispatch_info) { }

  // Replaces the dispatch info, dropping the now-unneeded current-method
  // input when the new load kind no longer requires it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return return_type_ == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke IsStringInit(), it initially has a HFakeString special argument
  // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(),
  // the "special input" is the current method pointer; otherwise there may be one
  // platform-specific special input, such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }

  InvokeType GetInvokeType() const { return invoke_type_; }
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }

  // NOTE(review): narrows the 64-bit method_load_data to int32_t; presumably
  // the string-init entrypoint offset always fits -- confirm against callers.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    clinit_check_requirement_ = new_requirement;
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Returns whether this string-init invoke still carries `str` as its
  // trailing HFakeString input.
  bool IsStringFactoryFor(HFakeString* str) const {
    if (!IsStringInit()) return false;
    DCHECK(!HasCurrentMethodInput());
    if (InputCount() == (number_of_arguments_)) return false;
    return InputAt(InputCount() - 1)->AsFakeString() == str;
  }

  void RemoveFakeStringArgumentAsLastInput() {
    DCHECK(IsStringInit());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsFakeString()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }

  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 private:
  const InvokeType invoke_type_;
  ClinitCheckRequirement clinit_check_requirement_;
  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);

// Invoke of a virtual method, dispatched through the receiver's vtable.
class HInvokeVirtual : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 uint32_t vtable_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
        vtable_index_(vtable_index) {}

  // The receiver (input 0) is dereferenced for the vtable load, so a null
  // check on it can be folded into this invoke.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};

// Invoke of an interface method, dispatched through the IMT.
class HInvokeInterface : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   uint32_t imt_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
3642 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3643 } 3644 3645 uint32_t GetImtIndex() const { return imt_index_; } 3646 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3647 3648 DECLARE_INSTRUCTION(InvokeInterface); 3649 3650 private: 3651 const uint32_t imt_index_; 3652 3653 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3654}; 3655 3656class HNewInstance : public HExpression<1> { 3657 public: 3658 HNewInstance(HCurrentMethod* current_method, 3659 uint32_t dex_pc, 3660 uint16_t type_index, 3661 const DexFile& dex_file, 3662 bool can_throw, 3663 bool finalizable, 3664 QuickEntrypointEnum entrypoint) 3665 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3666 type_index_(type_index), 3667 dex_file_(dex_file), 3668 can_throw_(can_throw), 3669 finalizable_(finalizable), 3670 entrypoint_(entrypoint) { 3671 SetRawInputAt(0, current_method); 3672 } 3673 3674 uint16_t GetTypeIndex() const { return type_index_; } 3675 const DexFile& GetDexFile() const { return dex_file_; } 3676 3677 // Calls runtime so needs an environment. 3678 bool NeedsEnvironment() const OVERRIDE { return true; } 3679 3680 // It may throw when called on type that's not instantiable/accessible. 3681 // It can throw OOME. 3682 // TODO: distinguish between the two cases so we can for example allow allocation elimination. 
3683 bool CanThrow() const OVERRIDE { return can_throw_ || true; } 3684 3685 bool IsFinalizable() const { return finalizable_; } 3686 3687 bool CanBeNull() const OVERRIDE { return false; } 3688 3689 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3690 3691 DECLARE_INSTRUCTION(NewInstance); 3692 3693 private: 3694 const uint16_t type_index_; 3695 const DexFile& dex_file_; 3696 const bool can_throw_; 3697 const bool finalizable_; 3698 const QuickEntrypointEnum entrypoint_; 3699 3700 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3701}; 3702 3703class HNeg : public HUnaryOperation { 3704 public: 3705 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3706 : HUnaryOperation(result_type, input, dex_pc) {} 3707 3708 template <typename T> T Compute(T x) const { return -x; } 3709 3710 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3711 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3712 } 3713 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3714 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3715 } 3716 3717 DECLARE_INSTRUCTION(Neg); 3718 3719 private: 3720 DISALLOW_COPY_AND_ASSIGN(HNeg); 3721}; 3722 3723class HNewArray : public HExpression<2> { 3724 public: 3725 HNewArray(HInstruction* length, 3726 HCurrentMethod* current_method, 3727 uint32_t dex_pc, 3728 uint16_t type_index, 3729 const DexFile& dex_file, 3730 QuickEntrypointEnum entrypoint) 3731 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3732 type_index_(type_index), 3733 dex_file_(dex_file), 3734 entrypoint_(entrypoint) { 3735 SetRawInputAt(0, length); 3736 SetRawInputAt(1, current_method); 3737 } 3738 3739 uint16_t GetTypeIndex() const { return type_index_; } 3740 const DexFile& GetDexFile() const { return dex_file_; } 3741 3742 // Calls runtime so needs an environment. 
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A freshly allocated array is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};

// Addition (x + y). Constant folding is provided for int and long.
class HAdd : public HBinaryOperation {
 public:
  HAdd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x + y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Add);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAdd);
};

// Subtraction (x - y).
class HSub : public HBinaryOperation {
 public:
  HSub(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T> T Compute(T x, T y) const { return x - y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Multiplication (x * y).
class HMul : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Division (x / y). Note the non-defaulted dex_pc: division can require a
// runtime call on some architectures (see SideEffectsForArchRuntimeCalls).
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ?
        -x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Remainder (x % y); like HDiv, may be implemented via a runtime call.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Guard instruction checking a divisor; NeedsEnvironment/CanThrow show it can
// throw at runtime when the value is zero.
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is transparent: it reports the type of its input.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Shift left (x << y). The shift amount is masked with the type's maximum
// shift value, matching the runtime semantics of the shift instructions.
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Shift right (x >> y), operating on the signed value; the shift amount is
// masked with the type's maximum shift value.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Unsigned (zero-extending) shift right: the operand is cast to the
// corresponding unsigned type before shifting.
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND (x & y). Mixed int/long operands are supported; the result
// type follows the usual arithmetic conversions (decltype(x & y)).
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR (x | y).
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x | y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};

// Bitwise XOR (x ^ y).
class HXor : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};

// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index),
        is_this_(is_this),
        // The 'this' reference of an instance method is known non-null.
        can_be_null_(!is_this) {}

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return is_this_; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  // Whether or not the parameter value corresponds to 'this' argument.
  const bool is_this_;

  // Mutable: may be refined (e.g. by nullability analysis) via SetCanBeNull.
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise NOT (~x). Constant folding is provided for int and long.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean (0/1) input; only defined for int inputs
// holding a boolean value (the DCHECK enforces x is 0 or 1).
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> bool Compute(T x) const {
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // A conversion to the same type would be a no-op and must not be built.
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    // float/double -> int-family and long -> float/double conversions may be
    // lowered to runtime calls; everything else is a plain instruction.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

// Sentinel register number (wraps to 0xFFFFFFFF) for values without a vreg.
static constexpr uint32_t kNoRegNumber = -1;

// SSA phi: merges values of the same dex register flowing in from the
// block's predecessors (one input per predecessor).
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(type),
        is_live_(false),
        can_be_null_(true) {
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      // Sub-int types are widened: a phi only holds int-sized values.
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type type) { type_ = type; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness flag used by dead-phi elimination.
  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return is_live_; }

  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    // NOTE(review): `next->AsPhi()` relies on successors of a phi in the
    // instruction list being phis while the reg numbers match — guaranteed by
    // the adjacency assumption above.
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Explicit null check on its input; throws NullPointerException at runtime.
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // After the check succeeds, the value is known non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Immutable description of a field access target, shared by the instance and
// static field get/set instructions.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Reads an instance field. Input 0 is the object reference.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering semantics and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only offsets within the first page can fault on a null receiver and
    // thereby serve as the null check.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Writes an instance field. Input 0 is the object, input 1 the value.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Reads an array element. Input 0 is the array, input 1 the index.
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None()) 
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Refines the element type in place (e.g. float/double vreg aliases);
  // NOTE(review): presumably called by the SSA builder — confirm at call sites.
  void SetType(Primitive::Type type) { type_ = type; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

// Writes an array element. Inputs: 0 = array, 1 = index, 2 = value.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc),
        expected_component_type_(expected_component_type),
        // Reference stores may need an ArrayStoreException type check.
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    needs_type_check_ = false;
  }

  void ClearValueCanBeNull() {
    value_can_be_null_ = false;
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    static_type_of_array_is_object_array_ = true;
  }

  bool GetValueCanBeNull() const { return value_can_be_null_; }
  bool NeedsTypeCheck() const { return needs_type_check_; }
  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : expected_component_type_;
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return expected_component_type_;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    // Reference stores may call the runtime (type check / write barrier path).
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  const Primitive::Type expected_component_type_;
  bool needs_type_check_;
  bool value_can_be_null_;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  bool static_type_of_array_is_object_array_;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

// Reads the length of an array. Input 0 is the array reference.
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};

// Checks `index` against `length`; throws ArrayIndexOutOfBoundsException at
// runtime on failure. Inputs: 0 = index, 1 = length.
class HBoundsCheck : public HExpression<2> {
 public:
  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
      : HExpression(index->GetType(), SideEffects::None(), dex_pc) {
    DCHECK(index->GetType() == Primitive::kPrimInt);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};

/**
 * Some DEX instructions are folded into multiple HInstructions that need
 * to stay live until the last HInstruction. This class
 * is used as a marker for the baseline compiler to ensure its preceding
 * HInstruction stays live.
 * `index` represents the stack location index of the
 * instruction (the actual offset is computed as index * vreg_size).
 */
class HTemporary : public HTemplateInstruction<0> {
 public:
  explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {}

  size_t GetIndex() const { return index_; }

  Primitive::Type GetType() const OVERRIDE {
    // The previous instruction is the one that will be stored in the temporary location.
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Marks a point where the runtime may interrupt execution; declared as able
// to trigger GC, hence it needs an environment.
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    // Will call runtime and load the class if the class is not loaded yet.
    // TODO: finer grain decision.
    return !is_referrers_class_;
  }

  bool MustGenerateClinitCheck() const {
    return generate_clinit_check_;
  }
  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    generate_clinit_check_ = generate_clinit_check;
  }

  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_;
  }

  bool NeedsAccessCheck() const {
    return needs_access_check_;
  }

  bool CanThrow() const OVERRIDE {
    // May call runtime and therefore can throw.
    // TODO: finer grain decision.
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool is_referrers_class_;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  bool generate_clinit_check_;
  bool needs_access_check_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};

// Loads a String object from the dex cache, identified by its string index.
class HLoadString : public HExpression<1> {
 public:
  HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        string_index_(string_index) {
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return other->AsLoadString()->string_index_ == string_index_;
  }

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  uint32_t GetStringIndex() const { return string_index_; }

  // TODO: Can we deopt or debug when we resolve a string?
  bool NeedsEnvironment() const OVERRIDE { return false; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; }
  bool CanBeNull() const OVERRIDE { return false; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  const uint32_t string_index_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }


  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Reads a static field; input 0 is the Class object holding the field.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads must not be reordered, so they cannot be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};

// Stores a value into a static field; input 0 is the Class object, input 1
// the value.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};

// Field access for a field that could not be resolved at compile time.
// These nodes need an environment and can throw.
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};

// Store into an unresolved instance field; input 0 is the object, input 1
// the value.
class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
};

// Read of an unresolved static field; no inputs.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};

// Store into an unresolved static field; input 0 is the value.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};

// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};

// Implements the `throw` DEX instruction: input 0 is the exception object.
// This is a control-flow instruction (it ends its block).
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};

/**
 * Implementation strategies for the code generator of a HInstanceOf
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck             // No optimization yet when checking against a generic array.
};

// Implements `instanceof`: input 0 is the object, input 1 the HLoadClass to
// test against. Produces a boolean.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return false;
  }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return (check_kind == TypeCheckKind::kExactCheck)
        ? SideEffects::None()
        // Mips currently does runtime calls for any other checks.
        : SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

class HBoundType : public HExpression<1> {
 public:
  // Constructs an HBoundType with the given upper_bound.
  // Ensures that the upper_bound is valid.
  HBoundType(HInstruction* input,
             ReferenceTypeInfo upper_bound,
             bool upper_can_be_null,
             uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(upper_bound),
        upper_can_be_null_(upper_can_be_null),
        can_be_null_(upper_can_be_null) {
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
    SetReferenceTypeInfo(upper_bound_);
  }

  // GetUpper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return upper_can_be_null_; }

  void SetCanBeNull(bool can_be_null) {
    DCHECK(upper_can_be_null_ || !can_be_null);
    can_be_null_ = can_be_null;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  const ReferenceTypeInfo upper_bound_;
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then can_be_null_ cannot be true).
  const bool upper_can_be_null_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// Implements `check-cast`: input 0 is the object, input 1 the HLoadClass to
// check against. Throws ClassCastException on failure.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

// Emits a memory barrier of the given kind.
class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
        barrier_kind_(barrier_kind) {}

  MemBarrierKind GetBarrierKind() { return barrier_kind_; }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  const MemBarrierKind barrier_kind_;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};

// Implements monitor-enter / monitor-exit; input 0 is the object to lock or
// unlock.
class HMonitorOperation : public HTemplateInstruction<1> {
 public:
  enum OperationKind {
    kEnter,
    kExit,
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
    : HTemplateInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      kind_(kind) {
    SetRawInputAt(0, object);
  }

  // Instruction may throw a Java exception, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return CanThrow(); }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }


  bool IsEnter() const { return kind_ == kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 private:
  const OperationKind kind_;

 // NOTE(review): this second `private:` label is redundant (the section above
 // is already private); harmless, but could be removed.
 private:
  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
};

/**
 * A HInstruction used as a marker for the replacement of new + <init>
 * of a String to a call to a StringFactory. Only baseline will see
 * the node at code generation, where it will be treated as null.
 * When compiling non-baseline, `HFakeString` instructions are being removed
 * in the instruction simplifier.
 */
class HFakeString : public HTemplateInstruction<0> {
 public:
  explicit HFakeString(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; }

  DECLARE_INSTRUCTION(FakeString);

 private:
  DISALLOW_COPY_AND_ASSIGN(HFakeString);
};

// A single source -> destination move, as managed by HParallelMove and the
// parallel move resolver.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return destination_.IsInvalid() && !source_.IsInvalid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must be performed as if simultaneously (resolving any
// cycles among them); used by register allocation / code generation.
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#ifdef ART_ENABLE_CODEGEN_arm64
#include "nodes_arm64.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base visitor over the instructions of a graph; subclasses override the
// per-instruction Visit##name hooks generated below.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
5569#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5570 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5571 5572 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5573 5574#undef DECLARE_VISIT_INSTRUCTION 5575 5576 private: 5577 HGraph* const graph_; 5578 5579 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5580}; 5581 5582class HGraphDelegateVisitor : public HGraphVisitor { 5583 public: 5584 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5585 virtual ~HGraphDelegateVisitor() {} 5586 5587 // Visit functions that delegate to to super class. 5588#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5589 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5590 5591 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5592 5593#undef DECLARE_VISIT_INSTRUCTION 5594 5595 private: 5596 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5597}; 5598 5599class HInsertionOrderIterator : public ValueObject { 5600 public: 5601 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5602 5603 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5604 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5605 void Advance() { ++index_; } 5606 5607 private: 5608 const HGraph& graph_; 5609 size_t index_; 5610 5611 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5612}; 5613 5614class HReversePostOrderIterator : public ValueObject { 5615 public: 5616 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5617 // Check that reverse post order of the graph has been built. 
5618 DCHECK(!graph.GetReversePostOrder().empty()); 5619 } 5620 5621 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5622 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5623 void Advance() { ++index_; } 5624 5625 private: 5626 const HGraph& graph_; 5627 size_t index_; 5628 5629 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5630}; 5631 5632class HPostOrderIterator : public ValueObject { 5633 public: 5634 explicit HPostOrderIterator(const HGraph& graph) 5635 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5636 // Check that reverse post order of the graph has been built. 5637 DCHECK(!graph.GetReversePostOrder().empty()); 5638 } 5639 5640 bool Done() const { return index_ == 0; } 5641 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5642 void Advance() { --index_; } 5643 5644 private: 5645 const HGraph& graph_; 5646 size_t index_; 5647 5648 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5649}; 5650 5651class HLinearPostOrderIterator : public ValueObject { 5652 public: 5653 explicit HLinearPostOrderIterator(const HGraph& graph) 5654 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5655 5656 bool Done() const { return index_ == 0; } 5657 5658 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5659 5660 void Advance() { 5661 --index_; 5662 DCHECK_GE(index_, 0U); 5663 } 5664 5665 private: 5666 const ArenaVector<HBasicBlock*>& order_; 5667 size_t index_; 5668 5669 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5670}; 5671 5672class HLinearOrderIterator : public ValueObject { 5673 public: 5674 explicit HLinearOrderIterator(const HGraph& graph) 5675 : order_(graph.GetLinearOrder()), index_(0) {} 5676 5677 bool Done() const { return index_ == order_.size(); } 5678 HBasicBlock* Current() const { return order_[index_]; } 5679 void Advance() { ++index_; } 5680 5681 private: 5682 const ArenaVector<HBasicBlock*>& order_; 5683 
  size_t index_;  // Current position in the linear order.

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip ahead if block id 0 is not part of the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Move to the next block id whose bit is set in the loop's block set.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;  // One bit per block id: loop membership.
  const ArenaVector<HBasicBlock*>& blocks_;  // All blocks of the graph.
  size_t index_;  // Current block id.

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
5724class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5725 public: 5726 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5727 : blocks_in_loop_(info.GetBlocks()), 5728 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5729 index_(0) { 5730 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5731 Advance(); 5732 } 5733 } 5734 5735 bool Done() const { return index_ == blocks_.size(); } 5736 HBasicBlock* Current() const { return blocks_[index_]; } 5737 void Advance() { 5738 ++index_; 5739 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5740 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5741 break; 5742 } 5743 } 5744 } 5745 5746 private: 5747 const BitVector& blocks_in_loop_; 5748 const ArenaVector<HBasicBlock*>& blocks_; 5749 size_t index_; 5750 5751 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5752}; 5753 5754inline int64_t Int64FromConstant(HConstant* constant) { 5755 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5756 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5757 : constant->AsLongConstant()->GetValue(); 5758} 5759 5760inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5761 // For the purposes of the compiler, the dex files must actually be the same object 5762 // if we want to safely treat them as the same. This is especially important for JIT 5763 // as custom class loaders can open the same underlying file (or memory) multiple 5764 // times and provide different class resolution but no two class loaders should ever 5765 // use the same DexFile object - doing so is an unsupported hack that can lead to 5766 // all sorts of weird failures. 5767 return &lhs == &rhs; 5768} 5769 5770} // namespace art 5771 5772#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5773