// nodes.h — revision 8e1ef53e3d551f11bb424ae4f29cc1f5eabbe6bc
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/stl_util.h"
#include "dex/compiler_enums.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
#include "utils/array_ref.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFakeString;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

// Default initial capacities for the arena containers below; purely a
// reallocation-avoidance tuning knob, not a hard limit.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// Largest valid shift distances: 5 bits (0x1f) for int shifts, 6 bits (0x3f)
// for long shifts.
static constexpr uint32_t kMaxIntShiftValue = 0x1f;
static constexpr uint64_t kMaxLongShiftValue = 0x3f;

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions that have no associated dex location.
static constexpr uint32_t kNoDexPc = -1;

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};

// Ordered list of instructions, tracked through its first and last elements
// (the instructions themselves carry the links — see the friend declarations).
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  // Try building the SSA form of this graph, with dominance computation and loop
  // recognition. Returns whether it was successful in doing all these steps.
  bool TryBuildingSsa() {
    BuildDominatorTree();
    // The SSA builder requires loops to all be natural. Specifically, the dead phi
    // elimination phase checks the consistency of the graph when doing a post-order
    // visit for eliminating dead phis: a dead phi can only have loop header phi
    // users remaining when being visited.
    if (!AnalyzeNaturalLoops()) return false;
    // Precompute per-block try membership before entering the SSA builder,
    // which needs the information to build catch block phis from values of
    // locals at throwing instructions inside try blocks.
    ComputeTryBlockInformation();
    TransformToSsa();
    in_ssa_form_ = true;
    return true;
  }

  void ComputeDominanceInformation();
  void ClearDominanceInformation();

  void BuildDominatorTree();
  void TransformToSsa();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns false if one
  // loop is not natural, that is the header does not dominate the
  // back edge.
  bool AnalyzeNaturalLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction used to replace the invoke expression or null if the
  // invoke is for a void method.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    // Floats are cached by their bit pattern so that e.g. -0.0f and 0.0f
    // remain distinct cache keys.
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  // Returns an instruction with the opposite boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

 private:
  void FindBackEdges(ArenaBitVector* visited);
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

// Loop metadata attached to a loop-header block: the header, the back edges,
// the set of member blocks, and the loop's SuspendCheck.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop. Returns whether the loop is a natural loop,
  // that is the header dominates the back edge.
  bool Populate();

  // Reanalyzes the loop by removing loop info from its blocks and re-running
  // Populate(). If there are no back edges left, the loop info is completely
  // removed as well as its SuspendCheck instruction. It must be run on nested
  // inner loops first.
  void Update();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop or any loop nested inside
  // this loop. If must_dominate is set, only definitions that actually dominate the loop
  // header can be invariant. Otherwise, any definition outside the loop, including
  // definitions that appear after the loop, is invariant.
  bool IsLoopInvariant(HInstruction* instruction, bool must_dominate) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Split catch block into two blocks after the original move-exception bytecode
  // instruction, or at the beginning if not present. Returns the newly created,
  // latter block, or nullptr if such block could not be created (must be dead
  // in that case). Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitCatchBlockAfterMoveException();

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
955 bool Dominates(HBasicBlock* block) const; 956 957 size_t GetLifetimeStart() const { return lifetime_start_; } 958 size_t GetLifetimeEnd() const { return lifetime_end_; } 959 960 void SetLifetimeStart(size_t start) { lifetime_start_ = start; } 961 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; } 962 963 bool EndsWithControlFlowInstruction() const; 964 bool EndsWithIf() const; 965 bool EndsWithTryBoundary() const; 966 bool HasSinglePhi() const; 967 968 private: 969 HGraph* graph_; 970 ArenaVector<HBasicBlock*> predecessors_; 971 ArenaVector<HBasicBlock*> successors_; 972 HInstructionList instructions_; 973 HInstructionList phis_; 974 HLoopInformation* loop_information_; 975 HBasicBlock* dominator_; 976 ArenaVector<HBasicBlock*> dominated_blocks_; 977 uint32_t block_id_; 978 // The dex program counter of the first instruction of this block. 979 const uint32_t dex_pc_; 980 size_t lifetime_start_; 981 size_t lifetime_end_; 982 TryCatchInformation* try_catch_information_; 983 984 friend class HGraph; 985 friend class HInstruction; 986 987 DISALLOW_COPY_AND_ASSIGN(HBasicBlock); 988}; 989 990// Iterates over the LoopInformation of all loops which contain 'block' 991// from the innermost to the outermost. 
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  // Done once there is no enclosing loop left.
  bool Done() const { return current_ == nullptr; }

  // Moves to the next outer loop by following the pre-header's loop
  // information (the pre-header belongs to the enclosing loop).
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// X-macro table of all concrete HIR instructions shared by every backend.
// The order defines the InstructionKind enum values, so keep it stable.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(Condition, BinaryOperation)                                         \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

// Per-architecture instruction tables; empty unless the corresponding
// codegen is compiled in.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64IntermediateAddress, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Concrete instructions plus the abstract intermediate classes.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides every concrete instruction class must provide:
// kind, debug name, safe downcasts, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKind() const OVERRIDE { return k##type; }          \
  const char* DebugName() const OVERRIDE { return #type; }              \
  const H##type* As##type() const OVERRIDE { return this; }             \
  H##type* As##type() OVERRIDE { return this; }                         \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

template <typename T> class HUseList;

// Node of the intrusive doubly-linked use list below. `T` is the user type
// (HInstruction* or HEnvironment*).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Only HUseList can create nodes (see friend declaration below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  // Input index at which `user_` uses the defining instruction.
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Arena-allocated, singly-owned doubly-linked list of uses. Nodes are never
// freed individually; removal just unlinks them.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list. The node itself stays arena-allocated.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear scan; used in debug checks only.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n) count; prefer IsEmpty()/HasOnlyOneUse() where possible.
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over a use list. Does not support concurrent mutation of
// the node currently pointed at.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Completes `old_record` with its back-pointer into the use list; the old
  // record must not already have one.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]).
 * The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No changes, no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every change bit and every dependency bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All field/array writes and reads plus the 'can trigger GC' bit, but not
  // the 'depends on GC' bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access acts as a full read/write barrier, hence the widening
  // to AllWritesAndReads() below.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set with the other's bits cleared.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if all of `other`'s bits are contained in this set.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  // The 'depend on' bits are shifted down so they line up with the
  // corresponding 'change' bits of `other`.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Nine bits per access kind: one per Primitive::Type (D F J I S C B Z L).
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Copies the shape (size, dex location) of `to_copy` but not its values;
  // use CopyFrom() afterwards to copy the values.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a deep copy of `parent` (and its own parent chain) at the end of
  // this environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw write of vreg `index`; does not update the use lists.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

  // An environment with a parent belongs to an inlined invocation.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  // One record per dex virtual register.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  // Environment of the caller frame when this one comes from an inlined invoke.
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Compile-time reference type information: a class handle plus a bit telling
// whether the type is exact or only a superclass of the runtime type.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this (exact) type can hold an `rti` value.
  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this (exact) type can hold all values of an `rti` array.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo();
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// One enumerator per instruction class listed in FOR_EACH_INSTRUCTION.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  // Raw input write; does not update the use lists (see AddUseAt).
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Registers `user` as using this instruction at input `index`, updating
  // both this instruction's use list and the user's input record.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method.
It could be more in case of inlining. 1935 size_t EnvironmentSize() const; 1936 1937 LocationSummary* GetLocations() const { return locations_; } 1938 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1939 1940 void ReplaceWith(HInstruction* instruction); 1941 void ReplaceInput(HInstruction* replacement, size_t index); 1942 1943 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1944 // uses of this instruction by `other` are *not* updated. 1945 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1946 ReplaceWith(other); 1947 other->ReplaceInput(this, use_index); 1948 } 1949 1950 // Move `this` instruction before `cursor`. 1951 void MoveBefore(HInstruction* cursor); 1952 1953#define INSTRUCTION_TYPE_CHECK(type, super) \ 1954 bool Is##type() const { return (As##type() != nullptr); } \ 1955 virtual const H##type* As##type() const { return nullptr; } \ 1956 virtual H##type* As##type() { return nullptr; } 1957 1958 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1959#undef INSTRUCTION_TYPE_CHECK 1960 1961 // Returns whether the instruction can be moved within the graph. 1962 virtual bool CanBeMoved() const { return false; } 1963 1964 // Returns whether the two instructions are of the same kind. 1965 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1966 return false; 1967 } 1968 1969 // Returns whether any data encoded in the two instructions is equal. 1970 // This method does not look at the inputs. Both instructions must be 1971 // of the same type, otherwise the method has undefined behavior. 1972 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1973 return false; 1974 } 1975 1976 // Returns whether two instructions are equal, that is: 1977 // 1) They have the same type and contain the same data (InstructionDataEquals). 1978 // 2) Their inputs are identical. 
1979 bool Equals(HInstruction* other) const; 1980 1981 virtual InstructionKind GetKind() const = 0; 1982 1983 virtual size_t ComputeHashCode() const { 1984 size_t result = GetKind(); 1985 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1986 result = (result * 31) + InputAt(i)->GetId(); 1987 } 1988 return result; 1989 } 1990 1991 SideEffects GetSideEffects() const { return side_effects_; } 1992 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 1993 1994 size_t GetLifetimePosition() const { return lifetime_position_; } 1995 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1996 LiveInterval* GetLiveInterval() const { return live_interval_; } 1997 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1998 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1999 2000 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 2001 2002 // Returns whether the code generation of the instruction will require to have access 2003 // to the current method. Such instructions are: 2004 // (1): Instructions that require an environment, as calling the runtime requires 2005 // to walk the stack and have the current method stored at a specific stack address. 2006 // (2): Object literals like classes and strings, that are loaded from the dex cache 2007 // fields of the current method. 2008 bool NeedsCurrentMethod() const { 2009 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 2010 } 2011 2012 // Returns whether the code generation of the instruction will require to have access 2013 // to the dex cache of the current method's declaring class via the current method. 2014 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 2015 2016 // Does this instruction have any use in an environment before 2017 // control flow hits 'other'? 
2018 bool HasAnyEnvironmentUseBefore(HInstruction* other); 2019 2020 // Remove all references to environment uses of this instruction. 2021 // The caller must ensure that this is safe to do. 2022 void RemoveEnvironmentUsers(); 2023 2024 protected: 2025 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2026 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2027 2028 private: 2029 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2030 2031 HInstruction* previous_; 2032 HInstruction* next_; 2033 HBasicBlock* block_; 2034 const uint32_t dex_pc_; 2035 2036 // An instruction gets an id when it is added to the graph. 2037 // It reflects creation order. A negative id means the instruction 2038 // has not been added to the graph. 2039 int id_; 2040 2041 // When doing liveness analysis, instructions that have uses get an SSA index. 2042 int ssa_index_; 2043 2044 // List of instructions that have this instruction as input. 2045 HUseList<HInstruction*> uses_; 2046 2047 // List of environments that contain this instruction. 2048 HUseList<HEnvironment*> env_uses_; 2049 2050 // The environment associated with this instruction. Not null if the instruction 2051 // might jump out of the method. 2052 HEnvironment* environment_; 2053 2054 // Set by the code generator. 2055 LocationSummary* locations_; 2056 2057 // Set by the liveness analysis. 2058 LiveInterval* live_interval_; 2059 2060 // Set by the liveness analysis, this is the position in a linear 2061 // order of blocks where this instruction's live interval start. 2062 size_t lifetime_position_; 2063 2064 SideEffects side_effects_; 2065 2066 // TODO: for primitive types this should be marked as invalid. 
2067 ReferenceTypeInfo reference_type_info_; 2068 2069 friend class GraphChecker; 2070 friend class HBasicBlock; 2071 friend class HEnvironment; 2072 friend class HGraph; 2073 friend class HInstructionList; 2074 2075 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2076}; 2077std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2078 2079class HInputIterator : public ValueObject { 2080 public: 2081 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2082 2083 bool Done() const { return index_ == instruction_->InputCount(); } 2084 HInstruction* Current() const { return instruction_->InputAt(index_); } 2085 void Advance() { index_++; } 2086 2087 private: 2088 HInstruction* instruction_; 2089 size_t index_; 2090 2091 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2092}; 2093 2094class HInstructionIterator : public ValueObject { 2095 public: 2096 explicit HInstructionIterator(const HInstructionList& instructions) 2097 : instruction_(instructions.first_instruction_) { 2098 next_ = Done() ? nullptr : instruction_->GetNext(); 2099 } 2100 2101 bool Done() const { return instruction_ == nullptr; } 2102 HInstruction* Current() const { return instruction_; } 2103 void Advance() { 2104 instruction_ = next_; 2105 next_ = Done() ? nullptr : instruction_->GetNext(); 2106 } 2107 2108 private: 2109 HInstruction* instruction_; 2110 HInstruction* next_; 2111 2112 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2113}; 2114 2115class HBackwardInstructionIterator : public ValueObject { 2116 public: 2117 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2118 : instruction_(instructions.last_instruction_) { 2119 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2120 } 2121 2122 bool Done() const { return instruction_ == nullptr; } 2123 HInstruction* Current() const { return instruction_; } 2124 void Advance() { 2125 instruction_ = next_; 2126 next_ = Done() ? 
 nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// Instruction with a compile-time fixed number of inputs, stored inline in a
// std::array of HUserRecord.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  // There are no inputs, so any access is a programming error.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};

// Fixed-arity instruction that additionally produces a value of type `type_`.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE { return type_; }

 protected:
  Primitive::Type type_;
};

// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};

// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};

// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};

// Jumps from one block to another.
2250class HGoto : public HTemplateInstruction<0> { 2251 public: 2252 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2253 2254 bool IsControlFlow() const OVERRIDE { return true; } 2255 2256 HBasicBlock* GetSuccessor() const { 2257 return GetBlock()->GetSingleSuccessor(); 2258 } 2259 2260 DECLARE_INSTRUCTION(Goto); 2261 2262 private: 2263 DISALLOW_COPY_AND_ASSIGN(HGoto); 2264}; 2265 2266class HConstant : public HExpression<0> { 2267 public: 2268 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2269 : HExpression(type, SideEffects::None(), dex_pc) {} 2270 2271 bool CanBeMoved() const OVERRIDE { return true; } 2272 2273 virtual bool IsMinusOne() const { return false; } 2274 virtual bool IsZero() const { return false; } 2275 virtual bool IsOne() const { return false; } 2276 2277 virtual uint64_t GetValueAsUint64() const = 0; 2278 2279 DECLARE_INSTRUCTION(Constant); 2280 2281 private: 2282 DISALLOW_COPY_AND_ASSIGN(HConstant); 2283}; 2284 2285class HNullConstant : public HConstant { 2286 public: 2287 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2288 return true; 2289 } 2290 2291 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2292 2293 size_t ComputeHashCode() const OVERRIDE { return 0; } 2294 2295 DECLARE_INSTRUCTION(NullConstant); 2296 2297 private: 2298 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2299 2300 friend class HGraph; 2301 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2302}; 2303 2304// Constants of the type int. Those can be from Dex instructions, or 2305// synthesized (for example with the if-eqz instruction). 
2306class HIntConstant : public HConstant { 2307 public: 2308 int32_t GetValue() const { return value_; } 2309 2310 uint64_t GetValueAsUint64() const OVERRIDE { 2311 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2312 } 2313 2314 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2315 DCHECK(other->IsIntConstant()); 2316 return other->AsIntConstant()->value_ == value_; 2317 } 2318 2319 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2320 2321 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2322 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2323 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2324 2325 DECLARE_INSTRUCTION(IntConstant); 2326 2327 private: 2328 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2329 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2330 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2331 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {} 2332 2333 const int32_t value_; 2334 2335 friend class HGraph; 2336 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2337 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2338 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2339}; 2340 2341class HLongConstant : public HConstant { 2342 public: 2343 int64_t GetValue() const { return value_; } 2344 2345 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2346 2347 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2348 DCHECK(other->IsLongConstant()); 2349 return other->AsLongConstant()->value_ == value_; 2350 } 2351 2352 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2353 2354 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2355 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2356 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2357 2358 DECLARE_INSTRUCTION(LongConstant); 2359 2360 private: 2361 explicit HLongConstant(int64_t value, 
    uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the input evaluates to true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the input evaluates to false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns a view of all exception handlers, i.e. every successor except
  // the normal-flow one at index zero.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  // Requires an environment — presumably to reconstruct interpreter state on
  // deoptimization; confirm against the runtime's deopt entry point.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        start_value_(start_value),
        num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // First case value of the contiguous table.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Base class of side-effect-free operations with one input and a typed result.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;

  DECLARE_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Base class of operations with two inputs and a typed result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if inputs are identical or a constant is already on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. The mixed-type and null overloads
  // default to "not defined" and return null.
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED,
                              HLongConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (int, long) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (null, null) case.";
    return nullptr;
  }

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
};

// Base class of all boolean-valued comparison instructions.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
        needs_materialization_(true),
        bias_(ComparisonBias::kNoBias) {}

  bool NeedsMaterialization() const { return needs_materialization_; }
  void ClearNeedsMaterialization() { needs_materialization_ = false; }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }

  void SetBias(ComparisonBias bias) { bias_ = bias; }

  // Two conditions of the same kind are equal iff they carry the same bias.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return bias_ == other->AsCondition()->bias_;
  }

  // Only meaningful for floating point comparisons (checked by the DCHECK).
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ?
((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2675 } 2676 2677 bool IsFPConditionFalseIfNaN() const { 2678 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2679 IfCondition if_cond = GetCondition(); 2680 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2681 } 2682 2683 private: 2684 // For register allocation purposes, returns whether this instruction needs to be 2685 // materialized (that is, not just be in the processor flags). 2686 bool needs_materialization_; 2687 2688 // Needed if we merge a HCompare into a HCondition. 2689 ComparisonBias bias_; 2690 2691 DISALLOW_COPY_AND_ASSIGN(HCondition); 2692}; 2693 2694// Instruction to check if two inputs are equal to each other. 2695class HEqual : public HCondition { 2696 public: 2697 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2698 : HCondition(first, second, dex_pc) {} 2699 2700 bool IsCommutative() const OVERRIDE { return true; } 2701 2702 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2703 return GetBlock()->GetGraph()->GetIntConstant( 2704 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2705 } 2706 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2707 return GetBlock()->GetGraph()->GetIntConstant( 2708 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2709 } 2710 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2711 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2712 return GetBlock()->GetGraph()->GetIntConstant(1); 2713 } 2714 2715 DECLARE_INSTRUCTION(Equal); 2716 2717 IfCondition GetCondition() const OVERRIDE { 2718 return kCondEQ; 2719 } 2720 2721 IfCondition GetOppositeCondition() const OVERRIDE { 2722 return kCondNE; 2723 } 2724 2725 private: 2726 template <typename T> bool Compute(T x, T y) const { return x == y; } 2727 2728 DISALLOW_COPY_AND_ASSIGN(HEqual); 2729}; 2730 2731class HNotEqual : public HCondition { 2732 public: 2733 
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  // x != y is symmetric, so the optimizer may swap the inputs.
  bool IsCommutative() const OVERRIDE { return true; }

  // Constant folding: the result of a condition is always an int constant (0 or 1).
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // null != null is always false, hence constant 0.
  // NOTE(review): unlike the overloads above, no dex pc is passed here, so
  // GetIntConstant's default applies — confirm this is intentional.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(0);
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

// Condition materializing `x < y` (signed comparison for integral constants).
class HLessThan : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // The comparison of two longs still folds to an int constant (0 or 1).
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  // The negation of `<` is `>=`.
  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

// Condition materializing `x <= y` (signed comparison for integral constants).
class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  // The negation of `<=` is `>`.
  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

// Condition materializing `x > y` (signed comparison for integral constants).
class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  // The negation of `>` is `<=`.
  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

// Condition materializing `x >= y` (signed comparison for integral constants).
class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  // The negation of `>=` is `<`.
  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};

// Unsigned condition materializing `x < y`: constant folding reinterprets the
// operands' bit patterns as unsigned before comparing.
class HBelow : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint32_t>(x->GetValue()),
                static_cast<uint32_t>(y->GetValue())), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint64_t>(x->GetValue()),
                static_cast<uint64_t>(y->GetValue())), GetDexPc());
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const OVERRIDE {
    return kCondB;
  }

  // The negation of unsigned `<` is unsigned `>=` (above-or-equal).
  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondAE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HBelow);
};
2918 2919class HBelowOrEqual : public HCondition { 2920 public: 2921 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2922 : HCondition(first, second, dex_pc) {} 2923 2924 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2925 return GetBlock()->GetGraph()->GetIntConstant( 2926 Compute(static_cast<uint32_t>(x->GetValue()), 2927 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2928 } 2929 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2930 return GetBlock()->GetGraph()->GetIntConstant( 2931 Compute(static_cast<uint64_t>(x->GetValue()), 2932 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2933 } 2934 2935 DECLARE_INSTRUCTION(BelowOrEqual); 2936 2937 IfCondition GetCondition() const OVERRIDE { 2938 return kCondBE; 2939 } 2940 2941 IfCondition GetOppositeCondition() const OVERRIDE { 2942 return kCondA; 2943 } 2944 2945 private: 2946 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2947 2948 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2949}; 2950 2951class HAbove : public HCondition { 2952 public: 2953 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2954 : HCondition(first, second, dex_pc) {} 2955 2956 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2957 return GetBlock()->GetGraph()->GetIntConstant( 2958 Compute(static_cast<uint32_t>(x->GetValue()), 2959 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2960 } 2961 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2962 return GetBlock()->GetGraph()->GetIntConstant( 2963 Compute(static_cast<uint64_t>(x->GetValue()), 2964 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2965 } 2966 2967 DECLARE_INSTRUCTION(Above); 2968 2969 IfCondition GetCondition() const OVERRIDE { 2970 return kCondA; 2971 } 2972 2973 IfCondition GetOppositeCondition() const OVERRIDE { 2974 return kCondBE; 2975 } 2976 2977 private: 2978 template <typename T> bool 
Compute(T x, T y) const { return x > y; } 2979 2980 DISALLOW_COPY_AND_ASSIGN(HAbove); 2981}; 2982 2983class HAboveOrEqual : public HCondition { 2984 public: 2985 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2986 : HCondition(first, second, dex_pc) {} 2987 2988 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2989 return GetBlock()->GetGraph()->GetIntConstant( 2990 Compute(static_cast<uint32_t>(x->GetValue()), 2991 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2992 } 2993 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2994 return GetBlock()->GetGraph()->GetIntConstant( 2995 Compute(static_cast<uint64_t>(x->GetValue()), 2996 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2997 } 2998 2999 DECLARE_INSTRUCTION(AboveOrEqual); 3000 3001 IfCondition GetCondition() const OVERRIDE { 3002 return kCondAE; 3003 } 3004 3005 IfCondition GetOppositeCondition() const OVERRIDE { 3006 return kCondB; 3007 } 3008 3009 private: 3010 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3011 3012 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3013}; 3014 3015// Instruction to check how two inputs compare to each other. 3016// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3017class HCompare : public HBinaryOperation { 3018 public: 3019 HCompare(Primitive::Type type, 3020 HInstruction* first, 3021 HInstruction* second, 3022 ComparisonBias bias, 3023 uint32_t dex_pc) 3024 : HBinaryOperation(Primitive::kPrimInt, 3025 first, 3026 second, 3027 SideEffectsForArchRuntimeCalls(type), 3028 dex_pc), 3029 bias_(bias) { 3030 DCHECK_EQ(type, first->GetType()); 3031 DCHECK_EQ(type, second->GetType()); 3032 } 3033 3034 template <typename T> 3035 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 
1 : -1; } 3036 3037 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3038 return GetBlock()->GetGraph()->GetIntConstant( 3039 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3040 } 3041 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3042 return GetBlock()->GetGraph()->GetIntConstant( 3043 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3044 } 3045 3046 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3047 return bias_ == other->AsCompare()->bias_; 3048 } 3049 3050 ComparisonBias GetBias() const { return bias_; } 3051 3052 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3053 3054 3055 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3056 // MIPS64 uses a runtime call for FP comparisons. 3057 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3058 } 3059 3060 DECLARE_INSTRUCTION(Compare); 3061 3062 private: 3063 const ComparisonBias bias_; 3064 3065 DISALLOW_COPY_AND_ASSIGN(HCompare); 3066}; 3067 3068// A local in the graph. Corresponds to a Dex register. 3069class HLocal : public HTemplateInstruction<0> { 3070 public: 3071 explicit HLocal(uint16_t reg_number) 3072 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3073 3074 DECLARE_INSTRUCTION(Local); 3075 3076 uint16_t GetRegNumber() const { return reg_number_; } 3077 3078 private: 3079 // The Dex register number. 3080 const uint16_t reg_number_; 3081 3082 DISALLOW_COPY_AND_ASSIGN(HLocal); 3083}; 3084 3085// Load a given local. The local is an input of this instruction. 
class HLoadLocal : public HExpression<1> {
 public:
  HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
  }

  // Input 0 is always the HLocal set in the constructor.
  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(LoadLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadLocal);
};

// Store a value in a given local. This instruction has two inputs: the value
// and the local.
class HStoreLocal : public HTemplateInstruction<2> {
 public:
  HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
    SetRawInputAt(1, value);
  }

  // Input 0 is the destination local, input 1 the stored value.
  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(StoreLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HStoreLocal);
};

// A 32-bit floating-point constant in the graph.
class HFloatConstant : public HConstant {
 public:
  float GetValue() const { return value_; }

  // The raw 32-bit pattern, zero-extended to 64 bits.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  // Equality is on bit patterns, so e.g. +0.0f and -0.0f are distinct constants.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant());
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Exact bit-pattern match against -1.0f.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Value comparison: matches both +0.0f and -0.0f.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0f;
  }
  // Exact bit-pattern match against 1.0f.
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs the constant from a raw 32-bit pattern.
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};

// A 64-bit floating-point constant in the graph.
class HDoubleConstant : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  // Equality is on bit patterns, so e.g. +0.0 and -0.0 are distinct constants.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant());
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Exact bit-pattern match against -1.0.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Value comparison: matches both +0.0 and -0.0.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0;
  }
  // Exact bit-pattern match against 1.0.
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs the constant from a raw 64-bit pattern.
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};

// Enumeration of all recognized intrinsics, generated from intrinsics_list.h,
// plus kNone for non-intrinsic invokes.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);

enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};

// Base class for all invoke instructions (static/direct, virtual, interface,
// unresolved). Inputs are the call arguments, optionally followed by
// non-argument inputs (see GetNumberOfArguments below).
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  // Defined out of line; depends on the intrinsic recognized for this invoke.
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return return_type_; }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  // The dex file is taken from the environment, so this is only valid once the
  // environment has been set.
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  // Any invoke is conservatively assumed to be able to throw.
  bool CanThrow() const OVERRIDE { return true; }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_INSTRUCTION(Invoke);

 protected:
  // `number_of_other_inputs` counts the non-argument inputs appended after the
  // arguments (e.g. a clinit check for HInvokeStaticOrDirect).
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      return_type_(return_type),
      dex_method_index_(dex_method_index),
      original_invoke_type_(original_invoke_type),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const Primitive::Type return_type_;
  const uint32_t dex_method_index_;
  const InvokeType original_invoke_type_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};

// Invoke whose target method could not be resolved at compile time; dispatch
// is left entirely to the runtime.
class HInvokeUnresolved : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};

class HInvokeStaticOrDirect : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Use ArtMethod* at an address that will be known at link time, embed the direct
    // address in the code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kDirectAddressWithFixup,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use PC-relative call instruction patched at link time.
    // Used for calls within an oat file, boot->boot or app->app.
    kCallPCRelative,

    // Call to a known target address, embed the direct address in code.
    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
    kCallDirect,

    // Call to a target address that will be known at link time, embed the direct
    // address in code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kCallDirectWithFixup,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
    uint64_t direct_code_ptr;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) +
                    (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        invoke_type_(invoke_type),
        clinit_check_requirement_(clinit_check_requirement),
        target_method_(target_method),
        dispatch_info_(dispatch_info) { }

  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return return_type_ == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke IsStringInit(), it initially has a HFakeString special argument
  // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(),
  // the "special input" is the current method pointer; otherwise there may be one
  // platform-specific special input, such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }

  InvokeType GetInvokeType() const { return invoke_type_; }
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }

  // Thread entrypoint offset of the String init method; only valid for
  // string-init invokes (see DispatchInfo::method_load_data).
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    clinit_check_requirement_ = new_requirement;
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Returns whether this is a string-init invoke whose (still present) trailing
  // HFakeString input is `str`.
  bool IsStringFactoryFor(HFakeString* str) const {
    if (!IsStringInit()) return false;
    DCHECK(!HasCurrentMethodInput());
    // If the fake-string input has already been removed, the input count equals
    // the argument count.
    if (InputCount() == (number_of_arguments_)) return false;
    return InputAt(InputCount() - 1)->AsFakeString() == str;
  }

  void RemoveFakeStringArgumentAsLastInput() {
    DCHECK(IsStringInit());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsFakeString()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }

  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 private:
  const InvokeType invoke_type_;
  ClinitCheckRequirement clinit_check_requirement_;
  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
3605 MethodReference target_method_; 3606 DispatchInfo dispatch_info_; 3607 3608 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3609}; 3610std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs); 3611 3612class HInvokeVirtual : public HInvoke { 3613 public: 3614 HInvokeVirtual(ArenaAllocator* arena, 3615 uint32_t number_of_arguments, 3616 Primitive::Type return_type, 3617 uint32_t dex_pc, 3618 uint32_t dex_method_index, 3619 uint32_t vtable_index) 3620 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3621 vtable_index_(vtable_index) {} 3622 3623 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3624 // TODO: Add implicit null checks in intrinsics. 3625 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3626 } 3627 3628 uint32_t GetVTableIndex() const { return vtable_index_; } 3629 3630 DECLARE_INSTRUCTION(InvokeVirtual); 3631 3632 private: 3633 const uint32_t vtable_index_; 3634 3635 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3636}; 3637 3638class HInvokeInterface : public HInvoke { 3639 public: 3640 HInvokeInterface(ArenaAllocator* arena, 3641 uint32_t number_of_arguments, 3642 Primitive::Type return_type, 3643 uint32_t dex_pc, 3644 uint32_t dex_method_index, 3645 uint32_t imt_index) 3646 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3647 imt_index_(imt_index) {} 3648 3649 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3650 // TODO: Add implicit null checks in intrinsics. 
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  // Index into the interface method table used for dispatch.
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};

// Allocates an object of the class `cls` (dex new-instance) by calling the
// runtime entrypoint `entrypoint_`. Inputs: 0 = class, 1 = current method.
class HNewInstance : public HExpression<2> {
 public:
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool can_throw,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        can_throw_(can_throw),
        finalizable_(finalizable),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // It may throw when called on type that's not instantiable/accessible.
  // It can throw OOME.
  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
  // NOTE: deliberately always true for now (`|| true`): even when `can_throw_`
  // is false, the allocation itself can still throw OutOfMemoryError.
  bool CanThrow() const OVERRIDE { return can_throw_ || true; }

  bool IsFinalizable() const { return finalizable_; }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  DECLARE_INSTRUCTION(NewInstance);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  // False when the type is known instantiable/accessible; currently only
  // meaningful for the TODO above since CanThrow() is always true.
  const bool can_throw_;
  const bool finalizable_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};

// Arithmetic negation (-x) on int/long (and float/double via codegen).
class HNeg : public HUnaryOperation {
 public:
  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  template <typename T> T Compute(T x) const { return -x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Neg);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNeg);
};

// Allocates an array (dex new-array) through a runtime entrypoint.
// Inputs: 0 = length, 1 = current method.
class HNewArray : public HExpression<2> {
 public:
  HNewArray(HInstruction* length,
            HCurrentMethod* current_method,
            uint32_t dex_pc,
            uint16_t type_index,
            const DexFile& dex_file,
            QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, length);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A freshly allocated array is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};

// Integer/long addition; constant-foldable via Evaluate().
class HAdd : public HBinaryOperation {
 public:
  HAdd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x + y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Add);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAdd);
};

// Integer/long subtraction; not commutative.
class HSub : public HBinaryOperation {
 public:
  HSub(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T> T Compute(T x, T y) const { return x - y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Integer/long multiplication; constant-foldable via Evaluate().
class HMul : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Integer/long division. A preceding HDivZeroCheck guarantees a non-zero
// divisor by the time constant folding runs.
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Integer/long remainder. Same zero-divisor guarantee as HDiv.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Throws ArithmeticException when its input is zero; passes the value through
// otherwise (its type follows the input's type).
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Shift left. The shift distance is masked (Java semantics: only the low
// 5/6 bits of the distance are used for int/long).
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Arithmetic (sign-extending) shift right, with the same masking of the
// shift distance as HShl.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Logical (zero-extending) shift right: the value is reinterpreted as
// unsigned before shifting, then cast back.
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND. Mixed int/long operands fold to a long constant.
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR. Mixed int/long operands fold to a long constant.
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x | y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};

// Bitwise XOR. Mixed int/long operands fold to a long constant.
class HXor : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};

// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index),
        is_this_(is_this),
        // The 'this' argument of an instance method is never null.
        can_be_null_(!is_this) {}

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return is_this_; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  // Whether or not the parameter value corresponds to 'this' argument.
  const bool is_this_;

  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement (~x) on int/long.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean value; the input must be 0 or 1.
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> bool Compute(T x) const {
    // Booleans are only ever 0 or 1.
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

// Primitive type conversion (e.g. int-to-float, long-to-int).
class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // A conversion to the same type would be a no-op and must not be built.
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    // FP->integral and long->FP conversions may go through a runtime call.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

// Sentinel for instructions with no associated dex register number.
static constexpr uint32_t kNoRegNumber = -1;

// SSA phi: merges the values of a dex register coming from each predecessor
// of its block.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(type),
        is_live_(false),
        can_be_null_(true) {
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      // Sub-int types are widened to int in phis.
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type type) { type_ = type; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness flags used by the SSA builder / dead phi elimination.
  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return is_live_; }

  // True if `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Throws NullPointerException when its input is null; otherwise passes the
// (now known non-null) value through.
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Immutable description of a resolved field, shared by the field get/set
// instructions below.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Reads an instance field. Input 0 is the object.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only offsets within the first page fault reliably on a null base.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Writes an instance field. Inputs: 0 = object, 1 = value.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only offsets within the first page fault reliably on a null base.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Reads an array element. Inputs: 0 = array, 1 = index.
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Used by the SSA builder to fix up float/double array reads.
  void SetType(Primitive::Type type) { type_ = type; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

// Writes an array element. Inputs: 0 = array, 1 = index, 2 = value.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc),
        expected_component_type_(expected_component_type),
        // Storing a reference may require a runtime type check.
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
4631 return false; 4632 } 4633 4634 void ClearNeedsTypeCheck() { 4635 needs_type_check_ = false; 4636 } 4637 4638 void ClearValueCanBeNull() { 4639 value_can_be_null_ = false; 4640 } 4641 4642 void SetStaticTypeOfArrayIsObjectArray() { 4643 static_type_of_array_is_object_array_ = true; 4644 } 4645 4646 bool GetValueCanBeNull() const { return value_can_be_null_; } 4647 bool NeedsTypeCheck() const { return needs_type_check_; } 4648 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4649 4650 HInstruction* GetArray() const { return InputAt(0); } 4651 HInstruction* GetIndex() const { return InputAt(1); } 4652 HInstruction* GetValue() const { return InputAt(2); } 4653 4654 Primitive::Type GetComponentType() const { 4655 // The Dex format does not type floating point index operations. Since the 4656 // `expected_component_type_` is set during building and can therefore not 4657 // be correct, we also check what is the value type. If it is a floating 4658 // point type, we must use that type. 4659 Primitive::Type value_type = GetValue()->GetType(); 4660 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4661 ? value_type 4662 : expected_component_type_; 4663 } 4664 4665 Primitive::Type GetRawExpectedComponentType() const { 4666 return expected_component_type_; 4667 } 4668 4669 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4670 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None(); 4671 } 4672 4673 DECLARE_INSTRUCTION(ArraySet); 4674 4675 private: 4676 const Primitive::Type expected_component_type_; 4677 bool needs_type_check_; 4678 bool value_can_be_null_; 4679 // Cached information for the reference_type_info_ so that codegen 4680 // does not need to inspect the static type. 
4681 bool static_type_of_array_is_object_array_; 4682 4683 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4684}; 4685 4686class HArrayLength : public HExpression<1> { 4687 public: 4688 HArrayLength(HInstruction* array, uint32_t dex_pc) 4689 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4690 // Note that arrays do not change length, so the instruction does not 4691 // depend on any write. 4692 SetRawInputAt(0, array); 4693 } 4694 4695 bool CanBeMoved() const OVERRIDE { return true; } 4696 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4697 return true; 4698 } 4699 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4700 return obj == InputAt(0); 4701 } 4702 4703 DECLARE_INSTRUCTION(ArrayLength); 4704 4705 private: 4706 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4707}; 4708 4709class HBoundsCheck : public HExpression<2> { 4710 public: 4711 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4712 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4713 DCHECK(index->GetType() == Primitive::kPrimInt); 4714 SetRawInputAt(0, index); 4715 SetRawInputAt(1, length); 4716 } 4717 4718 bool CanBeMoved() const OVERRIDE { return true; } 4719 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4720 return true; 4721 } 4722 4723 bool NeedsEnvironment() const OVERRIDE { return true; } 4724 4725 bool CanThrow() const OVERRIDE { return true; } 4726 4727 HInstruction* GetIndex() const { return InputAt(0); } 4728 4729 DECLARE_INSTRUCTION(BoundsCheck); 4730 4731 private: 4732 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4733}; 4734 4735/** 4736 * Some DEX instructions are folded into multiple HInstructions that need 4737 * to stay live until the last HInstruction. This class 4738 * is used as a marker for the baseline compiler to ensure its preceding 4739 * HInstruction stays live. 
`index` represents the stack location index of the
 * instruction (the actual offset is computed as index * vreg_size).
 */
class HTemporary : public HTemplateInstruction<0> {
 public:
  explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {}

  size_t GetIndex() const { return index_; }

  Primitive::Type GetType() const OVERRIDE {
    // The previous instruction is the one that will be stored in the temporary location.
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Safepoint at which the runtime may suspend the thread (e.g. for GC — it
// carries the CanTriggerGC side effect and always needs an environment).
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    // Will call runtime and load the class if the class is not loaded yet.
    // TODO: finer grain decision.
    return !is_referrers_class_;
  }

  bool MustGenerateClinitCheck() const {
    return generate_clinit_check_;
  }
  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    generate_clinit_check_ = generate_clinit_check;
  }

  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_;
  }

  bool NeedsAccessCheck() const {
    return needs_access_check_;
  }

  bool CanThrow() const OVERRIDE {
    // May call runtime and therefore can throw.
    // TODO: finer grain decision.
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool is_referrers_class_;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  bool generate_clinit_check_;
  bool needs_access_check_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};

// Loads the String at `string_index` (presumably resolved through the dex
// cache of the current method — input 0 is HCurrentMethod; confirm in codegen).
class HLoadString : public HExpression<1> {
 public:
  HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        string_index_(string_index) {
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return other->AsLoadString()->string_index_ == string_index_;
  }

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  uint32_t GetStringIndex() const { return string_index_; }

  // TODO: Can we deopt or debug when we resolve a string?
  bool NeedsEnvironment() const OVERRIDE { return false; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; }
  bool CanBeNull() const OVERRIDE { return false; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  const uint32_t string_index_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Reads a static field; input 0 is the class holding the field.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads must not be reordered, so they are not movable.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};

// Writes a static field; input 0 is the class, input 1 the value to store.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};

// Access to an instance field that could not be resolved at compile time;
// needs an environment and can throw (presumably resolved via runtime call).
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};

// Store to an unresolved instance field; input 0 is the object, input 1 the value.
class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
};

// Read of an unresolved static field; no inputs, resolved at runtime.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};

// Store to an unresolved static field; input 0 is the value to store.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};

// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};

// Throws the exception object given as input 0; ends the basic block.
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};

/**
 * Implementation strategies for the code generator of a HInstanceOf
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck             // No optimization yet when checking against a generic array.
};

// Implements `instanceof`: input 0 is the object, input 1 the HLoadClass to
// test against; produces a boolean.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return false;
  }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return (check_kind == TypeCheckKind::kExactCheck)
        ? SideEffects::None()
        // Mips currently does runtime calls for any other checks.
        : SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

class HBoundType : public HExpression<1> {
 public:
  // Constructs an HBoundType with the given upper_bound.
  // Ensures that the upper_bound is valid.
  HBoundType(HInstruction* input,
             ReferenceTypeInfo upper_bound,
             bool upper_can_be_null,
             uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(upper_bound),
        upper_can_be_null_(upper_can_be_null),
        can_be_null_(upper_can_be_null) {
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
    SetReferenceTypeInfo(upper_bound_);
  }

  // GetUpper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return upper_can_be_null_; }

  void SetCanBeNull(bool can_be_null) {
    // Nullability may only be narrowed, never widened past the upper bound.
    DCHECK(upper_can_be_null_ || !can_be_null);
    can_be_null_ = can_be_null;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  // if (x instanceof ClassX) {
  //   // upper_bound_ will be ClassX
  // }
  const ReferenceTypeInfo upper_bound_;
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then can_be_null_ cannot be true).
  const bool upper_can_be_null_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// Implements `check-cast`: input 0 is the object, input 1 the HLoadClass to
// check against; throws on failure (see NeedsEnvironment below).
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

// Emits a memory barrier of the given kind.
class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
        barrier_kind_(barrier_kind) {}

  MemBarrierKind GetBarrierKind() { return barrier_kind_; }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  const MemBarrierKind barrier_kind_;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};

// Implements monitor-enter / monitor-exit on the object given as input 0.
class HMonitorOperation : public HTemplateInstruction<1> {
 public:
  enum OperationKind {
    kEnter,
    kExit,
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
    : HTemplateInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      kind_(kind) {
    SetRawInputAt(0, object);
  }

  // Instruction may throw a Java exception, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return CanThrow(); }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }


  bool IsEnter() const { return kind_ == kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 private:
  const OperationKind kind_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
};

/**
 * A HInstruction used as a marker for the replacement of new + <init>
 * of a String to a call to a StringFactory. Only baseline will see
 * the node at code generation, where it will be treated as null.
 * When compiling non-baseline, `HFakeString` instructions are being removed
 * in the instruction simplifier.
 */
class HFakeString : public HTemplateInstruction<0> {
 public:
  explicit HFakeString(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; }

  DECLARE_INSTRUCTION(FakeString);

 private:
  DISALLOW_COPY_AND_ASSIGN(HFakeString);
};

// A single source->destination move, as resolved by the parallel move resolver.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return destination_.IsInvalid() && !source_.IsInvalid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must be performed as if they happened simultaneously
// (debug checks below reject overlapping destinations).
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#ifdef ART_ENABLE_CODEGEN_arm64
#include "nodes_arm64.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base visitor over all instruction classes; subclasses override the
// per-instruction Visit##name hooks generated below.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
5585#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5586 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5587 5588 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5589 5590#undef DECLARE_VISIT_INSTRUCTION 5591 5592 private: 5593 HGraph* const graph_; 5594 5595 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5596}; 5597 5598class HGraphDelegateVisitor : public HGraphVisitor { 5599 public: 5600 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5601 virtual ~HGraphDelegateVisitor() {} 5602 5603 // Visit functions that delegate to to super class. 5604#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5605 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5606 5607 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5608 5609#undef DECLARE_VISIT_INSTRUCTION 5610 5611 private: 5612 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5613}; 5614 5615class HInsertionOrderIterator : public ValueObject { 5616 public: 5617 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5618 5619 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5620 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5621 void Advance() { ++index_; } 5622 5623 private: 5624 const HGraph& graph_; 5625 size_t index_; 5626 5627 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5628}; 5629 5630class HReversePostOrderIterator : public ValueObject { 5631 public: 5632 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5633 // Check that reverse post order of the graph has been built. 
5634 DCHECK(!graph.GetReversePostOrder().empty()); 5635 } 5636 5637 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5638 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5639 void Advance() { ++index_; } 5640 5641 private: 5642 const HGraph& graph_; 5643 size_t index_; 5644 5645 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5646}; 5647 5648class HPostOrderIterator : public ValueObject { 5649 public: 5650 explicit HPostOrderIterator(const HGraph& graph) 5651 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5652 // Check that reverse post order of the graph has been built. 5653 DCHECK(!graph.GetReversePostOrder().empty()); 5654 } 5655 5656 bool Done() const { return index_ == 0; } 5657 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5658 void Advance() { --index_; } 5659 5660 private: 5661 const HGraph& graph_; 5662 size_t index_; 5663 5664 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5665}; 5666 5667class HLinearPostOrderIterator : public ValueObject { 5668 public: 5669 explicit HLinearPostOrderIterator(const HGraph& graph) 5670 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5671 5672 bool Done() const { return index_ == 0; } 5673 5674 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5675 5676 void Advance() { 5677 --index_; 5678 DCHECK_GE(index_, 0U); 5679 } 5680 5681 private: 5682 const ArenaVector<HBasicBlock*>& order_; 5683 size_t index_; 5684 5685 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5686}; 5687 5688class HLinearOrderIterator : public ValueObject { 5689 public: 5690 explicit HLinearOrderIterator(const HGraph& graph) 5691 : order_(graph.GetLinearOrder()), index_(0) {} 5692 5693 bool Done() const { return index_ == order_.size(); } 5694 HBasicBlock* Current() const { return order_[index_]; } 5695 void Advance() { ++index_; } 5696 5697 private: 5698 const ArenaVector<HBasicBlock*>& order_; 5699 
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Position the iterator on the first block id that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Skip forward to the next block id that is set in the loop's bit vector.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;  // Block ids belonging to the loop.
  const ArenaVector<HBasicBlock*>& blocks_;  // All graph blocks, indexed by id.
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
5740class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5741 public: 5742 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5743 : blocks_in_loop_(info.GetBlocks()), 5744 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5745 index_(0) { 5746 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5747 Advance(); 5748 } 5749 } 5750 5751 bool Done() const { return index_ == blocks_.size(); } 5752 HBasicBlock* Current() const { return blocks_[index_]; } 5753 void Advance() { 5754 ++index_; 5755 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5756 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5757 break; 5758 } 5759 } 5760 } 5761 5762 private: 5763 const BitVector& blocks_in_loop_; 5764 const ArenaVector<HBasicBlock*>& blocks_; 5765 size_t index_; 5766 5767 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5768}; 5769 5770inline int64_t Int64FromConstant(HConstant* constant) { 5771 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5772 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5773 : constant->AsLongConstant()->GetValue(); 5774} 5775 5776inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5777 // For the purposes of the compiler, the dex files must actually be the same object 5778 // if we want to safely treat them as the same. This is especially important for JIT 5779 // as custom class loaders can open the same underlying file (or memory) multiple 5780 // times and provide different class resolution but no two class loaders should ever 5781 // use the same DexFile object - doing so is an unsupported hack that can lead to 5782 // all sorts of weird failures. 5783 return &lhs == &rhs; 5784} 5785 5786} // namespace art 5787 5788#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5789