nodes.h revision 5d75afe333f57546786686d9bee16b52f1bbe971
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_bit_vector.h" 25#include "base/arena_containers.h" 26#include "base/arena_object.h" 27#include "base/stl_util.h" 28#include "dex/compiler_enums.h" 29#include "entrypoints/quick/quick_entrypoints_enum.h" 30#include "handle.h" 31#include "handle_scope.h" 32#include "invoke_type.h" 33#include "locations.h" 34#include "method_reference.h" 35#include "mirror/class.h" 36#include "offsets.h" 37#include "primitive.h" 38#include "utils/array_ref.h" 39 40namespace art { 41 42class GraphChecker; 43class HBasicBlock; 44class HCurrentMethod; 45class HDoubleConstant; 46class HEnvironment; 47class HFakeString; 48class HFloatConstant; 49class HGraphBuilder; 50class HGraphVisitor; 51class HInstruction; 52class HIntConstant; 53class HInvoke; 54class HLongConstant; 55class HNullConstant; 56class HPhi; 57class HSuspendCheck; 58class HTryBoundary; 59class LiveInterval; 60class LocationSummary; 61class SlowPathCode; 62class SsaBuilder; 63 64namespace mirror { 65class DexCache; 66} // namespace mirror 67 68static const int kDefaultNumberOfBlocks = 8; 69static const int kDefaultNumberOfSuccessors = 2; 70static const int kDefaultNumberOfPredecessors = 2; 71static const int 
kDefaultNumberOfExceptionalPredecessors = 0; 72static const int kDefaultNumberOfDominatedBlocks = 1; 73static const int kDefaultNumberOfBackEdges = 1; 74 75static constexpr uint32_t kMaxIntShiftValue = 0x1f; 76static constexpr uint64_t kMaxLongShiftValue = 0x3f; 77 78static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 79static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1); 80 81static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 82 83static constexpr uint32_t kNoDexPc = -1; 84 85enum IfCondition { 86 // All types. 87 kCondEQ, // == 88 kCondNE, // != 89 // Signed integers and floating-point numbers. 90 kCondLT, // < 91 kCondLE, // <= 92 kCondGT, // > 93 kCondGE, // >= 94 // Unsigned integers. 95 kCondB, // < 96 kCondBE, // <= 97 kCondA, // > 98 kCondAE, // >= 99}; 100 101class HInstructionList : public ValueObject { 102 public: 103 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 104 105 void AddInstruction(HInstruction* instruction); 106 void RemoveInstruction(HInstruction* instruction); 107 108 // Insert `instruction` before/after an existing instruction `cursor`. 109 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 110 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 111 112 // Return true if this list contains `instruction`. 113 bool Contains(HInstruction* instruction) const; 114 115 // Return true if `instruction1` is found before `instruction2` in 116 // this instruction list and false otherwise. Abort if none 117 // of these instructions is found. 118 bool FoundBefore(const HInstruction* instruction1, 119 const HInstruction* instruction2) const; 120 121 bool IsEmpty() const { return first_instruction_ == nullptr; } 122 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 123 124 // Update the block of all instructions to be `block`. 
125 void SetBlockOfInstructions(HBasicBlock* block) const; 126 127 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 128 void Add(const HInstructionList& instruction_list); 129 130 // Return the number of instructions in the list. This is an expensive operation. 131 size_t CountSize() const; 132 133 private: 134 HInstruction* first_instruction_; 135 HInstruction* last_instruction_; 136 137 friend class HBasicBlock; 138 friend class HGraph; 139 friend class HInstruction; 140 friend class HInstructionIterator; 141 friend class HBackwardInstructionIterator; 142 143 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 144}; 145 146// Control-flow graph of a method. Contains a list of basic blocks. 147class HGraph : public ArenaObject<kArenaAllocGraph> { 148 public: 149 HGraph(ArenaAllocator* arena, 150 const DexFile& dex_file, 151 uint32_t method_idx, 152 bool should_generate_constructor_barrier, 153 InstructionSet instruction_set, 154 InvokeType invoke_type = kInvalidInvokeType, 155 bool debuggable = false, 156 int start_instruction_id = 0) 157 : arena_(arena), 158 blocks_(arena->Adapter(kArenaAllocBlockList)), 159 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 160 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 161 entry_block_(nullptr), 162 exit_block_(nullptr), 163 maximum_number_of_out_vregs_(0), 164 number_of_vregs_(0), 165 number_of_in_vregs_(0), 166 temporaries_vreg_slots_(0), 167 has_bounds_checks_(false), 168 has_try_catch_(false), 169 debuggable_(debuggable), 170 current_instruction_id_(start_instruction_id), 171 dex_file_(dex_file), 172 method_idx_(method_idx), 173 invoke_type_(invoke_type), 174 in_ssa_form_(false), 175 should_generate_constructor_barrier_(should_generate_constructor_barrier), 176 instruction_set_(instruction_set), 177 cached_null_constant_(nullptr), 178 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 179 cached_float_constants_(std::less<int32_t>(), 
arena->Adapter(kArenaAllocConstantsMap)), 180 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 181 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 182 cached_current_method_(nullptr) { 183 blocks_.reserve(kDefaultNumberOfBlocks); 184 } 185 186 ArenaAllocator* GetArena() const { return arena_; } 187 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 188 189 bool IsInSsaForm() const { return in_ssa_form_; } 190 191 HBasicBlock* GetEntryBlock() const { return entry_block_; } 192 HBasicBlock* GetExitBlock() const { return exit_block_; } 193 bool HasExitBlock() const { return exit_block_ != nullptr; } 194 195 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 196 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 197 198 void AddBlock(HBasicBlock* block); 199 200 // Try building the SSA form of this graph, with dominance computation and loop 201 // recognition. Returns whether it was successful in doing all these steps. 202 bool TryBuildingSsa() { 203 BuildDominatorTree(); 204 // The SSA builder requires loops to all be natural. Specifically, the dead phi 205 // elimination phase checks the consistency of the graph when doing a post-order 206 // visit for eliminating dead phis: a dead phi can only have loop header phi 207 // users remaining when being visited. 208 if (!AnalyzeNaturalLoops()) return false; 209 // Precompute per-block try membership before entering the SSA builder, 210 // which needs the information to build catch block phis from values of 211 // locals at throwing instructions inside try blocks. 
212 ComputeTryBlockInformation(); 213 TransformToSsa(); 214 in_ssa_form_ = true; 215 return true; 216 } 217 218 void ComputeDominanceInformation(); 219 void ClearDominanceInformation(); 220 221 void BuildDominatorTree(); 222 void TransformToSsa(); 223 void SimplifyCFG(); 224 void SimplifyCatchBlocks(); 225 226 // Analyze all natural loops in this graph. Returns false if one 227 // loop is not natural, that is the header does not dominate the 228 // back edge. 229 bool AnalyzeNaturalLoops() const; 230 231 // Iterate over blocks to compute try block membership. Needs reverse post 232 // order and loop information. 233 void ComputeTryBlockInformation(); 234 235 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 236 // Returns the instruction used to replace the invoke expression or null if the 237 // invoke is for a void method. 238 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 239 240 // Need to add a couple of blocks to test if the loop body is entered and 241 // put deoptimization instructions, etc. 242 void TransformLoopHeaderForBCE(HBasicBlock* header); 243 244 // Removes `block` from the graph. Assumes `block` has been disconnected from 245 // other blocks and has no instructions or phis. 246 void DeleteDeadEmptyBlock(HBasicBlock* block); 247 248 // Splits the edge between `block` and `successor` while preserving the 249 // indices in the predecessor/successor lists. If there are multiple edges 250 // between the blocks, the lowest indices are used. 251 // Returns the new block which is empty and has the same dex pc as `successor`. 
252 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 253 254 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 255 void SimplifyLoop(HBasicBlock* header); 256 257 int32_t GetNextInstructionId() { 258 DCHECK_NE(current_instruction_id_, INT32_MAX); 259 return current_instruction_id_++; 260 } 261 262 int32_t GetCurrentInstructionId() const { 263 return current_instruction_id_; 264 } 265 266 void SetCurrentInstructionId(int32_t id) { 267 current_instruction_id_ = id; 268 } 269 270 uint16_t GetMaximumNumberOfOutVRegs() const { 271 return maximum_number_of_out_vregs_; 272 } 273 274 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 275 maximum_number_of_out_vregs_ = new_value; 276 } 277 278 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 279 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 280 } 281 282 void UpdateTemporariesVRegSlots(size_t slots) { 283 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 284 } 285 286 size_t GetTemporariesVRegSlots() const { 287 DCHECK(!in_ssa_form_); 288 return temporaries_vreg_slots_; 289 } 290 291 void SetNumberOfVRegs(uint16_t number_of_vregs) { 292 number_of_vregs_ = number_of_vregs; 293 } 294 295 uint16_t GetNumberOfVRegs() const { 296 return number_of_vregs_; 297 } 298 299 void SetNumberOfInVRegs(uint16_t value) { 300 number_of_in_vregs_ = value; 301 } 302 303 uint16_t GetNumberOfLocalVRegs() const { 304 DCHECK(!in_ssa_form_); 305 return number_of_vregs_ - number_of_in_vregs_; 306 } 307 308 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 309 return reverse_post_order_; 310 } 311 312 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 313 return linear_order_; 314 } 315 316 bool HasBoundsChecks() const { 317 return has_bounds_checks_; 318 } 319 320 void SetHasBoundsChecks(bool value) { 321 has_bounds_checks_ = value; 322 } 323 324 bool ShouldGenerateConstructorBarrier() const { 325 return 
should_generate_constructor_barrier_; 326 } 327 328 bool IsDebuggable() const { return debuggable_; } 329 330 // Returns a constant of the given type and value. If it does not exist 331 // already, it is created and inserted into the graph. This method is only for 332 // integral types. 333 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 334 335 // TODO: This is problematic for the consistency of reference type propagation 336 // because it can be created anytime after the pass and thus it will be left 337 // with an invalid type. 338 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 339 340 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 341 return CreateConstant(value, &cached_int_constants_, dex_pc); 342 } 343 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 344 return CreateConstant(value, &cached_long_constants_, dex_pc); 345 } 346 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 347 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 348 } 349 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 350 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 351 } 352 353 HCurrentMethod* GetCurrentMethod(); 354 355 const DexFile& GetDexFile() const { 356 return dex_file_; 357 } 358 359 uint32_t GetMethodIdx() const { 360 return method_idx_; 361 } 362 363 InvokeType GetInvokeType() const { 364 return invoke_type_; 365 } 366 367 InstructionSet GetInstructionSet() const { 368 return instruction_set_; 369 } 370 371 bool HasTryCatch() const { return has_try_catch_; } 372 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 373 374 ArtMethod* GetArtMethod() const { return art_method_; } 375 void SetArtMethod(ArtMethod* method) { art_method_ = method; } 376 377 // Returns an instruction with the opposite boolean value from 'cond'. 
378 // The instruction has been inserted into the graph, either as a constant, or 379 // before cursor. 380 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor); 381 382 private: 383 void FindBackEdges(ArenaBitVector* visited); 384 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 385 void RemoveDeadBlocks(const ArenaBitVector& visited); 386 387 template <class InstructionType, typename ValueType> 388 InstructionType* CreateConstant(ValueType value, 389 ArenaSafeMap<ValueType, InstructionType*>* cache, 390 uint32_t dex_pc = kNoDexPc) { 391 // Try to find an existing constant of the given value. 392 InstructionType* constant = nullptr; 393 auto cached_constant = cache->find(value); 394 if (cached_constant != cache->end()) { 395 constant = cached_constant->second; 396 } 397 398 // If not found or previously deleted, create and cache a new instruction. 399 // Don't bother reviving a previously deleted instruction, for simplicity. 400 if (constant == nullptr || constant->GetBlock() == nullptr) { 401 constant = new (arena_) InstructionType(value, dex_pc); 402 cache->Overwrite(value, constant); 403 InsertConstant(constant); 404 } 405 return constant; 406 } 407 408 void InsertConstant(HConstant* instruction); 409 410 // Cache a float constant into the graph. This method should only be 411 // called by the SsaBuilder when creating "equivalent" instructions. 412 void CacheFloatConstant(HFloatConstant* constant); 413 414 // See CacheFloatConstant comment. 415 void CacheDoubleConstant(HDoubleConstant* constant); 416 417 ArenaAllocator* const arena_; 418 419 // List of blocks in insertion order. 420 ArenaVector<HBasicBlock*> blocks_; 421 422 // List of blocks to perform a reverse post order tree traversal. 423 ArenaVector<HBasicBlock*> reverse_post_order_; 424 425 // List of blocks to perform a linear order tree traversal. 
426 ArenaVector<HBasicBlock*> linear_order_; 427 428 HBasicBlock* entry_block_; 429 HBasicBlock* exit_block_; 430 431 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 432 uint16_t maximum_number_of_out_vregs_; 433 434 // The number of virtual registers in this method. Contains the parameters. 435 uint16_t number_of_vregs_; 436 437 // The number of virtual registers used by parameters of this method. 438 uint16_t number_of_in_vregs_; 439 440 // Number of vreg size slots that the temporaries use (used in baseline compiler). 441 size_t temporaries_vreg_slots_; 442 443 // Has bounds checks. We can totally skip BCE if it's false. 444 bool has_bounds_checks_; 445 446 // Flag whether there are any try/catch blocks in the graph. We will skip 447 // try/catch-related passes if false. 448 bool has_try_catch_; 449 450 // Indicates whether the graph should be compiled in a way that 451 // ensures full debuggability. If false, we can apply more 452 // aggressive optimizations that may limit the level of debugging. 453 const bool debuggable_; 454 455 // The current id to assign to a newly added instruction. See HInstruction.id_. 456 int32_t current_instruction_id_; 457 458 // The dex file from which the method is from. 459 const DexFile& dex_file_; 460 461 // The method index in the dex file. 462 const uint32_t method_idx_; 463 464 // If inlined, this encodes how the callee is being invoked. 465 const InvokeType invoke_type_; 466 467 // Whether the graph has been transformed to SSA form. Only used 468 // in debug mode to ensure we are not using properties only valid 469 // for non-SSA form (like the number of temporaries). 470 bool in_ssa_form_; 471 472 const bool should_generate_constructor_barrier_; 473 474 const InstructionSet instruction_set_; 475 476 // Cached constants. 
477 HNullConstant* cached_null_constant_; 478 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 479 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 480 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 481 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 482 483 HCurrentMethod* cached_current_method_; 484 485 // The ArtMethod this graph is for. Note that for AOT, it may be null, 486 // for example for methods whose declaring class could not be resolved 487 // (such as when the superclass could not be found). 488 ArtMethod* art_method_; 489 490 friend class SsaBuilder; // For caching constants. 491 friend class SsaLivenessAnalysis; // For the linear order. 492 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 493 DISALLOW_COPY_AND_ASSIGN(HGraph); 494}; 495 496class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 497 public: 498 HLoopInformation(HBasicBlock* header, HGraph* graph) 499 : header_(header), 500 suspend_check_(nullptr), 501 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 502 // Make bit vector growable, as the number of blocks may change. 
503 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 504 back_edges_.reserve(kDefaultNumberOfBackEdges); 505 } 506 507 HBasicBlock* GetHeader() const { 508 return header_; 509 } 510 511 void SetHeader(HBasicBlock* block) { 512 header_ = block; 513 } 514 515 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 516 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 517 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 518 519 void AddBackEdge(HBasicBlock* back_edge) { 520 back_edges_.push_back(back_edge); 521 } 522 523 void RemoveBackEdge(HBasicBlock* back_edge) { 524 RemoveElement(back_edges_, back_edge); 525 } 526 527 bool IsBackEdge(const HBasicBlock& block) const { 528 return ContainsElement(back_edges_, &block); 529 } 530 531 size_t NumberOfBackEdges() const { 532 return back_edges_.size(); 533 } 534 535 HBasicBlock* GetPreHeader() const; 536 537 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 538 return back_edges_; 539 } 540 541 // Returns the lifetime position of the back edge that has the 542 // greatest lifetime position. 543 size_t GetLifetimeEnd() const; 544 545 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 546 ReplaceElement(back_edges_, existing, new_back_edge); 547 } 548 549 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 550 // that is the header dominates the back edge. 551 bool Populate(); 552 553 // Reanalyzes the loop by removing loop info from its blocks and re-running 554 // Populate(). If there are no back edges left, the loop info is completely 555 // removed as well as its SuspendCheck instruction. It must be run on nested 556 // inner loops first. 557 void Update(); 558 559 // Returns whether this loop information contains `block`. 560 // Note that this loop information *must* be populated before entering this function. 
561 bool Contains(const HBasicBlock& block) const; 562 563 // Returns whether this loop information is an inner loop of `other`. 564 // Note that `other` *must* be populated before entering this function. 565 bool IsIn(const HLoopInformation& other) const; 566 567 // Returns true if instruction is not defined within this loop. 568 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const; 569 570 const ArenaBitVector& GetBlocks() const { return blocks_; } 571 572 void Add(HBasicBlock* block); 573 void Remove(HBasicBlock* block); 574 575 private: 576 // Internal recursive implementation of `Populate`. 577 void PopulateRecursive(HBasicBlock* block); 578 579 HBasicBlock* header_; 580 HSuspendCheck* suspend_check_; 581 ArenaVector<HBasicBlock*> back_edges_; 582 ArenaBitVector blocks_; 583 584 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 585}; 586 587// Stores try/catch information for basic blocks. 588// Note that HGraph is constructed so that catch blocks cannot simultaneously 589// be try blocks. 590class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 591 public: 592 // Try block information constructor. 593 explicit TryCatchInformation(const HTryBoundary& try_entry) 594 : try_entry_(&try_entry), 595 catch_dex_file_(nullptr), 596 catch_type_index_(DexFile::kDexNoIndex16) { 597 DCHECK(try_entry_ != nullptr); 598 } 599 600 // Catch block information constructor. 
601 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 602 : try_entry_(nullptr), 603 catch_dex_file_(&dex_file), 604 catch_type_index_(catch_type_index) {} 605 606 bool IsTryBlock() const { return try_entry_ != nullptr; } 607 608 const HTryBoundary& GetTryEntry() const { 609 DCHECK(IsTryBlock()); 610 return *try_entry_; 611 } 612 613 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 614 615 bool IsCatchAllTypeIndex() const { 616 DCHECK(IsCatchBlock()); 617 return catch_type_index_ == DexFile::kDexNoIndex16; 618 } 619 620 uint16_t GetCatchTypeIndex() const { 621 DCHECK(IsCatchBlock()); 622 return catch_type_index_; 623 } 624 625 const DexFile& GetCatchDexFile() const { 626 DCHECK(IsCatchBlock()); 627 return *catch_dex_file_; 628 } 629 630 private: 631 // One of possibly several TryBoundary instructions entering the block's try. 632 // Only set for try blocks. 633 const HTryBoundary* try_entry_; 634 635 // Exception type information. Only set for catch blocks. 636 const DexFile* catch_dex_file_; 637 const uint16_t catch_type_index_; 638}; 639 640static constexpr size_t kNoLifetime = -1; 641static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 642 643// A block in a method. Contains the list of instructions represented 644// as a double linked list. Each block knows its predecessors and 645// successors. 
646 647class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 648 public: 649 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 650 : graph_(graph), 651 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 652 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 653 loop_information_(nullptr), 654 dominator_(nullptr), 655 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 656 block_id_(kInvalidBlockId), 657 dex_pc_(dex_pc), 658 lifetime_start_(kNoLifetime), 659 lifetime_end_(kNoLifetime), 660 try_catch_information_(nullptr) { 661 predecessors_.reserve(kDefaultNumberOfPredecessors); 662 successors_.reserve(kDefaultNumberOfSuccessors); 663 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 664 } 665 666 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 667 return predecessors_; 668 } 669 670 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 671 return successors_; 672 } 673 674 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const; 675 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const; 676 677 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 678 return ContainsElement(successors_, block, start_from); 679 } 680 681 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 682 return dominated_blocks_; 683 } 684 685 bool IsEntryBlock() const { 686 return graph_->GetEntryBlock() == this; 687 } 688 689 bool IsExitBlock() const { 690 return graph_->GetExitBlock() == this; 691 } 692 693 bool IsSingleGoto() const; 694 bool IsSingleTryBoundary() const; 695 696 // Returns true if this block emits nothing but a jump. 697 bool IsSingleJump() const { 698 HLoopInformation* loop_info = GetLoopInformation(); 699 return (IsSingleGoto() || IsSingleTryBoundary()) 700 // Back edges generate a suspend check. 
701 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 702 } 703 704 void AddBackEdge(HBasicBlock* back_edge) { 705 if (loop_information_ == nullptr) { 706 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 707 } 708 DCHECK_EQ(loop_information_->GetHeader(), this); 709 loop_information_->AddBackEdge(back_edge); 710 } 711 712 HGraph* GetGraph() const { return graph_; } 713 void SetGraph(HGraph* graph) { graph_ = graph; } 714 715 uint32_t GetBlockId() const { return block_id_; } 716 void SetBlockId(int id) { block_id_ = id; } 717 uint32_t GetDexPc() const { return dex_pc_; } 718 719 HBasicBlock* GetDominator() const { return dominator_; } 720 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 721 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 722 723 void RemoveDominatedBlock(HBasicBlock* block) { 724 RemoveElement(dominated_blocks_, block); 725 } 726 727 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 728 ReplaceElement(dominated_blocks_, existing, new_block); 729 } 730 731 void ClearDominanceInformation(); 732 733 int NumberOfBackEdges() const { 734 return IsLoopHeader() ? 
loop_information_->NumberOfBackEdges() : 0; 735 } 736 737 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 738 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 739 const HInstructionList& GetInstructions() const { return instructions_; } 740 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 741 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 742 const HInstructionList& GetPhis() const { return phis_; } 743 744 void AddSuccessor(HBasicBlock* block) { 745 successors_.push_back(block); 746 block->predecessors_.push_back(this); 747 } 748 749 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 750 size_t successor_index = GetSuccessorIndexOf(existing); 751 existing->RemovePredecessor(this); 752 new_block->predecessors_.push_back(this); 753 successors_[successor_index] = new_block; 754 } 755 756 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 757 size_t predecessor_index = GetPredecessorIndexOf(existing); 758 existing->RemoveSuccessor(this); 759 new_block->successors_.push_back(this); 760 predecessors_[predecessor_index] = new_block; 761 } 762 763 // Insert `this` between `predecessor` and `successor. This method 764 // preserves the indicies, and will update the first edge found between 765 // `predecessor` and `successor`. 
766 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 767 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 768 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 769 successor->predecessors_[predecessor_index] = this; 770 predecessor->successors_[successor_index] = this; 771 successors_.push_back(successor); 772 predecessors_.push_back(predecessor); 773 } 774 775 void RemovePredecessor(HBasicBlock* block) { 776 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 777 } 778 779 void RemoveSuccessor(HBasicBlock* block) { 780 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 781 } 782 783 void ClearAllPredecessors() { 784 predecessors_.clear(); 785 } 786 787 void AddPredecessor(HBasicBlock* block) { 788 predecessors_.push_back(block); 789 block->successors_.push_back(this); 790 } 791 792 void SwapPredecessors() { 793 DCHECK_EQ(predecessors_.size(), 2u); 794 std::swap(predecessors_[0], predecessors_[1]); 795 } 796 797 void SwapSuccessors() { 798 DCHECK_EQ(successors_.size(), 2u); 799 std::swap(successors_[0], successors_[1]); 800 } 801 802 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 803 return IndexOfElement(predecessors_, predecessor); 804 } 805 806 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 807 return IndexOfElement(successors_, successor); 808 } 809 810 HBasicBlock* GetSinglePredecessor() const { 811 DCHECK_EQ(GetPredecessors().size(), 1u); 812 return GetPredecessors()[0]; 813 } 814 815 HBasicBlock* GetSingleSuccessor() const { 816 DCHECK_EQ(GetSuccessors().size(), 1u); 817 return GetSuccessors()[0]; 818 } 819 820 // Returns whether the first occurrence of `predecessor` in the list of 821 // predecessors is at index `idx`. 
  // Returns whether `idx` is the first index at which `predecessor` occurs in
  // the predecessor list. A predecessor may appear more than once (e.g. both
  // edges of an HIf can target the same block), hence the need for this check.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Split catch block into two blocks after the original move-exception bytecode
  // instruction, or at the beginning if not present. Returns the newly created,
  // latter block, or nullptr if such block could not be created (must be dead
  // in that case). Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitCatchBlockAfterMoveException();

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  // Append `instruction` to the (non-phi) instruction list of this block.
  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // Returns whether this block is the header of the loop it belongs to.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Only valid on loop headers: whether predecessor 0 is the loop pre-header.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Returns the innermost loop containing this block, or nullptr if not in a loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information, without the outer/inner-loop logic
  // performed by SetInLoop().
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions used by the register allocator (liveness analysis).
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost enclosing loop, or nullptr when not in a loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Try/catch membership, or nullptr when the block is in neither.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    // The pre-header of the current loop carries the loop information of the
    // enclosing (outer) loop, or nullptr when there is no outer loop.
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// Lists of all concrete HIR instruction classes, with their superclass.
// NOTE: the order of entries determines the values of
// HInstruction::InstructionKind (generated via FOR_EACH_INSTRUCTION below),
// so do not reorder.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

// Architecture-specific instructions, present only when the corresponding
// code generator is compiled in.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64DataProcWithShifterOp, Instruction)                            \
  M(Arm64IntermediateAddress, Instruction)                              \
  M(Arm64MultiplyAccumulate, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<type> instruction class.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides for a concrete instruction class: kind, debug name,
// type equality, and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }  \
  const char* DebugName() const OVERRIDE { return #type; }              \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate Is/As downcast helpers for an abstract instruction class.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }

template <typename T> class HUseList;

// One entry of an intrusive doubly-linked use list; `T` is the user type
// (HInstruction* or HEnvironment*). Only HUseList may construct nodes.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  // Input index at which the user references the used instruction.
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Arena-allocated, intrusive list of uses of an instruction. Nodes are never
// freed individually (arena allocation); Clear()/Remove() only unlink.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list in O(1). The node itself is not deallocated.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear-time membership test; used in debug checks only (see Remove()).
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n) count of the uses; prefer the constant-time predicates above.
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over an HUseList.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Upgrades `old_record` (which must not yet have a use node) with the
  // use-list node created for it.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access acts as a full memory barrier, hence the conservative
  // AllWritesAndReads() for the volatile variants below.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set with the other's bits removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if `other` is a subset of this set.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift the 'depend on' bits down so they line up with the 'change' bits
    // of `other` (the layouts match, see the static_assert below).
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of bits per field/array access group (one per Primitive::Type,
  // excluding void).
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Creates an environment with the same shape/location as `to_copy`; vreg
  // values are not copied here (see CopyFrom).
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a copy of `parent`'s whole chain as the parent chain of the
  // outermost environment in this chain (recursing until parent_ is null).
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Sets the vreg without registering a use (see HInstruction::AddEnvUseAt
  // and RecordEnvUse for the use-list bookkeeping).
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers recorded in this environment.
  size_t Size() const { return vregs_.size(); }

  // Environment of the caller when this environment belongs to an inlined
  // invoke; nullptr otherwise.
  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Reference type knowledge for an instruction producing a Primitive::kPrimNot
// value: the class (as a GC handle) plus whether the type is exact.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this (exact) type can hold a value of type `rti`,
  // conservatively false when either side is not an exact array class.
  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this (exact) type can hold the elements of an array
  // of type `rti`.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo();
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Base class of all HIR instructions. Arena-allocated; lives in the
// instruction (or phi) list of an HBasicBlock.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// One enumerator per instruction class listed in FOR_EACH_INSTRUCTION; the
// enum order therefore follows the macro-table order.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Registers `user` (at input `index`) in this instruction's use list and
  // records the resulting node in the user's input record.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  // Constant-time removal, using the use node cached in the input record.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
1911 bool StrictlyDominates(HInstruction* other_instruction) const; 1912 1913 int GetId() const { return id_; } 1914 void SetId(int id) { id_ = id; } 1915 1916 int GetSsaIndex() const { return ssa_index_; } 1917 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; } 1918 bool HasSsaIndex() const { return ssa_index_ != -1; } 1919 1920 bool HasEnvironment() const { return environment_ != nullptr; } 1921 HEnvironment* GetEnvironment() const { return environment_; } 1922 // Set the `environment_` field. Raw because this method does not 1923 // update the uses lists. 1924 void SetRawEnvironment(HEnvironment* environment) { 1925 DCHECK(environment_ == nullptr); 1926 DCHECK_EQ(environment->GetHolder(), this); 1927 environment_ = environment; 1928 } 1929 1930 // Set the environment of this instruction, copying it from `environment`. While 1931 // copying, the uses lists are being updated. 1932 void CopyEnvironmentFrom(HEnvironment* environment) { 1933 DCHECK(environment_ == nullptr); 1934 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1935 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1936 environment_->CopyFrom(environment); 1937 if (environment->GetParent() != nullptr) { 1938 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1939 } 1940 } 1941 1942 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment, 1943 HBasicBlock* block) { 1944 DCHECK(environment_ == nullptr); 1945 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1946 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1947 environment_->CopyFromWithLoopPhiAdjustment(environment, block); 1948 if (environment->GetParent() != nullptr) { 1949 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1950 } 1951 } 1952 1953 // Returns the number of entries in the environment. Typically, that is the 1954 // number of dex registers in a method. 
It could be more in case of inlining. 1955 size_t EnvironmentSize() const; 1956 1957 LocationSummary* GetLocations() const { return locations_; } 1958 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1959 1960 void ReplaceWith(HInstruction* instruction); 1961 void ReplaceInput(HInstruction* replacement, size_t index); 1962 1963 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1964 // uses of this instruction by `other` are *not* updated. 1965 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1966 ReplaceWith(other); 1967 other->ReplaceInput(this, use_index); 1968 } 1969 1970 // Move `this` instruction before `cursor`. 1971 void MoveBefore(HInstruction* cursor); 1972 1973 // Move `this` before its first user and out of any loops. If there is no 1974 // out-of-loop user that dominates all other users, move the instruction 1975 // to the end of the out-of-loop common dominator of the user's blocks. 1976 // 1977 // This can be used only on non-throwing instructions with no side effects that 1978 // have at least one use but no environment uses. 1979 void MoveBeforeFirstUserAndOutOfLoops(); 1980 1981#define INSTRUCTION_TYPE_CHECK(type, super) \ 1982 bool Is##type() const; \ 1983 const H##type* As##type() const; \ 1984 H##type* As##type(); 1985 1986 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1987#undef INSTRUCTION_TYPE_CHECK 1988 1989#define INSTRUCTION_TYPE_CHECK(type, super) \ 1990 bool Is##type() const { return (As##type() != nullptr); } \ 1991 virtual const H##type* As##type() const { return nullptr; } \ 1992 virtual H##type* As##type() { return nullptr; } 1993 FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1994#undef INSTRUCTION_TYPE_CHECK 1995 1996 // Returns whether the instruction can be moved within the graph. 1997 virtual bool CanBeMoved() const { return false; } 1998 1999 // Returns whether the two instructions are of the same kind. 
2000 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 2001 return false; 2002 } 2003 2004 // Returns whether any data encoded in the two instructions is equal. 2005 // This method does not look at the inputs. Both instructions must be 2006 // of the same type, otherwise the method has undefined behavior. 2007 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 2008 return false; 2009 } 2010 2011 // Returns whether two instructions are equal, that is: 2012 // 1) They have the same type and contain the same data (InstructionDataEquals). 2013 // 2) Their inputs are identical. 2014 bool Equals(HInstruction* other) const; 2015 2016 // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744) 2017 // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide 2018 // the virtual function because the __attribute__((__pure__)) doesn't really 2019 // apply the strong requirement for virtual functions, preventing optimizations. 
2020 InstructionKind GetKind() const PURE; 2021 virtual InstructionKind GetKindInternal() const = 0; 2022 2023 virtual size_t ComputeHashCode() const { 2024 size_t result = GetKind(); 2025 for (size_t i = 0, e = InputCount(); i < e; ++i) { 2026 result = (result * 31) + InputAt(i)->GetId(); 2027 } 2028 return result; 2029 } 2030 2031 SideEffects GetSideEffects() const { return side_effects_; } 2032 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 2033 2034 size_t GetLifetimePosition() const { return lifetime_position_; } 2035 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 2036 LiveInterval* GetLiveInterval() const { return live_interval_; } 2037 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 2038 bool HasLiveInterval() const { return live_interval_ != nullptr; } 2039 2040 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 2041 2042 // Returns whether the code generation of the instruction will require to have access 2043 // to the current method. Such instructions are: 2044 // (1): Instructions that require an environment, as calling the runtime requires 2045 // to walk the stack and have the current method stored at a specific stack address. 2046 // (2): Object literals like classes and strings, that are loaded from the dex cache 2047 // fields of the current method. 2048 bool NeedsCurrentMethod() const { 2049 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 2050 } 2051 2052 // Returns whether the code generation of the instruction will require to have access 2053 // to the dex cache of the current method's declaring class via the current method. 2054 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 2055 2056 // Does this instruction have any use in an environment before 2057 // control flow hits 'other'? 
2058 bool HasAnyEnvironmentUseBefore(HInstruction* other); 2059 2060 // Remove all references to environment uses of this instruction. 2061 // The caller must ensure that this is safe to do. 2062 void RemoveEnvironmentUsers(); 2063 2064 protected: 2065 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2066 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2067 void SetSideEffects(SideEffects other) { side_effects_ = other; } 2068 2069 private: 2070 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2071 2072 HInstruction* previous_; 2073 HInstruction* next_; 2074 HBasicBlock* block_; 2075 const uint32_t dex_pc_; 2076 2077 // An instruction gets an id when it is added to the graph. 2078 // It reflects creation order. A negative id means the instruction 2079 // has not been added to the graph. 2080 int id_; 2081 2082 // When doing liveness analysis, instructions that have uses get an SSA index. 2083 int ssa_index_; 2084 2085 // List of instructions that have this instruction as input. 2086 HUseList<HInstruction*> uses_; 2087 2088 // List of environments that contain this instruction. 2089 HUseList<HEnvironment*> env_uses_; 2090 2091 // The environment associated with this instruction. Not null if the instruction 2092 // might jump out of the method. 2093 HEnvironment* environment_; 2094 2095 // Set by the code generator. 2096 LocationSummary* locations_; 2097 2098 // Set by the liveness analysis. 2099 LiveInterval* live_interval_; 2100 2101 // Set by the liveness analysis, this is the position in a linear 2102 // order of blocks where this instruction's live interval start. 2103 size_t lifetime_position_; 2104 2105 SideEffects side_effects_; 2106 2107 // TODO: for primitive types this should be marked as invalid. 
2108 ReferenceTypeInfo reference_type_info_; 2109 2110 friend class GraphChecker; 2111 friend class HBasicBlock; 2112 friend class HEnvironment; 2113 friend class HGraph; 2114 friend class HInstructionList; 2115 2116 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2117}; 2118std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2119 2120class HInputIterator : public ValueObject { 2121 public: 2122 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2123 2124 bool Done() const { return index_ == instruction_->InputCount(); } 2125 HInstruction* Current() const { return instruction_->InputAt(index_); } 2126 void Advance() { index_++; } 2127 2128 private: 2129 HInstruction* instruction_; 2130 size_t index_; 2131 2132 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2133}; 2134 2135class HInstructionIterator : public ValueObject { 2136 public: 2137 explicit HInstructionIterator(const HInstructionList& instructions) 2138 : instruction_(instructions.first_instruction_) { 2139 next_ = Done() ? nullptr : instruction_->GetNext(); 2140 } 2141 2142 bool Done() const { return instruction_ == nullptr; } 2143 HInstruction* Current() const { return instruction_; } 2144 void Advance() { 2145 instruction_ = next_; 2146 next_ = Done() ? nullptr : instruction_->GetNext(); 2147 } 2148 2149 private: 2150 HInstruction* instruction_; 2151 HInstruction* next_; 2152 2153 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2154}; 2155 2156class HBackwardInstructionIterator : public ValueObject { 2157 public: 2158 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2159 : instruction_(instructions.last_instruction_) { 2160 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2161 } 2162 2163 bool Done() const { return instruction_ == nullptr; } 2164 HInstruction* Current() const { return instruction_; } 2165 void Advance() { 2166 instruction_ = next_; 2167 next_ = Done() ? 
nullptr : instruction_->GetPrevious(); 2168 } 2169 2170 private: 2171 HInstruction* instruction_; 2172 HInstruction* next_; 2173 2174 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2175}; 2176 2177template<size_t N> 2178class HTemplateInstruction: public HInstruction { 2179 public: 2180 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc) 2181 : HInstruction(side_effects, dex_pc), inputs_() {} 2182 virtual ~HTemplateInstruction() {} 2183 2184 size_t InputCount() const OVERRIDE { return N; } 2185 2186 protected: 2187 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2188 DCHECK_LT(i, N); 2189 return inputs_[i]; 2190 } 2191 2192 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2193 DCHECK_LT(i, N); 2194 inputs_[i] = input; 2195 } 2196 2197 private: 2198 std::array<HUserRecord<HInstruction*>, N> inputs_; 2199 2200 friend class SsaBuilder; 2201}; 2202 2203// HTemplateInstruction specialization for N=0. 2204template<> 2205class HTemplateInstruction<0>: public HInstruction { 2206 public: 2207 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc) 2208 : HInstruction(side_effects, dex_pc) {} 2209 2210 virtual ~HTemplateInstruction() {} 2211 2212 size_t InputCount() const OVERRIDE { return 0; } 2213 2214 protected: 2215 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2216 LOG(FATAL) << "Unreachable"; 2217 UNREACHABLE(); 2218 } 2219 2220 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2221 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2222 LOG(FATAL) << "Unreachable"; 2223 UNREACHABLE(); 2224 } 2225 2226 private: 2227 friend class SsaBuilder; 2228}; 2229 2230template<intptr_t N> 2231class HExpression : public HTemplateInstruction<N> { 2232 public: 2233 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc) 2234 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 
2235 virtual ~HExpression() {} 2236 2237 Primitive::Type GetType() const OVERRIDE { return type_; } 2238 2239 protected: 2240 Primitive::Type type_; 2241}; 2242 2243// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2244// instruction that branches to the exit block. 2245class HReturnVoid : public HTemplateInstruction<0> { 2246 public: 2247 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2248 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2249 2250 bool IsControlFlow() const OVERRIDE { return true; } 2251 2252 DECLARE_INSTRUCTION(ReturnVoid); 2253 2254 private: 2255 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2256}; 2257 2258// Represents dex's RETURN opcodes. A HReturn is a control flow 2259// instruction that branches to the exit block. 2260class HReturn : public HTemplateInstruction<1> { 2261 public: 2262 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2263 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2264 SetRawInputAt(0, value); 2265 } 2266 2267 bool IsControlFlow() const OVERRIDE { return true; } 2268 2269 DECLARE_INSTRUCTION(Return); 2270 2271 private: 2272 DISALLOW_COPY_AND_ASSIGN(HReturn); 2273}; 2274 2275// The exit instruction is the only instruction of the exit block. 2276// Instructions aborting the method (HThrow and HReturn) must branch to the 2277// exit block. 2278class HExit : public HTemplateInstruction<0> { 2279 public: 2280 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2281 2282 bool IsControlFlow() const OVERRIDE { return true; } 2283 2284 DECLARE_INSTRUCTION(Exit); 2285 2286 private: 2287 DISALLOW_COPY_AND_ASSIGN(HExit); 2288}; 2289 2290// Jumps from one block to another. 
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A block ending with an HGoto has exactly one successor; returns it.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};

// Abstract base class of all compile-time constants. Constants have no
// side effects (see the constructor) and can always be moved by the
// optimizer.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Value predicates for simplification passes; subclasses override the
  // ones that can apply to their value.
  virtual bool IsMinusOne() const { return false; }
  virtual bool IsZero() const { return false; }
  virtual bool IsOne() const { return false; }

  // Returns the constant's value reinterpreted as a 64-bit unsigned integer.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

// The `null` reference constant. The constructor is private and HGraph is a
// friend, so only the graph can create instances.
class HNullConstant : public HConstant {
 public:
  // All null constants are interchangeable, so any two compare equal.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }

  size_t ComputeHashCode() const OVERRIDE { return 0; }

  DECLARE_INSTRUCTION(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
};

// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend the 32-bit pattern, so equality of bit patterns is preserved
  // when comparing through the 64-bit view.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructors are private; only HGraph (a friend) may create int
  // constants.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Private constructor; only HGraph (a friend) may create long constants.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition input is true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition input is false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns a view of the exception-handler successors (everything past
  // index zero).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Carries no per-instance data, so any two HDeoptimize compare equal.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
2512class HCurrentMethod : public HExpression<0> { 2513 public: 2514 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2515 : HExpression(type, SideEffects::None(), dex_pc) {} 2516 2517 DECLARE_INSTRUCTION(CurrentMethod); 2518 2519 private: 2520 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 2521}; 2522 2523// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will 2524// have one successor for each entry in the switch table, and the final successor 2525// will be the block containing the next Dex opcode. 2526class HPackedSwitch : public HTemplateInstruction<1> { 2527 public: 2528 HPackedSwitch(int32_t start_value, 2529 uint32_t num_entries, 2530 HInstruction* input, 2531 uint32_t dex_pc = kNoDexPc) 2532 : HTemplateInstruction(SideEffects::None(), dex_pc), 2533 start_value_(start_value), 2534 num_entries_(num_entries) { 2535 SetRawInputAt(0, input); 2536 } 2537 2538 bool IsControlFlow() const OVERRIDE { return true; } 2539 2540 int32_t GetStartValue() const { return start_value_; } 2541 2542 uint32_t GetNumEntries() const { return num_entries_; } 2543 2544 HBasicBlock* GetDefaultBlock() const { 2545 // Last entry is the default block. 
2546 return GetBlock()->GetSuccessors()[num_entries_]; 2547 } 2548 DECLARE_INSTRUCTION(PackedSwitch); 2549 2550 private: 2551 const int32_t start_value_; 2552 const uint32_t num_entries_; 2553 2554 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2555}; 2556 2557class HUnaryOperation : public HExpression<1> { 2558 public: 2559 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2560 : HExpression(result_type, SideEffects::None(), dex_pc) { 2561 SetRawInputAt(0, input); 2562 } 2563 2564 HInstruction* GetInput() const { return InputAt(0); } 2565 Primitive::Type GetResultType() const { return GetType(); } 2566 2567 bool CanBeMoved() const OVERRIDE { return true; } 2568 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2569 return true; 2570 } 2571 2572 // Try to statically evaluate `operation` and return a HConstant 2573 // containing the result of this evaluation. If `operation` cannot 2574 // be evaluated as a constant, return null. 2575 HConstant* TryStaticEvaluation() const; 2576 2577 // Apply this operation to `x`. 
2578 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2579 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2580 2581 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation); 2582 2583 private: 2584 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2585}; 2586 2587class HBinaryOperation : public HExpression<2> { 2588 public: 2589 HBinaryOperation(Primitive::Type result_type, 2590 HInstruction* left, 2591 HInstruction* right, 2592 SideEffects side_effects = SideEffects::None(), 2593 uint32_t dex_pc = kNoDexPc) 2594 : HExpression(result_type, side_effects, dex_pc) { 2595 SetRawInputAt(0, left); 2596 SetRawInputAt(1, right); 2597 } 2598 2599 HInstruction* GetLeft() const { return InputAt(0); } 2600 HInstruction* GetRight() const { return InputAt(1); } 2601 Primitive::Type GetResultType() const { return GetType(); } 2602 2603 virtual bool IsCommutative() const { return false; } 2604 2605 // Put constant on the right. 2606 // Returns whether order is changed. 2607 bool OrderInputsWithConstantOnTheRight() { 2608 HInstruction* left = InputAt(0); 2609 HInstruction* right = InputAt(1); 2610 if (left->IsConstant() && !right->IsConstant()) { 2611 ReplaceInput(right, 0); 2612 ReplaceInput(left, 1); 2613 return true; 2614 } 2615 return false; 2616 } 2617 2618 // Order inputs by instruction id, but favor constant on the right side. 2619 // This helps GVN for commutative ops. 2620 void OrderInputs() { 2621 DCHECK(IsCommutative()); 2622 HInstruction* left = InputAt(0); 2623 HInstruction* right = InputAt(1); 2624 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2625 return; 2626 } 2627 if (OrderInputsWithConstantOnTheRight()) { 2628 return; 2629 } 2630 // Order according to instruction id. 
2631 if (left->GetId() > right->GetId()) { 2632 ReplaceInput(right, 0); 2633 ReplaceInput(left, 1); 2634 } 2635 } 2636 2637 bool CanBeMoved() const OVERRIDE { return true; } 2638 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2639 return true; 2640 } 2641 2642 // Try to statically evaluate `operation` and return a HConstant 2643 // containing the result of this evaluation. If `operation` cannot 2644 // be evaluated as a constant, return null. 2645 HConstant* TryStaticEvaluation() const; 2646 2647 // Apply this operation to `x` and `y`. 2648 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2649 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2650 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2651 HLongConstant* y ATTRIBUTE_UNUSED) const { 2652 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2653 return nullptr; 2654 } 2655 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2656 HIntConstant* y ATTRIBUTE_UNUSED) const { 2657 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2658 return nullptr; 2659 } 2660 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2661 HNullConstant* y ATTRIBUTE_UNUSED) const { 2662 VLOG(compiler) << DebugName() << " is not defined for the (null, null) case."; 2663 return nullptr; 2664 } 2665 2666 // Returns an input that can legally be used as the right input and is 2667 // constant, or null. 2668 HConstant* GetConstantRight() const; 2669 2670 // If `GetConstantRight()` returns one of the input, this returns the other 2671 // one. Otherwise it returns null. 
2672 HInstruction* GetLeastConstantLeft() const; 2673 2674 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation); 2675 2676 private: 2677 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2678}; 2679 2680// The comparison bias applies for floating point operations and indicates how NaN 2681// comparisons are treated: 2682enum class ComparisonBias { 2683 kNoBias, // bias is not applicable (i.e. for long operation) 2684 kGtBias, // return 1 for NaN comparisons 2685 kLtBias, // return -1 for NaN comparisons 2686}; 2687 2688class HCondition : public HBinaryOperation { 2689 public: 2690 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2691 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2692 needs_materialization_(true), 2693 bias_(ComparisonBias::kNoBias) {} 2694 2695 bool NeedsMaterialization() const { return needs_materialization_; } 2696 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2697 2698 // For code generation purposes, returns whether this instruction is just before 2699 // `instruction`, and disregard moves in between. 2700 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2701 2702 DECLARE_ABSTRACT_INSTRUCTION(Condition); 2703 2704 virtual IfCondition GetCondition() const = 0; 2705 2706 virtual IfCondition GetOppositeCondition() const = 0; 2707 2708 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2709 2710 void SetBias(ComparisonBias bias) { bias_ = bias; } 2711 2712 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2713 return bias_ == other->AsCondition()->bias_; 2714 } 2715 2716 bool IsFPConditionTrueIfNaN() const { 2717 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2718 IfCondition if_cond = GetCondition(); 2719 return IsGtBias() ? 
((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2720 } 2721 2722 bool IsFPConditionFalseIfNaN() const { 2723 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2724 IfCondition if_cond = GetCondition(); 2725 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2726 } 2727 2728 private: 2729 // For register allocation purposes, returns whether this instruction needs to be 2730 // materialized (that is, not just be in the processor flags). 2731 bool needs_materialization_; 2732 2733 // Needed if we merge a HCompare into a HCondition. 2734 ComparisonBias bias_; 2735 2736 DISALLOW_COPY_AND_ASSIGN(HCondition); 2737}; 2738 2739// Instruction to check if two inputs are equal to each other. 2740class HEqual : public HCondition { 2741 public: 2742 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2743 : HCondition(first, second, dex_pc) {} 2744 2745 bool IsCommutative() const OVERRIDE { return true; } 2746 2747 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2748 return GetBlock()->GetGraph()->GetIntConstant( 2749 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2750 } 2751 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2752 return GetBlock()->GetGraph()->GetIntConstant( 2753 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2754 } 2755 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2756 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2757 return GetBlock()->GetGraph()->GetIntConstant(1); 2758 } 2759 2760 DECLARE_INSTRUCTION(Equal); 2761 2762 IfCondition GetCondition() const OVERRIDE { 2763 return kCondEQ; 2764 } 2765 2766 IfCondition GetOppositeCondition() const OVERRIDE { 2767 return kCondNE; 2768 } 2769 2770 private: 2771 template <typename T> bool Compute(T x, T y) const { return x == y; } 2772 2773 DISALLOW_COPY_AND_ASSIGN(HEqual); 2774}; 2775 2776class HNotEqual : public HCondition { 2777 public: 2778 
HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2779 : HCondition(first, second, dex_pc) {} 2780 2781 bool IsCommutative() const OVERRIDE { return true; } 2782 2783 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2784 return GetBlock()->GetGraph()->GetIntConstant( 2785 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2786 } 2787 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2788 return GetBlock()->GetGraph()->GetIntConstant( 2789 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2790 } 2791 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2792 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2793 return GetBlock()->GetGraph()->GetIntConstant(0); 2794 } 2795 2796 DECLARE_INSTRUCTION(NotEqual); 2797 2798 IfCondition GetCondition() const OVERRIDE { 2799 return kCondNE; 2800 } 2801 2802 IfCondition GetOppositeCondition() const OVERRIDE { 2803 return kCondEQ; 2804 } 2805 2806 private: 2807 template <typename T> bool Compute(T x, T y) const { return x != y; } 2808 2809 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2810}; 2811 2812class HLessThan : public HCondition { 2813 public: 2814 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2815 : HCondition(first, second, dex_pc) {} 2816 2817 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2818 return GetBlock()->GetGraph()->GetIntConstant( 2819 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2820 } 2821 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2822 return GetBlock()->GetGraph()->GetIntConstant( 2823 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2824 } 2825 2826 DECLARE_INSTRUCTION(LessThan); 2827 2828 IfCondition GetCondition() const OVERRIDE { 2829 return kCondLT; 2830 } 2831 2832 IfCondition GetOppositeCondition() const OVERRIDE { 2833 return kCondGE; 2834 } 2835 2836 private: 2837 template <typename T> bool Compute(T x, T y) const { return x 
< y; } 2838 2839 DISALLOW_COPY_AND_ASSIGN(HLessThan); 2840}; 2841 2842class HLessThanOrEqual : public HCondition { 2843 public: 2844 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2845 : HCondition(first, second, dex_pc) {} 2846 2847 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2848 return GetBlock()->GetGraph()->GetIntConstant( 2849 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2850 } 2851 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2852 return GetBlock()->GetGraph()->GetIntConstant( 2853 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2854 } 2855 2856 DECLARE_INSTRUCTION(LessThanOrEqual); 2857 2858 IfCondition GetCondition() const OVERRIDE { 2859 return kCondLE; 2860 } 2861 2862 IfCondition GetOppositeCondition() const OVERRIDE { 2863 return kCondGT; 2864 } 2865 2866 private: 2867 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2868 2869 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual); 2870}; 2871 2872class HGreaterThan : public HCondition { 2873 public: 2874 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2875 : HCondition(first, second, dex_pc) {} 2876 2877 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2878 return GetBlock()->GetGraph()->GetIntConstant( 2879 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2880 } 2881 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2882 return GetBlock()->GetGraph()->GetIntConstant( 2883 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2884 } 2885 2886 DECLARE_INSTRUCTION(GreaterThan); 2887 2888 IfCondition GetCondition() const OVERRIDE { 2889 return kCondGT; 2890 } 2891 2892 IfCondition GetOppositeCondition() const OVERRIDE { 2893 return kCondLE; 2894 } 2895 2896 private: 2897 template <typename T> bool Compute(T x, T y) const { return x > y; } 2898 2899 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 2900}; 2901 2902class 
HGreaterThanOrEqual : public HCondition { 2903 public: 2904 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2905 : HCondition(first, second, dex_pc) {} 2906 2907 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2908 return GetBlock()->GetGraph()->GetIntConstant( 2909 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2910 } 2911 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2912 return GetBlock()->GetGraph()->GetIntConstant( 2913 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2914 } 2915 2916 DECLARE_INSTRUCTION(GreaterThanOrEqual); 2917 2918 IfCondition GetCondition() const OVERRIDE { 2919 return kCondGE; 2920 } 2921 2922 IfCondition GetOppositeCondition() const OVERRIDE { 2923 return kCondLT; 2924 } 2925 2926 private: 2927 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2928 2929 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2930}; 2931 2932class HBelow : public HCondition { 2933 public: 2934 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2935 : HCondition(first, second, dex_pc) {} 2936 2937 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2938 return GetBlock()->GetGraph()->GetIntConstant( 2939 Compute(static_cast<uint32_t>(x->GetValue()), 2940 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2941 } 2942 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2943 return GetBlock()->GetGraph()->GetIntConstant( 2944 Compute(static_cast<uint64_t>(x->GetValue()), 2945 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2946 } 2947 2948 DECLARE_INSTRUCTION(Below); 2949 2950 IfCondition GetCondition() const OVERRIDE { 2951 return kCondB; 2952 } 2953 2954 IfCondition GetOppositeCondition() const OVERRIDE { 2955 return kCondAE; 2956 } 2957 2958 private: 2959 template <typename T> bool Compute(T x, T y) const { return x < y; } 2960 2961 DISALLOW_COPY_AND_ASSIGN(HBelow); 2962}; 
2963 2964class HBelowOrEqual : public HCondition { 2965 public: 2966 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2967 : HCondition(first, second, dex_pc) {} 2968 2969 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2970 return GetBlock()->GetGraph()->GetIntConstant( 2971 Compute(static_cast<uint32_t>(x->GetValue()), 2972 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2973 } 2974 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2975 return GetBlock()->GetGraph()->GetIntConstant( 2976 Compute(static_cast<uint64_t>(x->GetValue()), 2977 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2978 } 2979 2980 DECLARE_INSTRUCTION(BelowOrEqual); 2981 2982 IfCondition GetCondition() const OVERRIDE { 2983 return kCondBE; 2984 } 2985 2986 IfCondition GetOppositeCondition() const OVERRIDE { 2987 return kCondA; 2988 } 2989 2990 private: 2991 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2992 2993 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2994}; 2995 2996class HAbove : public HCondition { 2997 public: 2998 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2999 : HCondition(first, second, dex_pc) {} 3000 3001 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3002 return GetBlock()->GetGraph()->GetIntConstant( 3003 Compute(static_cast<uint32_t>(x->GetValue()), 3004 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3005 } 3006 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3007 return GetBlock()->GetGraph()->GetIntConstant( 3008 Compute(static_cast<uint64_t>(x->GetValue()), 3009 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3010 } 3011 3012 DECLARE_INSTRUCTION(Above); 3013 3014 IfCondition GetCondition() const OVERRIDE { 3015 return kCondA; 3016 } 3017 3018 IfCondition GetOppositeCondition() const OVERRIDE { 3019 return kCondBE; 3020 } 3021 3022 private: 3023 template <typename T> bool 
Compute(T x, T y) const { return x > y; } 3024 3025 DISALLOW_COPY_AND_ASSIGN(HAbove); 3026}; 3027 3028class HAboveOrEqual : public HCondition { 3029 public: 3030 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3031 : HCondition(first, second, dex_pc) {} 3032 3033 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3034 return GetBlock()->GetGraph()->GetIntConstant( 3035 Compute(static_cast<uint32_t>(x->GetValue()), 3036 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3037 } 3038 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3039 return GetBlock()->GetGraph()->GetIntConstant( 3040 Compute(static_cast<uint64_t>(x->GetValue()), 3041 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3042 } 3043 3044 DECLARE_INSTRUCTION(AboveOrEqual); 3045 3046 IfCondition GetCondition() const OVERRIDE { 3047 return kCondAE; 3048 } 3049 3050 IfCondition GetOppositeCondition() const OVERRIDE { 3051 return kCondB; 3052 } 3053 3054 private: 3055 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3056 3057 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3058}; 3059 3060// Instruction to check how two inputs compare to each other. 3061// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3062class HCompare : public HBinaryOperation { 3063 public: 3064 HCompare(Primitive::Type type, 3065 HInstruction* first, 3066 HInstruction* second, 3067 ComparisonBias bias, 3068 uint32_t dex_pc) 3069 : HBinaryOperation(Primitive::kPrimInt, 3070 first, 3071 second, 3072 SideEffectsForArchRuntimeCalls(type), 3073 dex_pc), 3074 bias_(bias) { 3075 DCHECK_EQ(type, first->GetType()); 3076 DCHECK_EQ(type, second->GetType()); 3077 } 3078 3079 template <typename T> 3080 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 
1 : -1; } 3081 3082 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3083 return GetBlock()->GetGraph()->GetIntConstant( 3084 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3085 } 3086 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3087 return GetBlock()->GetGraph()->GetIntConstant( 3088 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3089 } 3090 3091 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3092 return bias_ == other->AsCompare()->bias_; 3093 } 3094 3095 ComparisonBias GetBias() const { return bias_; } 3096 3097 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3098 3099 3100 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3101 // MIPS64 uses a runtime call for FP comparisons. 3102 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3103 } 3104 3105 DECLARE_INSTRUCTION(Compare); 3106 3107 private: 3108 const ComparisonBias bias_; 3109 3110 DISALLOW_COPY_AND_ASSIGN(HCompare); 3111}; 3112 3113// A local in the graph. Corresponds to a Dex register. 3114class HLocal : public HTemplateInstruction<0> { 3115 public: 3116 explicit HLocal(uint16_t reg_number) 3117 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3118 3119 DECLARE_INSTRUCTION(Local); 3120 3121 uint16_t GetRegNumber() const { return reg_number_; } 3122 3123 private: 3124 // The Dex register number. 3125 const uint16_t reg_number_; 3126 3127 DISALLOW_COPY_AND_ASSIGN(HLocal); 3128}; 3129 3130// Load a given local. The local is an input of this instruction. 
3131class HLoadLocal : public HExpression<1> { 3132 public: 3133 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3134 : HExpression(type, SideEffects::None(), dex_pc) { 3135 SetRawInputAt(0, local); 3136 } 3137 3138 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3139 3140 DECLARE_INSTRUCTION(LoadLocal); 3141 3142 private: 3143 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3144}; 3145 3146// Store a value in a given local. This instruction has two inputs: the value 3147// and the local. 3148class HStoreLocal : public HTemplateInstruction<2> { 3149 public: 3150 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3151 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3152 SetRawInputAt(0, local); 3153 SetRawInputAt(1, value); 3154 } 3155 3156 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3157 3158 DECLARE_INSTRUCTION(StoreLocal); 3159 3160 private: 3161 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3162}; 3163 3164class HFloatConstant : public HConstant { 3165 public: 3166 float GetValue() const { return value_; } 3167 3168 uint64_t GetValueAsUint64() const OVERRIDE { 3169 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3170 } 3171 3172 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3173 DCHECK(other->IsFloatConstant()); 3174 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3175 } 3176 3177 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3178 3179 bool IsMinusOne() const OVERRIDE { 3180 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3181 } 3182 bool IsZero() const OVERRIDE { 3183 return value_ == 0.0f; 3184 } 3185 bool IsOne() const OVERRIDE { 3186 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3187 } 3188 bool IsNaN() const { 3189 return std::isnan(value_); 3190 } 3191 3192 DECLARE_INSTRUCTION(FloatConstant); 3193 
3194 private: 3195 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3196 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3197 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3198 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3199 3200 const float value_; 3201 3202 // Only the SsaBuilder and HGraph can create floating-point constants. 3203 friend class SsaBuilder; 3204 friend class HGraph; 3205 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3206}; 3207 3208class HDoubleConstant : public HConstant { 3209 public: 3210 double GetValue() const { return value_; } 3211 3212 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3213 3214 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3215 DCHECK(other->IsDoubleConstant()); 3216 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3217 } 3218 3219 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3220 3221 bool IsMinusOne() const OVERRIDE { 3222 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3223 } 3224 bool IsZero() const OVERRIDE { 3225 return value_ == 0.0; 3226 } 3227 bool IsOne() const OVERRIDE { 3228 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3229 } 3230 bool IsNaN() const { 3231 return std::isnan(value_); 3232 } 3233 3234 DECLARE_INSTRUCTION(DoubleConstant); 3235 3236 private: 3237 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3238 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3239 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3240 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3241 3242 const double value_; 3243 3244 // Only the SsaBuilder and HGraph can create floating-point constants. 
3245 friend class SsaBuilder; 3246 friend class HGraph; 3247 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3248}; 3249 3250enum class Intrinsics { 3251#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \ 3252 k ## Name, 3253#include "intrinsics_list.h" 3254 kNone, 3255 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3256#undef INTRINSICS_LIST 3257#undef OPTIMIZING_INTRINSICS 3258}; 3259std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3260 3261enum IntrinsicNeedsEnvironmentOrCache { 3262 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3263 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3264}; 3265 3266enum IntrinsicSideEffects { 3267 kNoSideEffects, // Intrinsic does not have any heap memory side effects. 3268 kReadSideEffects, // Intrinsic may read heap memory. 3269 kWriteSideEffects, // Intrinsic may write heap memory. 3270 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC. 3271}; 3272 3273enum IntrinsicExceptions { 3274 kNoThrow, // Intrinsic does not throw any exceptions. 3275 kCanThrow // Intrinsic may throw exceptions. 3276}; 3277 3278class HInvoke : public HInstruction { 3279 public: 3280 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3281 3282 bool NeedsEnvironment() const OVERRIDE; 3283 3284 void SetArgumentAt(size_t index, HInstruction* argument) { 3285 SetRawInputAt(index, argument); 3286 } 3287 3288 // Return the number of arguments. This number can be lower than 3289 // the number of inputs returned by InputCount(), as some invoke 3290 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3291 // inputs at the end of their list of inputs. 
3292 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3293 3294 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3295 3296 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3297 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3298 3299 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3300 3301 Intrinsics GetIntrinsic() const { 3302 return intrinsic_; 3303 } 3304 3305 void SetIntrinsic(Intrinsics intrinsic, 3306 IntrinsicNeedsEnvironmentOrCache needs_env_or_cache, 3307 IntrinsicSideEffects side_effects, 3308 IntrinsicExceptions exceptions); 3309 3310 bool IsFromInlinedInvoke() const { 3311 return GetEnvironment()->IsFromInlinedInvoke(); 3312 } 3313 3314 bool CanThrow() const OVERRIDE { return can_throw_; } 3315 3316 bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); } 3317 3318 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3319 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_; 3320 } 3321 3322 uint32_t* GetIntrinsicOptimizations() { 3323 return &intrinsic_optimizations_; 3324 } 3325 3326 const uint32_t* GetIntrinsicOptimizations() const { 3327 return &intrinsic_optimizations_; 3328 } 3329 3330 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3331 3332 DECLARE_ABSTRACT_INSTRUCTION(Invoke); 3333 3334 protected: 3335 HInvoke(ArenaAllocator* arena, 3336 uint32_t number_of_arguments, 3337 uint32_t number_of_other_inputs, 3338 Primitive::Type return_type, 3339 uint32_t dex_pc, 3340 uint32_t dex_method_index, 3341 InvokeType original_invoke_type) 3342 : HInstruction( 3343 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 
3344 number_of_arguments_(number_of_arguments), 3345 inputs_(number_of_arguments + number_of_other_inputs, 3346 arena->Adapter(kArenaAllocInvokeInputs)), 3347 return_type_(return_type), 3348 dex_method_index_(dex_method_index), 3349 original_invoke_type_(original_invoke_type), 3350 can_throw_(true), 3351 intrinsic_(Intrinsics::kNone), 3352 intrinsic_optimizations_(0) { 3353 } 3354 3355 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3356 return inputs_[index]; 3357 } 3358 3359 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3360 inputs_[index] = input; 3361 } 3362 3363 void SetCanThrow(bool can_throw) { can_throw_ = can_throw; } 3364 3365 uint32_t number_of_arguments_; 3366 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3367 const Primitive::Type return_type_; 3368 const uint32_t dex_method_index_; 3369 const InvokeType original_invoke_type_; 3370 bool can_throw_; 3371 Intrinsics intrinsic_; 3372 3373 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3374 uint32_t intrinsic_optimizations_; 3375 3376 private: 3377 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3378}; 3379 3380class HInvokeUnresolved : public HInvoke { 3381 public: 3382 HInvokeUnresolved(ArenaAllocator* arena, 3383 uint32_t number_of_arguments, 3384 Primitive::Type return_type, 3385 uint32_t dex_pc, 3386 uint32_t dex_method_index, 3387 InvokeType invoke_type) 3388 : HInvoke(arena, 3389 number_of_arguments, 3390 0u /* number_of_other_inputs */, 3391 return_type, 3392 dex_pc, 3393 dex_method_index, 3394 invoke_type) { 3395 } 3396 3397 DECLARE_INSTRUCTION(InvokeUnresolved); 3398 3399 private: 3400 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3401}; 3402 3403class HInvokeStaticOrDirect : public HInvoke { 3404 public: 3405 // Requirements of this method call regarding the class 3406 // initialization (clinit) check of its declaring class. 3407 enum class ClinitCheckRequirement { 3408 kNone, // Class already initialized. 
3409 kExplicit, // Static call having explicit clinit check as last input. 3410 kImplicit, // Static call implicitly requiring a clinit check. 3411 }; 3412 3413 // Determines how to load the target ArtMethod*. 3414 enum class MethodLoadKind { 3415 // Use a String init ArtMethod* loaded from Thread entrypoints. 3416 kStringInit, 3417 3418 // Use the method's own ArtMethod* loaded by the register allocator. 3419 kRecursive, 3420 3421 // Use ArtMethod* at a known address, embed the direct address in the code. 3422 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3423 kDirectAddress, 3424 3425 // Use ArtMethod* at an address that will be known at link time, embed the direct 3426 // address in the code. If the image is relocatable, emit .patch_oat entry. 3427 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3428 // the image relocatable or not. 3429 kDirectAddressWithFixup, 3430 3431 // Load from resoved methods array in the dex cache using a PC-relative load. 3432 // Used when we need to use the dex cache, for example for invoke-static that 3433 // may cause class initialization (the entry may point to a resolution method), 3434 // and we know that we can access the dex cache arrays using a PC-relative load. 3435 kDexCachePcRelative, 3436 3437 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3438 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3439 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3440 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3441 kDexCacheViaMethod, 3442 }; 3443 3444 // Determines the location of the code pointer. 3445 enum class CodePtrLocation { 3446 // Recursive call, use local PC-relative call instruction. 3447 kCallSelf, 3448 3449 // Use PC-relative call instruction patched at link time. 
3450 // Used for calls within an oat file, boot->boot or app->app. 3451 kCallPCRelative, 3452 3453 // Call to a known target address, embed the direct address in code. 3454 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3455 kCallDirect, 3456 3457 // Call to a target address that will be known at link time, embed the direct 3458 // address in code. If the image is relocatable, emit .patch_oat entry. 3459 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3460 // the image relocatable or not. 3461 kCallDirectWithFixup, 3462 3463 // Use code pointer from the ArtMethod*. 3464 // Used when we don't know the target code. This is also the last-resort-kind used when 3465 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3466 kCallArtMethod, 3467 }; 3468 3469 struct DispatchInfo { 3470 MethodLoadKind method_load_kind; 3471 CodePtrLocation code_ptr_location; 3472 // The method load data holds 3473 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3474 // Note that there are multiple string init methods, each having its own offset. 3475 // - the method address for kDirectAddress 3476 // - the dex cache arrays offset for kDexCachePcRel. 3477 uint64_t method_load_data; 3478 uint64_t direct_code_ptr; 3479 }; 3480 3481 HInvokeStaticOrDirect(ArenaAllocator* arena, 3482 uint32_t number_of_arguments, 3483 Primitive::Type return_type, 3484 uint32_t dex_pc, 3485 uint32_t method_index, 3486 MethodReference target_method, 3487 DispatchInfo dispatch_info, 3488 InvokeType original_invoke_type, 3489 InvokeType optimized_invoke_type, 3490 ClinitCheckRequirement clinit_check_requirement) 3491 : HInvoke(arena, 3492 number_of_arguments, 3493 // There is potentially one extra argument for the HCurrentMethod node, and 3494 // potentially one other if the clinit check is explicit, and potentially 3495 // one other if the method is a string factory. 
3496 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) + 3497 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) + 3498 (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3499 return_type, 3500 dex_pc, 3501 method_index, 3502 original_invoke_type), 3503 optimized_invoke_type_(optimized_invoke_type), 3504 clinit_check_requirement_(clinit_check_requirement), 3505 target_method_(target_method), 3506 dispatch_info_(dispatch_info) { } 3507 3508 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 3509 bool had_current_method_input = HasCurrentMethodInput(); 3510 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind); 3511 3512 // Using the current method is the default and once we find a better 3513 // method load kind, we should not go back to using the current method. 3514 DCHECK(had_current_method_input || !needs_current_method_input); 3515 3516 if (had_current_method_input && !needs_current_method_input) { 3517 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod()); 3518 RemoveInputAt(GetSpecialInputIndex()); 3519 } 3520 dispatch_info_ = dispatch_info; 3521 } 3522 3523 void AddSpecialInput(HInstruction* input) { 3524 // We allow only one special input. 3525 DCHECK(!IsStringInit() && !HasCurrentMethodInput()); 3526 DCHECK(InputCount() == GetSpecialInputIndex() || 3527 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck())); 3528 InsertInputAt(GetSpecialInputIndex(), input); 3529 } 3530 3531 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3532 // We access the method via the dex cache so we can't do an implicit null check. 3533 // TODO: for intrinsics we can generate implicit null checks. 
3534 return false; 3535 } 3536 3537 bool CanBeNull() const OVERRIDE { 3538 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3539 } 3540 3541 // Get the index of the special input, if any. 3542 // 3543 // If the invoke IsStringInit(), it initially has a HFakeString special argument 3544 // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(), 3545 // the "special input" is the current method pointer; otherwise there may be one 3546 // platform-specific special input, such as PC-relative addressing base. 3547 uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); } 3548 3549 InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; } 3550 void SetOptimizedInvokeType(InvokeType invoke_type) { 3551 optimized_invoke_type_ = invoke_type; 3552 } 3553 3554 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3555 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3556 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3557 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE; 3558 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3559 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3560 bool HasPcRelativeDexCache() const { 3561 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; 3562 } 3563 bool HasCurrentMethodInput() const { 3564 // This function can be called only after the invoke has been fully initialized by the builder. 
// HInvokeStaticOrDirect (continued): dispatch-info accessors and clinit-check
// handling. NOTE(review): the signature of the enclosing method (presumably
// HasCurrentMethodInput()) opens above this chunk; the body below reports
// whether the special input slot holds the current method.
if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      // Either there is no special input at all, or it holds something other
      // than the current method.
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  // True when the call is dispatched through a direct code pointer.
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }

  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }

  // The `method_load_data` field of DispatchInfo is overloaded: its meaning
  // depends on the method load kind, hence the DCHECKs in the getters below.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    clinit_check_requirement_ = new_requirement;
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // True when this is a string-init call whose extra last input is `str`.
  bool IsStringFactoryFor(HFakeString* str) const {
    if (!IsStringInit()) return false;
    DCHECK(!HasCurrentMethodInput());
    // No input beyond the arguments means no HFakeString was appended.
    if (InputCount() == (number_of_arguments_)) return false;
    return InputAt(InputCount() - 1)->AsFakeString() == str;
  }

  void RemoveFakeStringArgumentAsLastInput() {
    DCHECK(IsStringInit());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsFakeString()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }

  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 private:
  InvokeType optimized_invoke_type_;
  ClinitCheckRequirement clinit_check_requirement_;
  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
// HInvokeStaticOrDirect (continued): remaining private state and class end,
// followed by the virtual/interface invoke nodes and the start of HNewInstance.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);

// Invoke of a virtual method, dispatched through the given vtable index.
class HInvokeVirtual : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 uint32_t vtable_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
        vtable_index_(vtable_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    // Only the receiver (input 0) can carry the implicit null check, and not
    // when the call has been matched to an intrinsic.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};

// Invoke of an interface method, dispatched through the given IMT index.
class HInvokeInterface : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   uint32_t imt_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};

// Allocation of a new object instance. Inputs: the class to instantiate and
// the current method (passed to the runtime entrypoint).
class HNewInstance : public HExpression<2> {
 public:
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool can_throw,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        can_throw_(can_throw),
        finalizable_(finalizable),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // It may throw when called on type that's not instantiable/accessible.
  // It can throw OOME.
  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
// HNewInstance (continued), followed by HNeg, HNewArray, HAdd and the start of HSub.
  // NOTE(review): `can_throw_ || true` always yields true — allocation can
  // always throw (e.g. OOME). The redundant read appears to keep `can_throw_`
  // referenced until the two cases are distinguished; confirm before
  // simplifying to `return true;` (that would leave the field unused).
  bool CanThrow() const OVERRIDE { return can_throw_ || true; }

  bool IsFinalizable() const { return finalizable_; }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  DECLARE_INSTRUCTION(NewInstance);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool can_throw_;
  const bool finalizable_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};

// Arithmetic negation; constant-folded via Evaluate().
class HNeg : public HUnaryOperation {
 public:
  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  template <typename T> T Compute(T x) const { return -x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Neg);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNeg);
};

// Allocation of a new array. Inputs: the array length and the current method.
class HNewArray : public HExpression<2> {
 public:
  HNewArray(HInstruction* length,
            HCurrentMethod* current_method,
            uint32_t dex_pc,
            uint16_t type_index,
            const DexFile& dex_file,
            QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, length);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A freshly allocated array is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};

// Addition; commutative, constant-folded via Evaluate().
class HAdd : public HBinaryOperation {
 public:
  HAdd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x + y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Add);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAdd);
};

// Subtraction (not commutative); constant-folded via Evaluate().
class HSub : public HBinaryOperation {
 public:
  HSub(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T> T Compute(T x, T y) const { return x - y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
// HSub (continued), followed by HMul, HDiv, HRem, HDivZeroCheck and the start of HShl.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Multiplication; commutative, constant-folded via Evaluate().
class HMul : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Division. Constant folding relies on the graph never feeding a zero divisor.
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Remainder. Constant folding relies on the graph never feeding a zero divisor.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Check guarding a division/remainder; movable, needs an environment and can throw.
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check's type follows its input's type.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Left shift; the shift distance is masked with the type's max shift value,
// matching Java shift semantics.
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
// HShl (continued): remaining constant-folding overloads; then the other
// shift/rotate and bitwise-logical binary operations.
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Arithmetic (signed) right shift; the shift distance is masked as for HShl.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Logical (zero-extending) right shift: computed on the unsigned value.
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    // Shift on the unsigned representation so vacated bits are zero-filled.
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND; commutative, folds mixed int/long operand combinations.
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR; commutative, folds mixed int/long operand combinations.
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x | y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};

// Bitwise XOR; commutative, folds mixed int/long operand combinations.
class HXor : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};

// Rotate right: `value` rotated by `distance` (masked with the max shift value).
class HRor : public HBinaryOperation {
 public:
  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
      : HBinaryOperation(result_type, value, distance) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    // Use the unsigned representation for the right shift so vacated bits are
    // zero-filled; a zero (masked) distance must not reach the shift-pair
    // below, which would shift by the full register width.
    V ux = static_cast<V>(x);
    if ((y & max_shift_value) == 0) {
      return static_cast<T>(ux);
    } else {
      const V reg_bits = sizeof(T) * 8;
      return static_cast<T>(ux >> (y & max_shift_value)) |
             (x << (reg_bits - (y & max_shift_value)));
    }
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Ror);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRor);
};
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index),
        is_this_(is_this),
        // The 'this' argument of an instance method is never null.
        can_be_null_(!is_this) {}

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return is_this_; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  // Whether or not the parameter value corresponds to 'this' argument.
  const bool is_this_;

  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement; constant-folded via Evaluate().
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean (0/1) input; undefined for long values.
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> bool Compute(T x) const {
    // The input must already be a canonical boolean (0 or 1).
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

// Primitive type conversion (e.g. int-to-float, float-to-long).
class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // A conversion to the same type would be a no-op and must not be built.
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

// Sentinel for "no dex register number" (wraps to the max uint32_t value).
static constexpr uint32_t kNoRegNumber = -1;

// SSA phi: merges values of the same dex register coming from the
// predecessors of its basic block.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(ToPhiType(type)),
        // Phis are constructed live and marked dead if conflicting or unused.
        // Individual steps of SsaBuilder should assume that if a phi has been
        // marked dead, it can be ignored and will be removed by SsaPhiElimination.
        is_live_(true),
        can_be_null_(true) {
    DCHECK_NE(type_, Primitive::kPrimVoid);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  // Sub-int types are widened to int; everything else is kept as-is.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type type) { type_ = type; }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  // The dex register this phi merges.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return is_live_; }

  // True when `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    // Safe to AsPhi() without a check: phis are grouped at block start, so
    // every successor of a phi with the same reg number is itself a phi.
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Null check on its input; movable, needs an environment and can throw.
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // If the check passes, the checked value is known non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Immutable description of a field access, shared by the field get/set nodes.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Read of an instance field. Input 0 is the object holding the field.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads must keep their ordering and cannot be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only accesses within the first page can fault on a null object and thus
    // serve as an implicit null check.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Write of an instance field. Input 0 is the object, input 1 the value.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // See HInstanceFieldGet: only first-page offsets can fault on null.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Read of an array element. Inputs: the array (0) and the index (1).
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
4702 return false; 4703 } 4704 4705 void SetType(Primitive::Type type) { type_ = type; } 4706 4707 HInstruction* GetArray() const { return InputAt(0); } 4708 HInstruction* GetIndex() const { return InputAt(1); } 4709 4710 DECLARE_INSTRUCTION(ArrayGet); 4711 4712 private: 4713 DISALLOW_COPY_AND_ASSIGN(HArrayGet); 4714}; 4715 4716class HArraySet : public HTemplateInstruction<3> { 4717 public: 4718 HArraySet(HInstruction* array, 4719 HInstruction* index, 4720 HInstruction* value, 4721 Primitive::Type expected_component_type, 4722 uint32_t dex_pc, 4723 SideEffects additional_side_effects = SideEffects::None()) 4724 : HTemplateInstruction( 4725 SideEffects::ArrayWriteOfType(expected_component_type).Union( 4726 SideEffectsForArchRuntimeCalls(value->GetType())).Union( 4727 additional_side_effects), 4728 dex_pc), 4729 expected_component_type_(expected_component_type), 4730 needs_type_check_(value->GetType() == Primitive::kPrimNot), 4731 value_can_be_null_(true), 4732 static_type_of_array_is_object_array_(false) { 4733 SetRawInputAt(0, array); 4734 SetRawInputAt(1, index); 4735 SetRawInputAt(2, value); 4736 } 4737 4738 bool NeedsEnvironment() const OVERRIDE { 4739 // We currently always call a runtime method to catch array store 4740 // exceptions. 4741 return needs_type_check_; 4742 } 4743 4744 // Can throw ArrayStoreException. 4745 bool CanThrow() const OVERRIDE { return needs_type_check_; } 4746 4747 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4748 // TODO: Same as for ArrayGet. 
4749 return false; 4750 } 4751 4752 void ClearNeedsTypeCheck() { 4753 needs_type_check_ = false; 4754 } 4755 4756 void ClearValueCanBeNull() { 4757 value_can_be_null_ = false; 4758 } 4759 4760 void SetStaticTypeOfArrayIsObjectArray() { 4761 static_type_of_array_is_object_array_ = true; 4762 } 4763 4764 bool GetValueCanBeNull() const { return value_can_be_null_; } 4765 bool NeedsTypeCheck() const { return needs_type_check_; } 4766 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4767 4768 HInstruction* GetArray() const { return InputAt(0); } 4769 HInstruction* GetIndex() const { return InputAt(1); } 4770 HInstruction* GetValue() const { return InputAt(2); } 4771 4772 Primitive::Type GetComponentType() const { 4773 // The Dex format does not type floating point index operations. Since the 4774 // `expected_component_type_` is set during building and can therefore not 4775 // be correct, we also check what is the value type. If it is a floating 4776 // point type, we must use that type. 4777 Primitive::Type value_type = GetValue()->GetType(); 4778 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4779 ? value_type 4780 : expected_component_type_; 4781 } 4782 4783 Primitive::Type GetRawExpectedComponentType() const { 4784 return expected_component_type_; 4785 } 4786 4787 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4788 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None(); 4789 } 4790 4791 DECLARE_INSTRUCTION(ArraySet); 4792 4793 private: 4794 const Primitive::Type expected_component_type_; 4795 bool needs_type_check_; 4796 bool value_can_be_null_; 4797 // Cached information for the reference_type_info_ so that codegen 4798 // does not need to inspect the static type. 
4799 bool static_type_of_array_is_object_array_; 4800 4801 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4802}; 4803 4804class HArrayLength : public HExpression<1> { 4805 public: 4806 HArrayLength(HInstruction* array, uint32_t dex_pc) 4807 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4808 // Note that arrays do not change length, so the instruction does not 4809 // depend on any write. 4810 SetRawInputAt(0, array); 4811 } 4812 4813 bool CanBeMoved() const OVERRIDE { return true; } 4814 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4815 return true; 4816 } 4817 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4818 return obj == InputAt(0); 4819 } 4820 4821 DECLARE_INSTRUCTION(ArrayLength); 4822 4823 private: 4824 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4825}; 4826 4827class HBoundsCheck : public HExpression<2> { 4828 public: 4829 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4830 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4831 DCHECK(index->GetType() == Primitive::kPrimInt); 4832 SetRawInputAt(0, index); 4833 SetRawInputAt(1, length); 4834 } 4835 4836 bool CanBeMoved() const OVERRIDE { return true; } 4837 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4838 return true; 4839 } 4840 4841 bool NeedsEnvironment() const OVERRIDE { return true; } 4842 4843 bool CanThrow() const OVERRIDE { return true; } 4844 4845 HInstruction* GetIndex() const { return InputAt(0); } 4846 4847 DECLARE_INSTRUCTION(BoundsCheck); 4848 4849 private: 4850 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4851}; 4852 4853/** 4854 * Some DEX instructions are folded into multiple HInstructions that need 4855 * to stay live until the last HInstruction. This class 4856 * is used as a marker for the baseline compiler to ensure its preceding 4857 * HInstruction stays live. 
`index` represents the stack location index of the 4858 * instruction (the actual offset is computed as index * vreg_size). 4859 */ 4860class HTemporary : public HTemplateInstruction<0> { 4861 public: 4862 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4863 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4864 4865 size_t GetIndex() const { return index_; } 4866 4867 Primitive::Type GetType() const OVERRIDE { 4868 // The previous instruction is the one that will be stored in the temporary location. 4869 DCHECK(GetPrevious() != nullptr); 4870 return GetPrevious()->GetType(); 4871 } 4872 4873 DECLARE_INSTRUCTION(Temporary); 4874 4875 private: 4876 const size_t index_; 4877 DISALLOW_COPY_AND_ASSIGN(HTemporary); 4878}; 4879 4880class HSuspendCheck : public HTemplateInstruction<0> { 4881 public: 4882 explicit HSuspendCheck(uint32_t dex_pc) 4883 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {} 4884 4885 bool NeedsEnvironment() const OVERRIDE { 4886 return true; 4887 } 4888 4889 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; } 4890 SlowPathCode* GetSlowPath() const { return slow_path_; } 4891 4892 DECLARE_INSTRUCTION(SuspendCheck); 4893 4894 private: 4895 // Only used for code generation, in order to share the same slow path between back edges 4896 // of a same loop. 4897 SlowPathCode* slow_path_; 4898 4899 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck); 4900}; 4901 4902// Pseudo-instruction which provides the native debugger with mapping information. 4903// It ensures that we can generate line number and local variables at this point. 
4904class HNativeDebugInfo : public HTemplateInstruction<0> { 4905 public: 4906 explicit HNativeDebugInfo(uint32_t dex_pc) 4907 : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {} 4908 4909 bool NeedsEnvironment() const OVERRIDE { 4910 return true; 4911 } 4912 4913 DECLARE_INSTRUCTION(NativeDebugInfo); 4914 4915 private: 4916 DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo); 4917}; 4918 4919/** 4920 * Instruction to load a Class object. 4921 */ 4922class HLoadClass : public HExpression<1> { 4923 public: 4924 HLoadClass(HCurrentMethod* current_method, 4925 uint16_t type_index, 4926 const DexFile& dex_file, 4927 bool is_referrers_class, 4928 uint32_t dex_pc, 4929 bool needs_access_check, 4930 bool is_in_dex_cache) 4931 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4932 type_index_(type_index), 4933 dex_file_(dex_file), 4934 is_referrers_class_(is_referrers_class), 4935 generate_clinit_check_(false), 4936 needs_access_check_(needs_access_check), 4937 is_in_dex_cache_(is_in_dex_cache), 4938 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 4939 // Referrers class should not need access check. We never inline unverified 4940 // methods so we can't possibly end up in this situation. 4941 DCHECK(!is_referrers_class_ || !needs_access_check_); 4942 SetRawInputAt(0, current_method); 4943 } 4944 4945 bool CanBeMoved() const OVERRIDE { return true; } 4946 4947 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4948 // Note that we don't need to test for generate_clinit_check_. 4949 // Whether or not we need to generate the clinit check is processed in 4950 // prepare_for_register_allocator based on existing HInvokes and HClinitChecks. 
4951 return other->AsLoadClass()->type_index_ == type_index_ && 4952 other->AsLoadClass()->needs_access_check_ == needs_access_check_; 4953 } 4954 4955 size_t ComputeHashCode() const OVERRIDE { return type_index_; } 4956 4957 uint16_t GetTypeIndex() const { return type_index_; } 4958 bool IsReferrersClass() const { return is_referrers_class_; } 4959 bool CanBeNull() const OVERRIDE { return false; } 4960 4961 bool NeedsEnvironment() const OVERRIDE { 4962 return CanCallRuntime(); 4963 } 4964 4965 bool MustGenerateClinitCheck() const { 4966 return generate_clinit_check_; 4967 } 4968 4969 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 4970 // The entrypoint the code generator is going to call does not do 4971 // clinit of the class. 4972 DCHECK(!NeedsAccessCheck()); 4973 generate_clinit_check_ = generate_clinit_check; 4974 } 4975 4976 bool CanCallRuntime() const { 4977 return MustGenerateClinitCheck() || 4978 (!is_referrers_class_ && !is_in_dex_cache_) || 4979 needs_access_check_; 4980 } 4981 4982 bool NeedsAccessCheck() const { 4983 return needs_access_check_; 4984 } 4985 4986 bool CanThrow() const OVERRIDE { 4987 return CanCallRuntime(); 4988 } 4989 4990 ReferenceTypeInfo GetLoadedClassRTI() { 4991 return loaded_class_rti_; 4992 } 4993 4994 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 4995 // Make sure we only set exact types (the loaded class should never be merged). 
4996 DCHECK(rti.IsExact()); 4997 loaded_class_rti_ = rti; 4998 } 4999 5000 const DexFile& GetDexFile() { return dex_file_; } 5001 5002 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; } 5003 5004 static SideEffects SideEffectsForArchRuntimeCalls() { 5005 return SideEffects::CanTriggerGC(); 5006 } 5007 5008 bool IsInDexCache() const { return is_in_dex_cache_; } 5009 5010 DECLARE_INSTRUCTION(LoadClass); 5011 5012 private: 5013 const uint16_t type_index_; 5014 const DexFile& dex_file_; 5015 const bool is_referrers_class_; 5016 // Whether this instruction must generate the initialization check. 5017 // Used for code generation. 5018 bool generate_clinit_check_; 5019 const bool needs_access_check_; 5020 const bool is_in_dex_cache_; 5021 5022 ReferenceTypeInfo loaded_class_rti_; 5023 5024 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 5025}; 5026 5027class HLoadString : public HExpression<1> { 5028 public: 5029 HLoadString(HCurrentMethod* current_method, 5030 uint32_t string_index, 5031 uint32_t dex_pc, 5032 bool is_in_dex_cache) 5033 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 5034 string_index_(string_index), 5035 is_in_dex_cache_(is_in_dex_cache) { 5036 SetRawInputAt(0, current_method); 5037 } 5038 5039 bool CanBeMoved() const OVERRIDE { return true; } 5040 5041 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 5042 return other->AsLoadString()->string_index_ == string_index_; 5043 } 5044 5045 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 5046 5047 uint32_t GetStringIndex() const { return string_index_; } 5048 5049 // TODO: Can we deopt or debug when we resolve a string? 
5050 bool NeedsEnvironment() const OVERRIDE { return false; } 5051 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; } 5052 bool CanBeNull() const OVERRIDE { return false; } 5053 bool IsInDexCache() const { return is_in_dex_cache_; } 5054 5055 static SideEffects SideEffectsForArchRuntimeCalls() { 5056 return SideEffects::CanTriggerGC(); 5057 } 5058 5059 DECLARE_INSTRUCTION(LoadString); 5060 5061 private: 5062 const uint32_t string_index_; 5063 const bool is_in_dex_cache_; 5064 5065 DISALLOW_COPY_AND_ASSIGN(HLoadString); 5066}; 5067 5068/** 5069 * Performs an initialization check on its Class object input. 5070 */ 5071class HClinitCheck : public HExpression<1> { 5072 public: 5073 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 5074 : HExpression( 5075 Primitive::kPrimNot, 5076 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 5077 dex_pc) { 5078 SetRawInputAt(0, constant); 5079 } 5080 5081 bool CanBeMoved() const OVERRIDE { return true; } 5082 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5083 return true; 5084 } 5085 5086 bool NeedsEnvironment() const OVERRIDE { 5087 // May call runtime to initialize the class. 
5088 return true; 5089 } 5090 5091 bool CanThrow() const OVERRIDE { return true; } 5092 5093 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 5094 5095 DECLARE_INSTRUCTION(ClinitCheck); 5096 5097 private: 5098 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 5099}; 5100 5101class HStaticFieldGet : public HExpression<1> { 5102 public: 5103 HStaticFieldGet(HInstruction* cls, 5104 Primitive::Type field_type, 5105 MemberOffset field_offset, 5106 bool is_volatile, 5107 uint32_t field_idx, 5108 uint16_t declaring_class_def_index, 5109 const DexFile& dex_file, 5110 Handle<mirror::DexCache> dex_cache, 5111 uint32_t dex_pc) 5112 : HExpression(field_type, 5113 SideEffects::FieldReadOfType(field_type, is_volatile), 5114 dex_pc), 5115 field_info_(field_offset, 5116 field_type, 5117 is_volatile, 5118 field_idx, 5119 declaring_class_def_index, 5120 dex_file, 5121 dex_cache) { 5122 SetRawInputAt(0, cls); 5123 } 5124 5125 5126 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 5127 5128 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 5129 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 5130 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 5131 } 5132 5133 size_t ComputeHashCode() const OVERRIDE { 5134 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 5135 } 5136 5137 const FieldInfo& GetFieldInfo() const { return field_info_; } 5138 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 5139 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 5140 bool IsVolatile() const { return field_info_.IsVolatile(); } 5141 5142 DECLARE_INSTRUCTION(StaticFieldGet); 5143 5144 private: 5145 const FieldInfo field_info_; 5146 5147 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 5148}; 5149 5150class HStaticFieldSet : public HTemplateInstruction<2> { 5151 public: 5152 HStaticFieldSet(HInstruction* cls, 5153 HInstruction* value, 5154 Primitive::Type 
field_type, 5155 MemberOffset field_offset, 5156 bool is_volatile, 5157 uint32_t field_idx, 5158 uint16_t declaring_class_def_index, 5159 const DexFile& dex_file, 5160 Handle<mirror::DexCache> dex_cache, 5161 uint32_t dex_pc) 5162 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 5163 dex_pc), 5164 field_info_(field_offset, 5165 field_type, 5166 is_volatile, 5167 field_idx, 5168 declaring_class_def_index, 5169 dex_file, 5170 dex_cache), 5171 value_can_be_null_(true) { 5172 SetRawInputAt(0, cls); 5173 SetRawInputAt(1, value); 5174 } 5175 5176 const FieldInfo& GetFieldInfo() const { return field_info_; } 5177 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 5178 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 5179 bool IsVolatile() const { return field_info_.IsVolatile(); } 5180 5181 HInstruction* GetValue() const { return InputAt(1); } 5182 bool GetValueCanBeNull() const { return value_can_be_null_; } 5183 void ClearValueCanBeNull() { value_can_be_null_ = false; } 5184 5185 DECLARE_INSTRUCTION(StaticFieldSet); 5186 5187 private: 5188 const FieldInfo field_info_; 5189 bool value_can_be_null_; 5190 5191 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 5192}; 5193 5194class HUnresolvedInstanceFieldGet : public HExpression<1> { 5195 public: 5196 HUnresolvedInstanceFieldGet(HInstruction* obj, 5197 Primitive::Type field_type, 5198 uint32_t field_index, 5199 uint32_t dex_pc) 5200 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5201 field_index_(field_index) { 5202 SetRawInputAt(0, obj); 5203 } 5204 5205 bool NeedsEnvironment() const OVERRIDE { return true; } 5206 bool CanThrow() const OVERRIDE { return true; } 5207 5208 Primitive::Type GetFieldType() const { return GetType(); } 5209 uint32_t GetFieldIndex() const { return field_index_; } 5210 5211 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 5212 5213 private: 5214 const uint32_t field_index_; 5215 5216 
DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 5217}; 5218 5219class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 5220 public: 5221 HUnresolvedInstanceFieldSet(HInstruction* obj, 5222 HInstruction* value, 5223 Primitive::Type field_type, 5224 uint32_t field_index, 5225 uint32_t dex_pc) 5226 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5227 field_type_(field_type), 5228 field_index_(field_index) { 5229 DCHECK_EQ(field_type, value->GetType()); 5230 SetRawInputAt(0, obj); 5231 SetRawInputAt(1, value); 5232 } 5233 5234 bool NeedsEnvironment() const OVERRIDE { return true; } 5235 bool CanThrow() const OVERRIDE { return true; } 5236 5237 Primitive::Type GetFieldType() const { return field_type_; } 5238 uint32_t GetFieldIndex() const { return field_index_; } 5239 5240 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 5241 5242 private: 5243 const Primitive::Type field_type_; 5244 const uint32_t field_index_; 5245 5246 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 5247}; 5248 5249class HUnresolvedStaticFieldGet : public HExpression<0> { 5250 public: 5251 HUnresolvedStaticFieldGet(Primitive::Type field_type, 5252 uint32_t field_index, 5253 uint32_t dex_pc) 5254 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5255 field_index_(field_index) { 5256 } 5257 5258 bool NeedsEnvironment() const OVERRIDE { return true; } 5259 bool CanThrow() const OVERRIDE { return true; } 5260 5261 Primitive::Type GetFieldType() const { return GetType(); } 5262 uint32_t GetFieldIndex() const { return field_index_; } 5263 5264 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 5265 5266 private: 5267 const uint32_t field_index_; 5268 5269 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 5270}; 5271 5272class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 5273 public: 5274 HUnresolvedStaticFieldSet(HInstruction* value, 5275 Primitive::Type field_type, 5276 uint32_t field_index, 5277 uint32_t dex_pc) 5278 
: HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5279 field_type_(field_type), 5280 field_index_(field_index) { 5281 DCHECK_EQ(field_type, value->GetType()); 5282 SetRawInputAt(0, value); 5283 } 5284 5285 bool NeedsEnvironment() const OVERRIDE { return true; } 5286 bool CanThrow() const OVERRIDE { return true; } 5287 5288 Primitive::Type GetFieldType() const { return field_type_; } 5289 uint32_t GetFieldIndex() const { return field_index_; } 5290 5291 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 5292 5293 private: 5294 const Primitive::Type field_type_; 5295 const uint32_t field_index_; 5296 5297 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 5298}; 5299 5300// Implement the move-exception DEX instruction. 5301class HLoadException : public HExpression<0> { 5302 public: 5303 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 5304 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 5305 5306 bool CanBeNull() const OVERRIDE { return false; } 5307 5308 DECLARE_INSTRUCTION(LoadException); 5309 5310 private: 5311 DISALLOW_COPY_AND_ASSIGN(HLoadException); 5312}; 5313 5314// Implicit part of move-exception which clears thread-local exception storage. 5315// Must not be removed because the runtime expects the TLS to get cleared. 
5316class HClearException : public HTemplateInstruction<0> { 5317 public: 5318 explicit HClearException(uint32_t dex_pc = kNoDexPc) 5319 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 5320 5321 DECLARE_INSTRUCTION(ClearException); 5322 5323 private: 5324 DISALLOW_COPY_AND_ASSIGN(HClearException); 5325}; 5326 5327class HThrow : public HTemplateInstruction<1> { 5328 public: 5329 HThrow(HInstruction* exception, uint32_t dex_pc) 5330 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 5331 SetRawInputAt(0, exception); 5332 } 5333 5334 bool IsControlFlow() const OVERRIDE { return true; } 5335 5336 bool NeedsEnvironment() const OVERRIDE { return true; } 5337 5338 bool CanThrow() const OVERRIDE { return true; } 5339 5340 5341 DECLARE_INSTRUCTION(Throw); 5342 5343 private: 5344 DISALLOW_COPY_AND_ASSIGN(HThrow); 5345}; 5346 5347/** 5348 * Implementation strategies for the code generator of a HInstanceOf 5349 * or `HCheckCast`. 5350 */ 5351enum class TypeCheckKind { 5352 kUnresolvedCheck, // Check against an unresolved type. 5353 kExactCheck, // Can do a single class compare. 5354 kClassHierarchyCheck, // Can just walk the super class chain. 5355 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 5356 kInterfaceCheck, // No optimization yet when checking against an interface. 5357 kArrayObjectCheck, // Can just check if the array is not primitive. 5358 kArrayCheck // No optimization yet when checking against a generic array. 
5359}; 5360 5361class HInstanceOf : public HExpression<2> { 5362 public: 5363 HInstanceOf(HInstruction* object, 5364 HLoadClass* constant, 5365 TypeCheckKind check_kind, 5366 uint32_t dex_pc) 5367 : HExpression(Primitive::kPrimBoolean, 5368 SideEffectsForArchRuntimeCalls(check_kind), 5369 dex_pc), 5370 check_kind_(check_kind), 5371 must_do_null_check_(true) { 5372 SetRawInputAt(0, object); 5373 SetRawInputAt(1, constant); 5374 } 5375 5376 bool CanBeMoved() const OVERRIDE { return true; } 5377 5378 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5379 return true; 5380 } 5381 5382 bool NeedsEnvironment() const OVERRIDE { 5383 return false; 5384 } 5385 5386 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5387 5388 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5389 5390 // Used only in code generation. 5391 bool MustDoNullCheck() const { return must_do_null_check_; } 5392 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5393 5394 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 5395 return (check_kind == TypeCheckKind::kExactCheck) 5396 ? SideEffects::None() 5397 // Mips currently does runtime calls for any other checks. 5398 : SideEffects::CanTriggerGC(); 5399 } 5400 5401 DECLARE_INSTRUCTION(InstanceOf); 5402 5403 private: 5404 const TypeCheckKind check_kind_; 5405 bool must_do_null_check_; 5406 5407 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 5408}; 5409 5410class HBoundType : public HExpression<1> { 5411 public: 5412 // Constructs an HBoundType with the given upper_bound. 5413 // Ensures that the upper_bound is valid. 
5414 HBoundType(HInstruction* input, 5415 ReferenceTypeInfo upper_bound, 5416 bool upper_can_be_null, 5417 uint32_t dex_pc = kNoDexPc) 5418 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 5419 upper_bound_(upper_bound), 5420 upper_can_be_null_(upper_can_be_null), 5421 can_be_null_(upper_can_be_null) { 5422 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 5423 SetRawInputAt(0, input); 5424 SetReferenceTypeInfo(upper_bound_); 5425 } 5426 5427 // GetUpper* should only be used in reference type propagation. 5428 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 5429 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 5430 5431 void SetCanBeNull(bool can_be_null) { 5432 DCHECK(upper_can_be_null_ || !can_be_null); 5433 can_be_null_ = can_be_null; 5434 } 5435 5436 bool CanBeNull() const OVERRIDE { return can_be_null_; } 5437 5438 DECLARE_INSTRUCTION(BoundType); 5439 5440 private: 5441 // Encodes the most upper class that this instruction can have. In other words 5442 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 5443 // It is used to bound the type in cases like: 5444 // if (x instanceof ClassX) { 5445 // // uper_bound_ will be ClassX 5446 // } 5447 const ReferenceTypeInfo upper_bound_; 5448 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5449 // is false then can_be_null_ cannot be true). 
5450 const bool upper_can_be_null_; 5451 bool can_be_null_; 5452 5453 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5454}; 5455 5456class HCheckCast : public HTemplateInstruction<2> { 5457 public: 5458 HCheckCast(HInstruction* object, 5459 HLoadClass* constant, 5460 TypeCheckKind check_kind, 5461 uint32_t dex_pc) 5462 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5463 check_kind_(check_kind), 5464 must_do_null_check_(true) { 5465 SetRawInputAt(0, object); 5466 SetRawInputAt(1, constant); 5467 } 5468 5469 bool CanBeMoved() const OVERRIDE { return true; } 5470 5471 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5472 return true; 5473 } 5474 5475 bool NeedsEnvironment() const OVERRIDE { 5476 // Instruction may throw a CheckCastError. 5477 return true; 5478 } 5479 5480 bool CanThrow() const OVERRIDE { return true; } 5481 5482 bool MustDoNullCheck() const { return must_do_null_check_; } 5483 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5484 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5485 5486 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5487 5488 DECLARE_INSTRUCTION(CheckCast); 5489 5490 private: 5491 const TypeCheckKind check_kind_; 5492 bool must_do_null_check_; 5493 5494 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5495}; 5496 5497class HMemoryBarrier : public HTemplateInstruction<0> { 5498 public: 5499 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5500 : HTemplateInstruction( 5501 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 
5502 barrier_kind_(barrier_kind) {} 5503 5504 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5505 5506 DECLARE_INSTRUCTION(MemoryBarrier); 5507 5508 private: 5509 const MemBarrierKind barrier_kind_; 5510 5511 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5512}; 5513 5514class HMonitorOperation : public HTemplateInstruction<1> { 5515 public: 5516 enum OperationKind { 5517 kEnter, 5518 kExit, 5519 }; 5520 5521 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5522 : HTemplateInstruction( 5523 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5524 kind_(kind) { 5525 SetRawInputAt(0, object); 5526 } 5527 5528 // Instruction may throw a Java exception, so we need an environment. 5529 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5530 5531 bool CanThrow() const OVERRIDE { 5532 // Verifier guarantees that monitor-exit cannot throw. 5533 // This is important because it allows the HGraphBuilder to remove 5534 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5535 return IsEnter(); 5536 } 5537 5538 5539 bool IsEnter() const { return kind_ == kEnter; } 5540 5541 DECLARE_INSTRUCTION(MonitorOperation); 5542 5543 private: 5544 const OperationKind kind_; 5545 5546 private: 5547 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5548}; 5549 5550/** 5551 * A HInstruction used as a marker for the replacement of new + <init> 5552 * of a String to a call to a StringFactory. Only baseline will see 5553 * the node at code generation, where it will be be treated as null. 5554 * When compiling non-baseline, `HFakeString` instructions are being removed 5555 * in the instruction simplifier. 
5556 */ 5557class HFakeString : public HTemplateInstruction<0> { 5558 public: 5559 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5560 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5561 5562 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5563 5564 DECLARE_INSTRUCTION(FakeString); 5565 5566 private: 5567 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5568}; 5569 5570class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5571 public: 5572 MoveOperands(Location source, 5573 Location destination, 5574 Primitive::Type type, 5575 HInstruction* instruction) 5576 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5577 5578 Location GetSource() const { return source_; } 5579 Location GetDestination() const { return destination_; } 5580 5581 void SetSource(Location value) { source_ = value; } 5582 void SetDestination(Location value) { destination_ = value; } 5583 5584 // The parallel move resolver marks moves as "in-progress" by clearing the 5585 // destination (but not the source). 5586 Location MarkPending() { 5587 DCHECK(!IsPending()); 5588 Location dest = destination_; 5589 destination_ = Location::NoLocation(); 5590 return dest; 5591 } 5592 5593 void ClearPending(Location dest) { 5594 DCHECK(IsPending()); 5595 destination_ = dest; 5596 } 5597 5598 bool IsPending() const { 5599 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5600 return destination_.IsInvalid() && !source_.IsInvalid(); 5601 } 5602 5603 // True if this blocks a move from the given location. 5604 bool Blocks(Location loc) const { 5605 return !IsEliminated() && source_.OverlapsWith(loc); 5606 } 5607 5608 // A move is redundant if it's been eliminated, if its source and 5609 // destination are the same, or if its destination is unneeded. 
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of its user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

// Initial capacity reserved for the move list of an HParallelMove.
static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must appear to execute simultaneously. It is turned
// into an ordered sequence of individual moves by the parallel move resolver
// at code generation time.
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that destinations do not
  // overlap and that at most one move per instruction is recorded (modulo
  // the spill-slot special case below).
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      // No two moves in the same parallel move may write to overlapping
      // destinations.
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#ifdef ART_ENABLE_CODEGEN_arm
#include "nodes_arm.h"
#endif
#ifdef ART_ENABLE_CODEGEN_arm64
#include "nodes_arm64.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base class for visitors over the instructions of a graph. Subclasses
// override VisitInstruction() and/or the per-instruction Visit##name()
// hooks generated below.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};

// Visitor whose per-instruction hooks default to calling the hook of the
// instruction's super class, so overriding Visit##super also catches all
// of its subclasses that are not overridden individually.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};

// Iterates over the blocks of a graph in insertion order.
class HInsertionOrderIterator : public ValueObject {
 public:
  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}

  bool Done() const { return index_ == graph_.GetBlocks().size(); }
  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
};

// Iterates over the blocks of a graph in reverse post order.
class HReversePostOrderIterator : public ValueObject {
 public:
  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
    // Check that reverse post order of the graph has been built.
5782 DCHECK(!graph.GetReversePostOrder().empty()); 5783 } 5784 5785 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5786 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5787 void Advance() { ++index_; } 5788 5789 private: 5790 const HGraph& graph_; 5791 size_t index_; 5792 5793 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5794}; 5795 5796class HPostOrderIterator : public ValueObject { 5797 public: 5798 explicit HPostOrderIterator(const HGraph& graph) 5799 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5800 // Check that reverse post order of the graph has been built. 5801 DCHECK(!graph.GetReversePostOrder().empty()); 5802 } 5803 5804 bool Done() const { return index_ == 0; } 5805 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5806 void Advance() { --index_; } 5807 5808 private: 5809 const HGraph& graph_; 5810 size_t index_; 5811 5812 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5813}; 5814 5815class HLinearPostOrderIterator : public ValueObject { 5816 public: 5817 explicit HLinearPostOrderIterator(const HGraph& graph) 5818 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5819 5820 bool Done() const { return index_ == 0; } 5821 5822 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5823 5824 void Advance() { 5825 --index_; 5826 DCHECK_GE(index_, 0U); 5827 } 5828 5829 private: 5830 const ArenaVector<HBasicBlock*>& order_; 5831 size_t index_; 5832 5833 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5834}; 5835 5836class HLinearOrderIterator : public ValueObject { 5837 public: 5838 explicit HLinearOrderIterator(const HGraph& graph) 5839 : order_(graph.GetLinearOrder()), index_(0) {} 5840 5841 bool Done() const { return index_ == order_.size(); } 5842 HBasicBlock* Current() const { return order_[index_]; } 5843 void Advance() { ++index_; } 5844 5845 private: 5846 const ArenaVector<HBasicBlock*>& order_; 5847 
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip ahead if the block with the lowest id is not part of the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Move to the next block id whose bit is set in the loop's block set.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead if the first block in reverse post order is not in the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Move to the next reverse-post-order block that belongs to the loop.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Extracts the value of an int or long constant as an int64_t.
inline int64_t Int64FromConstant(HConstant* constant) {
  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
  return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                   : constant->AsLongConstant()->GetValue();
}

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
5931 return &lhs == &rhs; 5932} 5933 5934#define INSTRUCTION_TYPE_CHECK(type, super) \ 5935 inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \ 5936 inline const H##type* HInstruction::As##type() const { \ 5937 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \ 5938 } \ 5939 inline H##type* HInstruction::As##type() { \ 5940 return Is##type() ? static_cast<H##type*>(this) : nullptr; \ 5941 } 5942 5943 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 5944#undef INSTRUCTION_TYPE_CHECK 5945 5946} // namespace art 5947 5948#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5949