nodes.h revision 917d01680714b2295f109f8fea0aa06764a30b70
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_bit_vector.h" 25#include "base/arena_containers.h" 26#include "base/arena_object.h" 27#include "base/stl_util.h" 28#include "dex/compiler_enums.h" 29#include "entrypoints/quick/quick_entrypoints_enum.h" 30#include "handle.h" 31#include "handle_scope.h" 32#include "invoke_type.h" 33#include "locations.h" 34#include "method_reference.h" 35#include "mirror/class.h" 36#include "offsets.h" 37#include "primitive.h" 38#include "utils/array_ref.h" 39 40namespace art { 41 42class GraphChecker; 43class HBasicBlock; 44class HCurrentMethod; 45class HDoubleConstant; 46class HEnvironment; 47class HFakeString; 48class HFloatConstant; 49class HGraphBuilder; 50class HGraphVisitor; 51class HInstruction; 52class HIntConstant; 53class HInvoke; 54class HLongConstant; 55class HNullConstant; 56class HPhi; 57class HSuspendCheck; 58class HTryBoundary; 59class LiveInterval; 60class LocationSummary; 61class SlowPathCode; 62class SsaBuilder; 63 64namespace mirror { 65class DexCache; 66} // namespace mirror 67 68static const int kDefaultNumberOfBlocks = 8; 69static const int kDefaultNumberOfSuccessors = 2; 70static const int kDefaultNumberOfPredecessors = 2; 71static const int 
kDefaultNumberOfExceptionalPredecessors = 0; 72static const int kDefaultNumberOfDominatedBlocks = 1; 73static const int kDefaultNumberOfBackEdges = 1; 74 75static constexpr uint32_t kMaxIntShiftValue = 0x1f; 76static constexpr uint64_t kMaxLongShiftValue = 0x3f; 77 78static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 79static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1); 80 81static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 82 83static constexpr uint32_t kNoDexPc = -1; 84 85enum IfCondition { 86 // All types. 87 kCondEQ, // == 88 kCondNE, // != 89 // Signed integers and floating-point numbers. 90 kCondLT, // < 91 kCondLE, // <= 92 kCondGT, // > 93 kCondGE, // >= 94 // Unsigned integers. 95 kCondB, // < 96 kCondBE, // <= 97 kCondA, // > 98 kCondAE, // >= 99}; 100 101class HInstructionList : public ValueObject { 102 public: 103 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 104 105 void AddInstruction(HInstruction* instruction); 106 void RemoveInstruction(HInstruction* instruction); 107 108 // Insert `instruction` before/after an existing instruction `cursor`. 109 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 110 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 111 112 // Return true if this list contains `instruction`. 113 bool Contains(HInstruction* instruction) const; 114 115 // Return true if `instruction1` is found before `instruction2` in 116 // this instruction list and false otherwise. Abort if none 117 // of these instructions is found. 118 bool FoundBefore(const HInstruction* instruction1, 119 const HInstruction* instruction2) const; 120 121 bool IsEmpty() const { return first_instruction_ == nullptr; } 122 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 123 124 // Update the block of all instructions to be `block`. 
125 void SetBlockOfInstructions(HBasicBlock* block) const; 126 127 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 128 void Add(const HInstructionList& instruction_list); 129 130 // Return the number of instructions in the list. This is an expensive operation. 131 size_t CountSize() const; 132 133 private: 134 HInstruction* first_instruction_; 135 HInstruction* last_instruction_; 136 137 friend class HBasicBlock; 138 friend class HGraph; 139 friend class HInstruction; 140 friend class HInstructionIterator; 141 friend class HBackwardInstructionIterator; 142 143 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 144}; 145 146// Control-flow graph of a method. Contains a list of basic blocks. 147class HGraph : public ArenaObject<kArenaAllocGraph> { 148 public: 149 HGraph(ArenaAllocator* arena, 150 const DexFile& dex_file, 151 uint32_t method_idx, 152 bool should_generate_constructor_barrier, 153 InstructionSet instruction_set, 154 InvokeType invoke_type = kInvalidInvokeType, 155 bool debuggable = false, 156 int start_instruction_id = 0) 157 : arena_(arena), 158 blocks_(arena->Adapter(kArenaAllocBlockList)), 159 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 160 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 161 entry_block_(nullptr), 162 exit_block_(nullptr), 163 maximum_number_of_out_vregs_(0), 164 number_of_vregs_(0), 165 number_of_in_vregs_(0), 166 temporaries_vreg_slots_(0), 167 has_bounds_checks_(false), 168 has_try_catch_(false), 169 debuggable_(debuggable), 170 current_instruction_id_(start_instruction_id), 171 dex_file_(dex_file), 172 method_idx_(method_idx), 173 invoke_type_(invoke_type), 174 in_ssa_form_(false), 175 should_generate_constructor_barrier_(should_generate_constructor_barrier), 176 instruction_set_(instruction_set), 177 cached_null_constant_(nullptr), 178 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 179 cached_float_constants_(std::less<int32_t>(), 
arena->Adapter(kArenaAllocConstantsMap)), 180 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 181 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 182 cached_current_method_(nullptr) { 183 blocks_.reserve(kDefaultNumberOfBlocks); 184 } 185 186 ArenaAllocator* GetArena() const { return arena_; } 187 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 188 189 bool IsInSsaForm() const { return in_ssa_form_; } 190 191 HBasicBlock* GetEntryBlock() const { return entry_block_; } 192 HBasicBlock* GetExitBlock() const { return exit_block_; } 193 bool HasExitBlock() const { return exit_block_ != nullptr; } 194 195 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 196 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 197 198 void AddBlock(HBasicBlock* block); 199 200 // Try building the SSA form of this graph, with dominance computation and loop 201 // recognition. Returns whether it was successful in doing all these steps. 202 bool TryBuildingSsa() { 203 BuildDominatorTree(); 204 // The SSA builder requires loops to all be natural. Specifically, the dead phi 205 // elimination phase checks the consistency of the graph when doing a post-order 206 // visit for eliminating dead phis: a dead phi can only have loop header phi 207 // users remaining when being visited. 208 if (!AnalyzeNaturalLoops()) return false; 209 // Precompute per-block try membership before entering the SSA builder, 210 // which needs the information to build catch block phis from values of 211 // locals at throwing instructions inside try blocks. 
212 ComputeTryBlockInformation(); 213 TransformToSsa(); 214 in_ssa_form_ = true; 215 return true; 216 } 217 218 void ComputeDominanceInformation(); 219 void ClearDominanceInformation(); 220 221 void BuildDominatorTree(); 222 void TransformToSsa(); 223 void SimplifyCFG(); 224 void SimplifyCatchBlocks(); 225 226 // Analyze all natural loops in this graph. Returns false if one 227 // loop is not natural, that is the header does not dominate the 228 // back edge. 229 bool AnalyzeNaturalLoops() const; 230 231 // Iterate over blocks to compute try block membership. Needs reverse post 232 // order and loop information. 233 void ComputeTryBlockInformation(); 234 235 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 236 // Returns the instruction used to replace the invoke expression or null if the 237 // invoke is for a void method. 238 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 239 240 // Need to add a couple of blocks to test if the loop body is entered and 241 // put deoptimization instructions, etc. 242 void TransformLoopHeaderForBCE(HBasicBlock* header); 243 244 // Removes `block` from the graph. Assumes `block` has been disconnected from 245 // other blocks and has no instructions or phis. 246 void DeleteDeadEmptyBlock(HBasicBlock* block); 247 248 // Splits the edge between `block` and `successor` while preserving the 249 // indices in the predecessor/successor lists. If there are multiple edges 250 // between the blocks, the lowest indices are used. 251 // Returns the new block which is empty and has the same dex pc as `successor`. 
252 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 253 254 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 255 void SimplifyLoop(HBasicBlock* header); 256 257 int32_t GetNextInstructionId() { 258 DCHECK_NE(current_instruction_id_, INT32_MAX); 259 return current_instruction_id_++; 260 } 261 262 int32_t GetCurrentInstructionId() const { 263 return current_instruction_id_; 264 } 265 266 void SetCurrentInstructionId(int32_t id) { 267 current_instruction_id_ = id; 268 } 269 270 uint16_t GetMaximumNumberOfOutVRegs() const { 271 return maximum_number_of_out_vregs_; 272 } 273 274 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 275 maximum_number_of_out_vregs_ = new_value; 276 } 277 278 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 279 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 280 } 281 282 void UpdateTemporariesVRegSlots(size_t slots) { 283 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 284 } 285 286 size_t GetTemporariesVRegSlots() const { 287 DCHECK(!in_ssa_form_); 288 return temporaries_vreg_slots_; 289 } 290 291 void SetNumberOfVRegs(uint16_t number_of_vregs) { 292 number_of_vregs_ = number_of_vregs; 293 } 294 295 uint16_t GetNumberOfVRegs() const { 296 return number_of_vregs_; 297 } 298 299 void SetNumberOfInVRegs(uint16_t value) { 300 number_of_in_vregs_ = value; 301 } 302 303 uint16_t GetNumberOfLocalVRegs() const { 304 DCHECK(!in_ssa_form_); 305 return number_of_vregs_ - number_of_in_vregs_; 306 } 307 308 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 309 return reverse_post_order_; 310 } 311 312 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 313 return linear_order_; 314 } 315 316 bool HasBoundsChecks() const { 317 return has_bounds_checks_; 318 } 319 320 void SetHasBoundsChecks(bool value) { 321 has_bounds_checks_ = value; 322 } 323 324 bool ShouldGenerateConstructorBarrier() const { 325 return 
should_generate_constructor_barrier_; 326 } 327 328 bool IsDebuggable() const { return debuggable_; } 329 330 // Returns a constant of the given type and value. If it does not exist 331 // already, it is created and inserted into the graph. This method is only for 332 // integral types. 333 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 334 335 // TODO: This is problematic for the consistency of reference type propagation 336 // because it can be created anytime after the pass and thus it will be left 337 // with an invalid type. 338 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 339 340 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 341 return CreateConstant(value, &cached_int_constants_, dex_pc); 342 } 343 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 344 return CreateConstant(value, &cached_long_constants_, dex_pc); 345 } 346 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 347 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 348 } 349 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 350 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 351 } 352 353 HCurrentMethod* GetCurrentMethod(); 354 355 const DexFile& GetDexFile() const { 356 return dex_file_; 357 } 358 359 uint32_t GetMethodIdx() const { 360 return method_idx_; 361 } 362 363 InvokeType GetInvokeType() const { 364 return invoke_type_; 365 } 366 367 InstructionSet GetInstructionSet() const { 368 return instruction_set_; 369 } 370 371 bool HasTryCatch() const { return has_try_catch_; } 372 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 373 374 ArtMethod* GetArtMethod() const { return art_method_; } 375 void SetArtMethod(ArtMethod* method) { art_method_ = method; } 376 377 // Returns an instruction with the opposite boolean value from 'cond'. 
378 // The instruction has been inserted into the graph, either as a constant, or 379 // before cursor. 380 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor); 381 382 private: 383 void FindBackEdges(ArenaBitVector* visited); 384 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 385 void RemoveDeadBlocks(const ArenaBitVector& visited); 386 387 template <class InstructionType, typename ValueType> 388 InstructionType* CreateConstant(ValueType value, 389 ArenaSafeMap<ValueType, InstructionType*>* cache, 390 uint32_t dex_pc = kNoDexPc) { 391 // Try to find an existing constant of the given value. 392 InstructionType* constant = nullptr; 393 auto cached_constant = cache->find(value); 394 if (cached_constant != cache->end()) { 395 constant = cached_constant->second; 396 } 397 398 // If not found or previously deleted, create and cache a new instruction. 399 // Don't bother reviving a previously deleted instruction, for simplicity. 400 if (constant == nullptr || constant->GetBlock() == nullptr) { 401 constant = new (arena_) InstructionType(value, dex_pc); 402 cache->Overwrite(value, constant); 403 InsertConstant(constant); 404 } 405 return constant; 406 } 407 408 void InsertConstant(HConstant* instruction); 409 410 // Cache a float constant into the graph. This method should only be 411 // called by the SsaBuilder when creating "equivalent" instructions. 412 void CacheFloatConstant(HFloatConstant* constant); 413 414 // See CacheFloatConstant comment. 415 void CacheDoubleConstant(HDoubleConstant* constant); 416 417 ArenaAllocator* const arena_; 418 419 // List of blocks in insertion order. 420 ArenaVector<HBasicBlock*> blocks_; 421 422 // List of blocks to perform a reverse post order tree traversal. 423 ArenaVector<HBasicBlock*> reverse_post_order_; 424 425 // List of blocks to perform a linear order tree traversal. 
426 ArenaVector<HBasicBlock*> linear_order_; 427 428 HBasicBlock* entry_block_; 429 HBasicBlock* exit_block_; 430 431 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 432 uint16_t maximum_number_of_out_vregs_; 433 434 // The number of virtual registers in this method. Contains the parameters. 435 uint16_t number_of_vregs_; 436 437 // The number of virtual registers used by parameters of this method. 438 uint16_t number_of_in_vregs_; 439 440 // Number of vreg size slots that the temporaries use (used in baseline compiler). 441 size_t temporaries_vreg_slots_; 442 443 // Has bounds checks. We can totally skip BCE if it's false. 444 bool has_bounds_checks_; 445 446 // Flag whether there are any try/catch blocks in the graph. We will skip 447 // try/catch-related passes if false. 448 bool has_try_catch_; 449 450 // Indicates whether the graph should be compiled in a way that 451 // ensures full debuggability. If false, we can apply more 452 // aggressive optimizations that may limit the level of debugging. 453 const bool debuggable_; 454 455 // The current id to assign to a newly added instruction. See HInstruction.id_. 456 int32_t current_instruction_id_; 457 458 // The dex file from which the method is from. 459 const DexFile& dex_file_; 460 461 // The method index in the dex file. 462 const uint32_t method_idx_; 463 464 // If inlined, this encodes how the callee is being invoked. 465 const InvokeType invoke_type_; 466 467 // Whether the graph has been transformed to SSA form. Only used 468 // in debug mode to ensure we are not using properties only valid 469 // for non-SSA form (like the number of temporaries). 470 bool in_ssa_form_; 471 472 const bool should_generate_constructor_barrier_; 473 474 const InstructionSet instruction_set_; 475 476 // Cached constants. 
477 HNullConstant* cached_null_constant_; 478 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 479 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 480 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 481 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 482 483 HCurrentMethod* cached_current_method_; 484 485 // The ArtMethod this graph is for. Note that for AOT, it may be null, 486 // for example for methods whose declaring class could not be resolved 487 // (such as when the superclass could not be found). 488 ArtMethod* art_method_; 489 490 friend class SsaBuilder; // For caching constants. 491 friend class SsaLivenessAnalysis; // For the linear order. 492 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 493 DISALLOW_COPY_AND_ASSIGN(HGraph); 494}; 495 496class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 497 public: 498 HLoopInformation(HBasicBlock* header, HGraph* graph) 499 : header_(header), 500 suspend_check_(nullptr), 501 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 502 // Make bit vector growable, as the number of blocks may change. 
503 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 504 back_edges_.reserve(kDefaultNumberOfBackEdges); 505 } 506 507 HBasicBlock* GetHeader() const { 508 return header_; 509 } 510 511 void SetHeader(HBasicBlock* block) { 512 header_ = block; 513 } 514 515 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 516 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 517 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 518 519 void AddBackEdge(HBasicBlock* back_edge) { 520 back_edges_.push_back(back_edge); 521 } 522 523 void RemoveBackEdge(HBasicBlock* back_edge) { 524 RemoveElement(back_edges_, back_edge); 525 } 526 527 bool IsBackEdge(const HBasicBlock& block) const { 528 return ContainsElement(back_edges_, &block); 529 } 530 531 size_t NumberOfBackEdges() const { 532 return back_edges_.size(); 533 } 534 535 HBasicBlock* GetPreHeader() const; 536 537 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 538 return back_edges_; 539 } 540 541 // Returns the lifetime position of the back edge that has the 542 // greatest lifetime position. 543 size_t GetLifetimeEnd() const; 544 545 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 546 ReplaceElement(back_edges_, existing, new_back_edge); 547 } 548 549 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 550 // that is the header dominates the back edge. 551 bool Populate(); 552 553 // Reanalyzes the loop by removing loop info from its blocks and re-running 554 // Populate(). If there are no back edges left, the loop info is completely 555 // removed as well as its SuspendCheck instruction. It must be run on nested 556 // inner loops first. 557 void Update(); 558 559 // Returns whether this loop information contains `block`. 560 // Note that this loop information *must* be populated before entering this function. 
561 bool Contains(const HBasicBlock& block) const; 562 563 // Returns whether this loop information is an inner loop of `other`. 564 // Note that `other` *must* be populated before entering this function. 565 bool IsIn(const HLoopInformation& other) const; 566 567 // Returns true if instruction is not defined within this loop. 568 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const; 569 570 const ArenaBitVector& GetBlocks() const { return blocks_; } 571 572 void Add(HBasicBlock* block); 573 void Remove(HBasicBlock* block); 574 575 private: 576 // Internal recursive implementation of `Populate`. 577 void PopulateRecursive(HBasicBlock* block); 578 579 HBasicBlock* header_; 580 HSuspendCheck* suspend_check_; 581 ArenaVector<HBasicBlock*> back_edges_; 582 ArenaBitVector blocks_; 583 584 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 585}; 586 587// Stores try/catch information for basic blocks. 588// Note that HGraph is constructed so that catch blocks cannot simultaneously 589// be try blocks. 590class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 591 public: 592 // Try block information constructor. 593 explicit TryCatchInformation(const HTryBoundary& try_entry) 594 : try_entry_(&try_entry), 595 catch_dex_file_(nullptr), 596 catch_type_index_(DexFile::kDexNoIndex16) { 597 DCHECK(try_entry_ != nullptr); 598 } 599 600 // Catch block information constructor. 
601 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 602 : try_entry_(nullptr), 603 catch_dex_file_(&dex_file), 604 catch_type_index_(catch_type_index) {} 605 606 bool IsTryBlock() const { return try_entry_ != nullptr; } 607 608 const HTryBoundary& GetTryEntry() const { 609 DCHECK(IsTryBlock()); 610 return *try_entry_; 611 } 612 613 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 614 615 bool IsCatchAllTypeIndex() const { 616 DCHECK(IsCatchBlock()); 617 return catch_type_index_ == DexFile::kDexNoIndex16; 618 } 619 620 uint16_t GetCatchTypeIndex() const { 621 DCHECK(IsCatchBlock()); 622 return catch_type_index_; 623 } 624 625 const DexFile& GetCatchDexFile() const { 626 DCHECK(IsCatchBlock()); 627 return *catch_dex_file_; 628 } 629 630 private: 631 // One of possibly several TryBoundary instructions entering the block's try. 632 // Only set for try blocks. 633 const HTryBoundary* try_entry_; 634 635 // Exception type information. Only set for catch blocks. 636 const DexFile* catch_dex_file_; 637 const uint16_t catch_type_index_; 638}; 639 640static constexpr size_t kNoLifetime = -1; 641static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 642 643// A block in a method. Contains the list of instructions represented 644// as a double linked list. Each block knows its predecessors and 645// successors. 
646 647class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 648 public: 649 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 650 : graph_(graph), 651 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 652 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 653 loop_information_(nullptr), 654 dominator_(nullptr), 655 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 656 block_id_(kInvalidBlockId), 657 dex_pc_(dex_pc), 658 lifetime_start_(kNoLifetime), 659 lifetime_end_(kNoLifetime), 660 try_catch_information_(nullptr) { 661 predecessors_.reserve(kDefaultNumberOfPredecessors); 662 successors_.reserve(kDefaultNumberOfSuccessors); 663 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 664 } 665 666 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 667 return predecessors_; 668 } 669 670 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 671 return successors_; 672 } 673 674 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const; 675 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const; 676 677 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 678 return ContainsElement(successors_, block, start_from); 679 } 680 681 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 682 return dominated_blocks_; 683 } 684 685 bool IsEntryBlock() const { 686 return graph_->GetEntryBlock() == this; 687 } 688 689 bool IsExitBlock() const { 690 return graph_->GetExitBlock() == this; 691 } 692 693 bool IsSingleGoto() const; 694 bool IsSingleTryBoundary() const; 695 696 // Returns true if this block emits nothing but a jump. 697 bool IsSingleJump() const { 698 HLoopInformation* loop_info = GetLoopInformation(); 699 return (IsSingleGoto() || IsSingleTryBoundary()) 700 // Back edges generate a suspend check. 
701 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 702 } 703 704 void AddBackEdge(HBasicBlock* back_edge) { 705 if (loop_information_ == nullptr) { 706 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 707 } 708 DCHECK_EQ(loop_information_->GetHeader(), this); 709 loop_information_->AddBackEdge(back_edge); 710 } 711 712 HGraph* GetGraph() const { return graph_; } 713 void SetGraph(HGraph* graph) { graph_ = graph; } 714 715 uint32_t GetBlockId() const { return block_id_; } 716 void SetBlockId(int id) { block_id_ = id; } 717 uint32_t GetDexPc() const { return dex_pc_; } 718 719 HBasicBlock* GetDominator() const { return dominator_; } 720 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 721 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 722 723 void RemoveDominatedBlock(HBasicBlock* block) { 724 RemoveElement(dominated_blocks_, block); 725 } 726 727 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 728 ReplaceElement(dominated_blocks_, existing, new_block); 729 } 730 731 void ClearDominanceInformation(); 732 733 int NumberOfBackEdges() const { 734 return IsLoopHeader() ? 
loop_information_->NumberOfBackEdges() : 0; 735 } 736 737 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 738 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 739 const HInstructionList& GetInstructions() const { return instructions_; } 740 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 741 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 742 const HInstructionList& GetPhis() const { return phis_; } 743 744 void AddSuccessor(HBasicBlock* block) { 745 successors_.push_back(block); 746 block->predecessors_.push_back(this); 747 } 748 749 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 750 size_t successor_index = GetSuccessorIndexOf(existing); 751 existing->RemovePredecessor(this); 752 new_block->predecessors_.push_back(this); 753 successors_[successor_index] = new_block; 754 } 755 756 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 757 size_t predecessor_index = GetPredecessorIndexOf(existing); 758 existing->RemoveSuccessor(this); 759 new_block->successors_.push_back(this); 760 predecessors_[predecessor_index] = new_block; 761 } 762 763 // Insert `this` between `predecessor` and `successor. This method 764 // preserves the indicies, and will update the first edge found between 765 // `predecessor` and `successor`. 
766 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 767 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 768 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 769 successor->predecessors_[predecessor_index] = this; 770 predecessor->successors_[successor_index] = this; 771 successors_.push_back(successor); 772 predecessors_.push_back(predecessor); 773 } 774 775 void RemovePredecessor(HBasicBlock* block) { 776 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 777 } 778 779 void RemoveSuccessor(HBasicBlock* block) { 780 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 781 } 782 783 void ClearAllPredecessors() { 784 predecessors_.clear(); 785 } 786 787 void AddPredecessor(HBasicBlock* block) { 788 predecessors_.push_back(block); 789 block->successors_.push_back(this); 790 } 791 792 void SwapPredecessors() { 793 DCHECK_EQ(predecessors_.size(), 2u); 794 std::swap(predecessors_[0], predecessors_[1]); 795 } 796 797 void SwapSuccessors() { 798 DCHECK_EQ(successors_.size(), 2u); 799 std::swap(successors_[0], successors_[1]); 800 } 801 802 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 803 return IndexOfElement(predecessors_, predecessor); 804 } 805 806 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 807 return IndexOfElement(successors_, successor); 808 } 809 810 HBasicBlock* GetSinglePredecessor() const { 811 DCHECK_EQ(GetPredecessors().size(), 1u); 812 return GetPredecessors()[0]; 813 } 814 815 HBasicBlock* GetSingleSuccessor() const { 816 DCHECK_EQ(GetSuccessors().size(), 1u); 817 return GetSuccessors()[0]; 818 } 819 820 // Returns whether the first occurrence of `predecessor` in the list of 821 // predecessors is at index `idx`. 
  // Returns true if `predecessor` sits at index `idx` and that index is the
  // first occurrence of `predecessor` in this block's predecessor list.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Split catch block into two blocks after the original move-exception bytecode
  // instruction, or at the beginning if not present. Returns the newly created,
  // latter block, or nullptr if such block could not be created (must be dead
  // in that case). Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitCatchBlockAfterMoveException();

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  // Appends `instruction` at the end of this block's instruction list.
  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // Returns true if this block is the header of the loop it belongs to.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions, used by liveness analysis / register allocation.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    // The pre-header of the current loop belongs to the enclosing loop (or to
    // no loop), so its loop information is the next outer level.
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// X-macro listing every (Instruction, Super-class) pair common to all
// architectures. The order of entries defines the order of the generated
// InstructionKind enum values — do not reorder.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(Condition, BinaryOperation)                                         \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)

// Per-architecture instruction lists: empty unless the corresponding code
// generator is compiled in.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64DataProcWithShifterOp, Instruction)                            \
  M(Arm64IntermediateAddress, Instruction)                              \
  M(Arm64MultiplyAccumulate, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete (instantiable) instruction classes, common plus per-arch.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Concrete instructions plus the abstract intermediate classes.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides every concrete instruction class must provide:
// kind, debug name, checked down-casts, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKind() const OVERRIDE { return k##type; }          \
  const char* DebugName() const OVERRIDE { return #type; }              \
  const H##type* As##type() const OVERRIDE { return this; }             \
  H##type* As##type() OVERRIDE { return this; }                         \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

template <typename T> class HUseList;

// Node of the intrusive doubly-linked use list kept by each instruction.
// Only HUseList may construct nodes.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Doubly-linked list of uses; `T` is the user type (HInstruction* or
// HEnvironment*). Nodes are arena-allocated and never freed individually.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list. O(1); the node itself stays arena-owned.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear scan; used in debug checks only.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n) — the list does not cache its size.
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Attaches a use-list node to a record that did not have one yet.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are modeled as full read/write barriers.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if all of `other`'s flags are set in `this`.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // One bit per Primitive::Type for each of the four access kinds below.
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Allocates an environment with the same shape and metadata as `to_copy`;
  // the vreg values themselves are copied separately (see CopyFrom).
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a deep copy of `parent` (and its ancestors) at the end of this
  // environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Sets the vreg without registering a use; use-list bookkeeping is done by
  // the holder instruction (see RecordEnvUse).
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

  // A parent environment only exists for environments of inlined code.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Compile-time knowledge about the class of a reference value: a class handle
// plus whether that class is exact or only an upper bound.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Conservative: only answers true when `this` is an exact array type.
  bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo();
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// One enum value per instruction class, in FOR_EACH_INSTRUCTION order.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return
InputRecordAt(i).GetInstruction(); } 1831 1832 virtual void Accept(HGraphVisitor* visitor) = 0; 1833 virtual const char* DebugName() const = 0; 1834 1835 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1836 void SetRawInputAt(size_t index, HInstruction* input) { 1837 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1838 } 1839 1840 virtual bool NeedsEnvironment() const { return false; } 1841 1842 uint32_t GetDexPc() const { return dex_pc_; } 1843 1844 virtual bool IsControlFlow() const { return false; } 1845 1846 virtual bool CanThrow() const { return false; } 1847 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); } 1848 1849 bool HasSideEffects() const { return side_effects_.HasSideEffects(); } 1850 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); } 1851 1852 // Does not apply for all instructions, but having this at top level greatly 1853 // simplifies the null check elimination. 1854 // TODO: Consider merging can_be_null into ReferenceTypeInfo. 
1855 virtual bool CanBeNull() const { 1856 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types"; 1857 return true; 1858 } 1859 1860 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const { 1861 return false; 1862 } 1863 1864 void SetReferenceTypeInfo(ReferenceTypeInfo rti); 1865 1866 ReferenceTypeInfo GetReferenceTypeInfo() const { 1867 DCHECK_EQ(GetType(), Primitive::kPrimNot); 1868 return reference_type_info_; 1869 } 1870 1871 void AddUseAt(HInstruction* user, size_t index) { 1872 DCHECK(user != nullptr); 1873 HUseListNode<HInstruction*>* use = 1874 uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1875 user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use)); 1876 } 1877 1878 void AddEnvUseAt(HEnvironment* user, size_t index) { 1879 DCHECK(user != nullptr); 1880 HUseListNode<HEnvironment*>* env_use = 1881 env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1882 user->RecordEnvUse(env_use); 1883 } 1884 1885 void RemoveAsUserOfInput(size_t input) { 1886 HUserRecord<HInstruction*> input_use = InputRecordAt(input); 1887 input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode()); 1888 } 1889 1890 const HUseList<HInstruction*>& GetUses() const { return uses_; } 1891 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; } 1892 1893 bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); } 1894 bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); } 1895 bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); } 1896 bool HasOnlyOneNonEnvironmentUse() const { 1897 return !HasEnvironmentUses() && GetUses().HasOnlyOneUse(); 1898 } 1899 1900 // Does this instruction strictly dominate `other_instruction`? 1901 // Returns false if this instruction and `other_instruction` are the same. 1902 // Aborts if this instruction and `other_instruction` are both phis. 
1903 bool StrictlyDominates(HInstruction* other_instruction) const; 1904 1905 int GetId() const { return id_; } 1906 void SetId(int id) { id_ = id; } 1907 1908 int GetSsaIndex() const { return ssa_index_; } 1909 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; } 1910 bool HasSsaIndex() const { return ssa_index_ != -1; } 1911 1912 bool HasEnvironment() const { return environment_ != nullptr; } 1913 HEnvironment* GetEnvironment() const { return environment_; } 1914 // Set the `environment_` field. Raw because this method does not 1915 // update the uses lists. 1916 void SetRawEnvironment(HEnvironment* environment) { 1917 DCHECK(environment_ == nullptr); 1918 DCHECK_EQ(environment->GetHolder(), this); 1919 environment_ = environment; 1920 } 1921 1922 // Set the environment of this instruction, copying it from `environment`. While 1923 // copying, the uses lists are being updated. 1924 void CopyEnvironmentFrom(HEnvironment* environment) { 1925 DCHECK(environment_ == nullptr); 1926 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1927 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1928 environment_->CopyFrom(environment); 1929 if (environment->GetParent() != nullptr) { 1930 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1931 } 1932 } 1933 1934 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment, 1935 HBasicBlock* block) { 1936 DCHECK(environment_ == nullptr); 1937 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1938 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1939 environment_->CopyFromWithLoopPhiAdjustment(environment, block); 1940 if (environment->GetParent() != nullptr) { 1941 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1942 } 1943 } 1944 1945 // Returns the number of entries in the environment. Typically, that is the 1946 // number of dex registers in a method. 
It could be more in case of inlining. 1947 size_t EnvironmentSize() const; 1948 1949 LocationSummary* GetLocations() const { return locations_; } 1950 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1951 1952 void ReplaceWith(HInstruction* instruction); 1953 void ReplaceInput(HInstruction* replacement, size_t index); 1954 1955 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1956 // uses of this instruction by `other` are *not* updated. 1957 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1958 ReplaceWith(other); 1959 other->ReplaceInput(this, use_index); 1960 } 1961 1962 // Move `this` instruction before `cursor`. 1963 void MoveBefore(HInstruction* cursor); 1964 1965 // Move `this` before its first user and out of any loops. If there is no 1966 // out-of-loop user that dominates all other users, move the instruction 1967 // to the end of the out-of-loop common dominator of the user's blocks. 1968 // 1969 // This can be used only on non-throwing instructions with no side effects that 1970 // have at least one use but no environment uses. 1971 void MoveBeforeFirstUserAndOutOfLoops(); 1972 1973#define INSTRUCTION_TYPE_CHECK(type, super) \ 1974 bool Is##type() const { return (As##type() != nullptr); } \ 1975 virtual const H##type* As##type() const { return nullptr; } \ 1976 virtual H##type* As##type() { return nullptr; } 1977 1978 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1979#undef INSTRUCTION_TYPE_CHECK 1980 1981 // Returns whether the instruction can be moved within the graph. 1982 virtual bool CanBeMoved() const { return false; } 1983 1984 // Returns whether the two instructions are of the same kind. 1985 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1986 return false; 1987 } 1988 1989 // Returns whether any data encoded in the two instructions is equal. 1990 // This method does not look at the inputs. 
Both instructions must be 1991 // of the same type, otherwise the method has undefined behavior. 1992 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 1993 return false; 1994 } 1995 1996 // Returns whether two instructions are equal, that is: 1997 // 1) They have the same type and contain the same data (InstructionDataEquals). 1998 // 2) Their inputs are identical. 1999 bool Equals(HInstruction* other) const; 2000 2001 virtual InstructionKind GetKind() const = 0; 2002 2003 virtual size_t ComputeHashCode() const { 2004 size_t result = GetKind(); 2005 for (size_t i = 0, e = InputCount(); i < e; ++i) { 2006 result = (result * 31) + InputAt(i)->GetId(); 2007 } 2008 return result; 2009 } 2010 2011 SideEffects GetSideEffects() const { return side_effects_; } 2012 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 2013 2014 size_t GetLifetimePosition() const { return lifetime_position_; } 2015 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 2016 LiveInterval* GetLiveInterval() const { return live_interval_; } 2017 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 2018 bool HasLiveInterval() const { return live_interval_ != nullptr; } 2019 2020 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 2021 2022 // Returns whether the code generation of the instruction will require to have access 2023 // to the current method. Such instructions are: 2024 // (1): Instructions that require an environment, as calling the runtime requires 2025 // to walk the stack and have the current method stored at a specific stack address. 2026 // (2): Object literals like classes and strings, that are loaded from the dex cache 2027 // fields of the current method. 
2028 bool NeedsCurrentMethod() const { 2029 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 2030 } 2031 2032 // Returns whether the code generation of the instruction will require to have access 2033 // to the dex cache of the current method's declaring class via the current method. 2034 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 2035 2036 // Does this instruction have any use in an environment before 2037 // control flow hits 'other'? 2038 bool HasAnyEnvironmentUseBefore(HInstruction* other); 2039 2040 // Remove all references to environment uses of this instruction. 2041 // The caller must ensure that this is safe to do. 2042 void RemoveEnvironmentUsers(); 2043 2044 protected: 2045 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2046 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2047 2048 private: 2049 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2050 2051 HInstruction* previous_; 2052 HInstruction* next_; 2053 HBasicBlock* block_; 2054 const uint32_t dex_pc_; 2055 2056 // An instruction gets an id when it is added to the graph. 2057 // It reflects creation order. A negative id means the instruction 2058 // has not been added to the graph. 2059 int id_; 2060 2061 // When doing liveness analysis, instructions that have uses get an SSA index. 2062 int ssa_index_; 2063 2064 // List of instructions that have this instruction as input. 2065 HUseList<HInstruction*> uses_; 2066 2067 // List of environments that contain this instruction. 2068 HUseList<HEnvironment*> env_uses_; 2069 2070 // The environment associated with this instruction. Not null if the instruction 2071 // might jump out of the method. 2072 HEnvironment* environment_; 2073 2074 // Set by the code generator. 2075 LocationSummary* locations_; 2076 2077 // Set by the liveness analysis. 
2078 LiveInterval* live_interval_; 2079 2080 // Set by the liveness analysis, this is the position in a linear 2081 // order of blocks where this instruction's live interval start. 2082 size_t lifetime_position_; 2083 2084 SideEffects side_effects_; 2085 2086 // TODO: for primitive types this should be marked as invalid. 2087 ReferenceTypeInfo reference_type_info_; 2088 2089 friend class GraphChecker; 2090 friend class HBasicBlock; 2091 friend class HEnvironment; 2092 friend class HGraph; 2093 friend class HInstructionList; 2094 2095 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2096}; 2097std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2098 2099class HInputIterator : public ValueObject { 2100 public: 2101 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2102 2103 bool Done() const { return index_ == instruction_->InputCount(); } 2104 HInstruction* Current() const { return instruction_->InputAt(index_); } 2105 void Advance() { index_++; } 2106 2107 private: 2108 HInstruction* instruction_; 2109 size_t index_; 2110 2111 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2112}; 2113 2114class HInstructionIterator : public ValueObject { 2115 public: 2116 explicit HInstructionIterator(const HInstructionList& instructions) 2117 : instruction_(instructions.first_instruction_) { 2118 next_ = Done() ? nullptr : instruction_->GetNext(); 2119 } 2120 2121 bool Done() const { return instruction_ == nullptr; } 2122 HInstruction* Current() const { return instruction_; } 2123 void Advance() { 2124 instruction_ = next_; 2125 next_ = Done() ? 
nullptr : instruction_->GetNext(); 2126 } 2127 2128 private: 2129 HInstruction* instruction_; 2130 HInstruction* next_; 2131 2132 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2133}; 2134 2135class HBackwardInstructionIterator : public ValueObject { 2136 public: 2137 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2138 : instruction_(instructions.last_instruction_) { 2139 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2140 } 2141 2142 bool Done() const { return instruction_ == nullptr; } 2143 HInstruction* Current() const { return instruction_; } 2144 void Advance() { 2145 instruction_ = next_; 2146 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2147 } 2148 2149 private: 2150 HInstruction* instruction_; 2151 HInstruction* next_; 2152 2153 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2154}; 2155 2156template<size_t N> 2157class HTemplateInstruction: public HInstruction { 2158 public: 2159 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc) 2160 : HInstruction(side_effects, dex_pc), inputs_() {} 2161 virtual ~HTemplateInstruction() {} 2162 2163 size_t InputCount() const OVERRIDE { return N; } 2164 2165 protected: 2166 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2167 DCHECK_LT(i, N); 2168 return inputs_[i]; 2169 } 2170 2171 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2172 DCHECK_LT(i, N); 2173 inputs_[i] = input; 2174 } 2175 2176 private: 2177 std::array<HUserRecord<HInstruction*>, N> inputs_; 2178 2179 friend class SsaBuilder; 2180}; 2181 2182// HTemplateInstruction specialization for N=0. 
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  // A zero-input instruction has no input records; touching one is a bug.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};

// An HTemplateInstruction that additionally produces a value of type `type_`
// (overrides HInstruction::GetType(), which defaults to kPrimVoid).
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE { return type_; }

 protected:
  Primitive::Type type_;
};

// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};

// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
2239class HReturn : public HTemplateInstruction<1> { 2240 public: 2241 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2242 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2243 SetRawInputAt(0, value); 2244 } 2245 2246 bool IsControlFlow() const OVERRIDE { return true; } 2247 2248 DECLARE_INSTRUCTION(Return); 2249 2250 private: 2251 DISALLOW_COPY_AND_ASSIGN(HReturn); 2252}; 2253 2254// The exit instruction is the only instruction of the exit block. 2255// Instructions aborting the method (HThrow and HReturn) must branch to the 2256// exit block. 2257class HExit : public HTemplateInstruction<0> { 2258 public: 2259 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2260 2261 bool IsControlFlow() const OVERRIDE { return true; } 2262 2263 DECLARE_INSTRUCTION(Exit); 2264 2265 private: 2266 DISALLOW_COPY_AND_ASSIGN(HExit); 2267}; 2268 2269// Jumps from one block to another. 2270class HGoto : public HTemplateInstruction<0> { 2271 public: 2272 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2273 2274 bool IsControlFlow() const OVERRIDE { return true; } 2275 2276 HBasicBlock* GetSuccessor() const { 2277 return GetBlock()->GetSingleSuccessor(); 2278 } 2279 2280 DECLARE_INSTRUCTION(Goto); 2281 2282 private: 2283 DISALLOW_COPY_AND_ASSIGN(HGoto); 2284}; 2285 2286class HConstant : public HExpression<0> { 2287 public: 2288 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2289 : HExpression(type, SideEffects::None(), dex_pc) {} 2290 2291 bool CanBeMoved() const OVERRIDE { return true; } 2292 2293 virtual bool IsMinusOne() const { return false; } 2294 virtual bool IsZero() const { return false; } 2295 virtual bool IsOne() const { return false; } 2296 2297 virtual uint64_t GetValueAsUint64() const = 0; 2298 2299 DECLARE_INSTRUCTION(Constant); 2300 2301 private: 2302 DISALLOW_COPY_AND_ASSIGN(HConstant); 2303}; 2304 2305class HNullConstant : 
public HConstant { 2306 public: 2307 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2308 return true; 2309 } 2310 2311 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2312 2313 size_t ComputeHashCode() const OVERRIDE { return 0; } 2314 2315 DECLARE_INSTRUCTION(NullConstant); 2316 2317 private: 2318 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2319 2320 friend class HGraph; 2321 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2322}; 2323 2324// Constants of the type int. Those can be from Dex instructions, or 2325// synthesized (for example with the if-eqz instruction). 2326class HIntConstant : public HConstant { 2327 public: 2328 int32_t GetValue() const { return value_; } 2329 2330 uint64_t GetValueAsUint64() const OVERRIDE { 2331 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2332 } 2333 2334 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2335 DCHECK(other->IsIntConstant()); 2336 return other->AsIntConstant()->value_ == value_; 2337 } 2338 2339 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2340 2341 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2342 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2343 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2344 2345 DECLARE_INSTRUCTION(IntConstant); 2346 2347 private: 2348 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2349 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2350 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2351 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 
1 : 0) {} 2352 2353 const int32_t value_; 2354 2355 friend class HGraph; 2356 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2357 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2358 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2359}; 2360 2361class HLongConstant : public HConstant { 2362 public: 2363 int64_t GetValue() const { return value_; } 2364 2365 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2366 2367 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2368 DCHECK(other->IsLongConstant()); 2369 return other->AsLongConstant()->value_ == value_; 2370 } 2371 2372 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2373 2374 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2375 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2376 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2377 2378 DECLARE_INSTRUCTION(LongConstant); 2379 2380 private: 2381 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2382 : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {} 2383 2384 const int64_t value_; 2385 2386 friend class HGraph; 2387 DISALLOW_COPY_AND_ASSIGN(HLongConstant); 2388}; 2389 2390// Conditional branch. A block ending with an HIf instruction must have 2391// two successors. 
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // The true branch target is the successor at index 0.
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // The false branch target is the successor at index 1.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  // Whether this boundary marks the entry into, or the exit out of, a try block.
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // View of the exception-handler successors (everything past index zero).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Needs an environment (and can throw) because bailing out to the
  // interpreter requires reconstructing the interpreter state.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
2505class HPackedSwitch : public HTemplateInstruction<1> { 2506 public: 2507 HPackedSwitch(int32_t start_value, 2508 uint32_t num_entries, 2509 HInstruction* input, 2510 uint32_t dex_pc = kNoDexPc) 2511 : HTemplateInstruction(SideEffects::None(), dex_pc), 2512 start_value_(start_value), 2513 num_entries_(num_entries) { 2514 SetRawInputAt(0, input); 2515 } 2516 2517 bool IsControlFlow() const OVERRIDE { return true; } 2518 2519 int32_t GetStartValue() const { return start_value_; } 2520 2521 uint32_t GetNumEntries() const { return num_entries_; } 2522 2523 HBasicBlock* GetDefaultBlock() const { 2524 // Last entry is the default block. 2525 return GetBlock()->GetSuccessors()[num_entries_]; 2526 } 2527 DECLARE_INSTRUCTION(PackedSwitch); 2528 2529 private: 2530 const int32_t start_value_; 2531 const uint32_t num_entries_; 2532 2533 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2534}; 2535 2536class HUnaryOperation : public HExpression<1> { 2537 public: 2538 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2539 : HExpression(result_type, SideEffects::None(), dex_pc) { 2540 SetRawInputAt(0, input); 2541 } 2542 2543 HInstruction* GetInput() const { return InputAt(0); } 2544 Primitive::Type GetResultType() const { return GetType(); } 2545 2546 bool CanBeMoved() const OVERRIDE { return true; } 2547 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2548 return true; 2549 } 2550 2551 // Try to statically evaluate `operation` and return a HConstant 2552 // containing the result of this evaluation. If `operation` cannot 2553 // be evaluated as a constant, return null. 2554 HConstant* TryStaticEvaluation() const; 2555 2556 // Apply this operation to `x`. 
2557 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2558 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2559 2560 DECLARE_INSTRUCTION(UnaryOperation); 2561 2562 private: 2563 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2564}; 2565 2566class HBinaryOperation : public HExpression<2> { 2567 public: 2568 HBinaryOperation(Primitive::Type result_type, 2569 HInstruction* left, 2570 HInstruction* right, 2571 SideEffects side_effects = SideEffects::None(), 2572 uint32_t dex_pc = kNoDexPc) 2573 : HExpression(result_type, side_effects, dex_pc) { 2574 SetRawInputAt(0, left); 2575 SetRawInputAt(1, right); 2576 } 2577 2578 HInstruction* GetLeft() const { return InputAt(0); } 2579 HInstruction* GetRight() const { return InputAt(1); } 2580 Primitive::Type GetResultType() const { return GetType(); } 2581 2582 virtual bool IsCommutative() const { return false; } 2583 2584 // Put constant on the right. 2585 // Returns whether order is changed. 2586 bool OrderInputsWithConstantOnTheRight() { 2587 HInstruction* left = InputAt(0); 2588 HInstruction* right = InputAt(1); 2589 if (left->IsConstant() && !right->IsConstant()) { 2590 ReplaceInput(right, 0); 2591 ReplaceInput(left, 1); 2592 return true; 2593 } 2594 return false; 2595 } 2596 2597 // Order inputs by instruction id, but favor constant on the right side. 2598 // This helps GVN for commutative ops. 2599 void OrderInputs() { 2600 DCHECK(IsCommutative()); 2601 HInstruction* left = InputAt(0); 2602 HInstruction* right = InputAt(1); 2603 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2604 return; 2605 } 2606 if (OrderInputsWithConstantOnTheRight()) { 2607 return; 2608 } 2609 // Order according to instruction id. 
2610 if (left->GetId() > right->GetId()) { 2611 ReplaceInput(right, 0); 2612 ReplaceInput(left, 1); 2613 } 2614 } 2615 2616 bool CanBeMoved() const OVERRIDE { return true; } 2617 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2618 return true; 2619 } 2620 2621 // Try to statically evaluate `operation` and return a HConstant 2622 // containing the result of this evaluation. If `operation` cannot 2623 // be evaluated as a constant, return null. 2624 HConstant* TryStaticEvaluation() const; 2625 2626 // Apply this operation to `x` and `y`. 2627 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2628 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2629 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2630 HLongConstant* y ATTRIBUTE_UNUSED) const { 2631 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2632 return nullptr; 2633 } 2634 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2635 HIntConstant* y ATTRIBUTE_UNUSED) const { 2636 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2637 return nullptr; 2638 } 2639 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2640 HNullConstant* y ATTRIBUTE_UNUSED) const { 2641 VLOG(compiler) << DebugName() << " is not defined for the (null, null) case."; 2642 return nullptr; 2643 } 2644 2645 // Returns an input that can legally be used as the right input and is 2646 // constant, or null. 2647 HConstant* GetConstantRight() const; 2648 2649 // If `GetConstantRight()` returns one of the input, this returns the other 2650 // one. Otherwise it returns null. 
2651 HInstruction* GetLeastConstantLeft() const; 2652 2653 DECLARE_INSTRUCTION(BinaryOperation); 2654 2655 private: 2656 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2657}; 2658 2659// The comparison bias applies for floating point operations and indicates how NaN 2660// comparisons are treated: 2661enum class ComparisonBias { 2662 kNoBias, // bias is not applicable (i.e. for long operation) 2663 kGtBias, // return 1 for NaN comparisons 2664 kLtBias, // return -1 for NaN comparisons 2665}; 2666 2667class HCondition : public HBinaryOperation { 2668 public: 2669 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2670 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2671 needs_materialization_(true), 2672 bias_(ComparisonBias::kNoBias) {} 2673 2674 bool NeedsMaterialization() const { return needs_materialization_; } 2675 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2676 2677 // For code generation purposes, returns whether this instruction is just before 2678 // `instruction`, and disregard moves in between. 2679 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2680 2681 DECLARE_INSTRUCTION(Condition); 2682 2683 virtual IfCondition GetCondition() const = 0; 2684 2685 virtual IfCondition GetOppositeCondition() const = 0; 2686 2687 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2688 2689 void SetBias(ComparisonBias bias) { bias_ = bias; } 2690 2691 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2692 return bias_ == other->AsCondition()->bias_; 2693 } 2694 2695 bool IsFPConditionTrueIfNaN() const { 2696 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2697 IfCondition if_cond = GetCondition(); 2698 return IsGtBias() ? 
((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2699 } 2700 2701 bool IsFPConditionFalseIfNaN() const { 2702 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2703 IfCondition if_cond = GetCondition(); 2704 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2705 } 2706 2707 private: 2708 // For register allocation purposes, returns whether this instruction needs to be 2709 // materialized (that is, not just be in the processor flags). 2710 bool needs_materialization_; 2711 2712 // Needed if we merge a HCompare into a HCondition. 2713 ComparisonBias bias_; 2714 2715 DISALLOW_COPY_AND_ASSIGN(HCondition); 2716}; 2717 2718// Instruction to check if two inputs are equal to each other. 2719class HEqual : public HCondition { 2720 public: 2721 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2722 : HCondition(first, second, dex_pc) {} 2723 2724 bool IsCommutative() const OVERRIDE { return true; } 2725 2726 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2727 return GetBlock()->GetGraph()->GetIntConstant( 2728 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2729 } 2730 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2731 return GetBlock()->GetGraph()->GetIntConstant( 2732 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2733 } 2734 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2735 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2736 return GetBlock()->GetGraph()->GetIntConstant(1); 2737 } 2738 2739 DECLARE_INSTRUCTION(Equal); 2740 2741 IfCondition GetCondition() const OVERRIDE { 2742 return kCondEQ; 2743 } 2744 2745 IfCondition GetOppositeCondition() const OVERRIDE { 2746 return kCondNE; 2747 } 2748 2749 private: 2750 template <typename T> bool Compute(T x, T y) const { return x == y; } 2751 2752 DISALLOW_COPY_AND_ASSIGN(HEqual); 2753}; 2754 2755class HNotEqual : public HCondition { 2756 public: 2757 
HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2758 : HCondition(first, second, dex_pc) {} 2759 2760 bool IsCommutative() const OVERRIDE { return true; } 2761 2762 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2763 return GetBlock()->GetGraph()->GetIntConstant( 2764 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2765 } 2766 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2767 return GetBlock()->GetGraph()->GetIntConstant( 2768 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2769 } 2770 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2771 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2772 return GetBlock()->GetGraph()->GetIntConstant(0); 2773 } 2774 2775 DECLARE_INSTRUCTION(NotEqual); 2776 2777 IfCondition GetCondition() const OVERRIDE { 2778 return kCondNE; 2779 } 2780 2781 IfCondition GetOppositeCondition() const OVERRIDE { 2782 return kCondEQ; 2783 } 2784 2785 private: 2786 template <typename T> bool Compute(T x, T y) const { return x != y; } 2787 2788 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2789}; 2790 2791class HLessThan : public HCondition { 2792 public: 2793 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2794 : HCondition(first, second, dex_pc) {} 2795 2796 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2797 return GetBlock()->GetGraph()->GetIntConstant( 2798 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2799 } 2800 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2801 return GetBlock()->GetGraph()->GetIntConstant( 2802 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2803 } 2804 2805 DECLARE_INSTRUCTION(LessThan); 2806 2807 IfCondition GetCondition() const OVERRIDE { 2808 return kCondLT; 2809 } 2810 2811 IfCondition GetOppositeCondition() const OVERRIDE { 2812 return kCondGE; 2813 } 2814 2815 private: 2816 template <typename T> bool Compute(T x, T y) const { return x 
< y; } 2817 2818 DISALLOW_COPY_AND_ASSIGN(HLessThan); 2819}; 2820 2821class HLessThanOrEqual : public HCondition { 2822 public: 2823 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2824 : HCondition(first, second, dex_pc) {} 2825 2826 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2827 return GetBlock()->GetGraph()->GetIntConstant( 2828 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2829 } 2830 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2831 return GetBlock()->GetGraph()->GetIntConstant( 2832 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2833 } 2834 2835 DECLARE_INSTRUCTION(LessThanOrEqual); 2836 2837 IfCondition GetCondition() const OVERRIDE { 2838 return kCondLE; 2839 } 2840 2841 IfCondition GetOppositeCondition() const OVERRIDE { 2842 return kCondGT; 2843 } 2844 2845 private: 2846 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2847 2848 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual); 2849}; 2850 2851class HGreaterThan : public HCondition { 2852 public: 2853 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2854 : HCondition(first, second, dex_pc) {} 2855 2856 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2857 return GetBlock()->GetGraph()->GetIntConstant( 2858 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2859 } 2860 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2861 return GetBlock()->GetGraph()->GetIntConstant( 2862 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2863 } 2864 2865 DECLARE_INSTRUCTION(GreaterThan); 2866 2867 IfCondition GetCondition() const OVERRIDE { 2868 return kCondGT; 2869 } 2870 2871 IfCondition GetOppositeCondition() const OVERRIDE { 2872 return kCondLE; 2873 } 2874 2875 private: 2876 template <typename T> bool Compute(T x, T y) const { return x > y; } 2877 2878 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 2879}; 2880 2881class 
HGreaterThanOrEqual : public HCondition { 2882 public: 2883 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2884 : HCondition(first, second, dex_pc) {} 2885 2886 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2887 return GetBlock()->GetGraph()->GetIntConstant( 2888 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2889 } 2890 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2891 return GetBlock()->GetGraph()->GetIntConstant( 2892 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2893 } 2894 2895 DECLARE_INSTRUCTION(GreaterThanOrEqual); 2896 2897 IfCondition GetCondition() const OVERRIDE { 2898 return kCondGE; 2899 } 2900 2901 IfCondition GetOppositeCondition() const OVERRIDE { 2902 return kCondLT; 2903 } 2904 2905 private: 2906 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2907 2908 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2909}; 2910 2911class HBelow : public HCondition { 2912 public: 2913 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2914 : HCondition(first, second, dex_pc) {} 2915 2916 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2917 return GetBlock()->GetGraph()->GetIntConstant( 2918 Compute(static_cast<uint32_t>(x->GetValue()), 2919 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2920 } 2921 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2922 return GetBlock()->GetGraph()->GetIntConstant( 2923 Compute(static_cast<uint64_t>(x->GetValue()), 2924 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2925 } 2926 2927 DECLARE_INSTRUCTION(Below); 2928 2929 IfCondition GetCondition() const OVERRIDE { 2930 return kCondB; 2931 } 2932 2933 IfCondition GetOppositeCondition() const OVERRIDE { 2934 return kCondAE; 2935 } 2936 2937 private: 2938 template <typename T> bool Compute(T x, T y) const { return x < y; } 2939 2940 DISALLOW_COPY_AND_ASSIGN(HBelow); 2941}; 
2942 2943class HBelowOrEqual : public HCondition { 2944 public: 2945 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2946 : HCondition(first, second, dex_pc) {} 2947 2948 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2949 return GetBlock()->GetGraph()->GetIntConstant( 2950 Compute(static_cast<uint32_t>(x->GetValue()), 2951 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2952 } 2953 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2954 return GetBlock()->GetGraph()->GetIntConstant( 2955 Compute(static_cast<uint64_t>(x->GetValue()), 2956 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2957 } 2958 2959 DECLARE_INSTRUCTION(BelowOrEqual); 2960 2961 IfCondition GetCondition() const OVERRIDE { 2962 return kCondBE; 2963 } 2964 2965 IfCondition GetOppositeCondition() const OVERRIDE { 2966 return kCondA; 2967 } 2968 2969 private: 2970 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2971 2972 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2973}; 2974 2975class HAbove : public HCondition { 2976 public: 2977 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2978 : HCondition(first, second, dex_pc) {} 2979 2980 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2981 return GetBlock()->GetGraph()->GetIntConstant( 2982 Compute(static_cast<uint32_t>(x->GetValue()), 2983 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2984 } 2985 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2986 return GetBlock()->GetGraph()->GetIntConstant( 2987 Compute(static_cast<uint64_t>(x->GetValue()), 2988 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2989 } 2990 2991 DECLARE_INSTRUCTION(Above); 2992 2993 IfCondition GetCondition() const OVERRIDE { 2994 return kCondA; 2995 } 2996 2997 IfCondition GetOppositeCondition() const OVERRIDE { 2998 return kCondBE; 2999 } 3000 3001 private: 3002 template <typename T> bool 
Compute(T x, T y) const { return x > y; } 3003 3004 DISALLOW_COPY_AND_ASSIGN(HAbove); 3005}; 3006 3007class HAboveOrEqual : public HCondition { 3008 public: 3009 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3010 : HCondition(first, second, dex_pc) {} 3011 3012 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3013 return GetBlock()->GetGraph()->GetIntConstant( 3014 Compute(static_cast<uint32_t>(x->GetValue()), 3015 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3016 } 3017 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3018 return GetBlock()->GetGraph()->GetIntConstant( 3019 Compute(static_cast<uint64_t>(x->GetValue()), 3020 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3021 } 3022 3023 DECLARE_INSTRUCTION(AboveOrEqual); 3024 3025 IfCondition GetCondition() const OVERRIDE { 3026 return kCondAE; 3027 } 3028 3029 IfCondition GetOppositeCondition() const OVERRIDE { 3030 return kCondB; 3031 } 3032 3033 private: 3034 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3035 3036 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3037}; 3038 3039// Instruction to check how two inputs compare to each other. 3040// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3041class HCompare : public HBinaryOperation { 3042 public: 3043 HCompare(Primitive::Type type, 3044 HInstruction* first, 3045 HInstruction* second, 3046 ComparisonBias bias, 3047 uint32_t dex_pc) 3048 : HBinaryOperation(Primitive::kPrimInt, 3049 first, 3050 second, 3051 SideEffectsForArchRuntimeCalls(type), 3052 dex_pc), 3053 bias_(bias) { 3054 DCHECK_EQ(type, first->GetType()); 3055 DCHECK_EQ(type, second->GetType()); 3056 } 3057 3058 template <typename T> 3059 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 
1 : -1; } 3060 3061 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3062 return GetBlock()->GetGraph()->GetIntConstant( 3063 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3064 } 3065 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3066 return GetBlock()->GetGraph()->GetIntConstant( 3067 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3068 } 3069 3070 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3071 return bias_ == other->AsCompare()->bias_; 3072 } 3073 3074 ComparisonBias GetBias() const { return bias_; } 3075 3076 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3077 3078 3079 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3080 // MIPS64 uses a runtime call for FP comparisons. 3081 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3082 } 3083 3084 DECLARE_INSTRUCTION(Compare); 3085 3086 private: 3087 const ComparisonBias bias_; 3088 3089 DISALLOW_COPY_AND_ASSIGN(HCompare); 3090}; 3091 3092// A local in the graph. Corresponds to a Dex register. 3093class HLocal : public HTemplateInstruction<0> { 3094 public: 3095 explicit HLocal(uint16_t reg_number) 3096 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3097 3098 DECLARE_INSTRUCTION(Local); 3099 3100 uint16_t GetRegNumber() const { return reg_number_; } 3101 3102 private: 3103 // The Dex register number. 3104 const uint16_t reg_number_; 3105 3106 DISALLOW_COPY_AND_ASSIGN(HLocal); 3107}; 3108 3109// Load a given local. The local is an input of this instruction. 
3110class HLoadLocal : public HExpression<1> { 3111 public: 3112 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3113 : HExpression(type, SideEffects::None(), dex_pc) { 3114 SetRawInputAt(0, local); 3115 } 3116 3117 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3118 3119 DECLARE_INSTRUCTION(LoadLocal); 3120 3121 private: 3122 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3123}; 3124 3125// Store a value in a given local. This instruction has two inputs: the value 3126// and the local. 3127class HStoreLocal : public HTemplateInstruction<2> { 3128 public: 3129 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3130 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3131 SetRawInputAt(0, local); 3132 SetRawInputAt(1, value); 3133 } 3134 3135 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3136 3137 DECLARE_INSTRUCTION(StoreLocal); 3138 3139 private: 3140 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3141}; 3142 3143class HFloatConstant : public HConstant { 3144 public: 3145 float GetValue() const { return value_; } 3146 3147 uint64_t GetValueAsUint64() const OVERRIDE { 3148 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3149 } 3150 3151 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3152 DCHECK(other->IsFloatConstant()); 3153 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3154 } 3155 3156 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3157 3158 bool IsMinusOne() const OVERRIDE { 3159 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3160 } 3161 bool IsZero() const OVERRIDE { 3162 return value_ == 0.0f; 3163 } 3164 bool IsOne() const OVERRIDE { 3165 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3166 } 3167 bool IsNaN() const { 3168 return std::isnan(value_); 3169 } 3170 3171 DECLARE_INSTRUCTION(FloatConstant); 3172 
3173 private: 3174 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3175 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3176 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3177 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3178 3179 const float value_; 3180 3181 // Only the SsaBuilder and HGraph can create floating-point constants. 3182 friend class SsaBuilder; 3183 friend class HGraph; 3184 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3185}; 3186 3187class HDoubleConstant : public HConstant { 3188 public: 3189 double GetValue() const { return value_; } 3190 3191 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3192 3193 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3194 DCHECK(other->IsDoubleConstant()); 3195 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3196 } 3197 3198 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3199 3200 bool IsMinusOne() const OVERRIDE { 3201 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3202 } 3203 bool IsZero() const OVERRIDE { 3204 return value_ == 0.0; 3205 } 3206 bool IsOne() const OVERRIDE { 3207 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3208 } 3209 bool IsNaN() const { 3210 return std::isnan(value_); 3211 } 3212 3213 DECLARE_INSTRUCTION(DoubleConstant); 3214 3215 private: 3216 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3217 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3218 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3219 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3220 3221 const double value_; 3222 3223 // Only the SsaBuilder and HGraph can create floating-point constants. 
3224 friend class SsaBuilder; 3225 friend class HGraph; 3226 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3227}; 3228 3229enum class Intrinsics { 3230#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name, 3231#include "intrinsics_list.h" 3232 kNone, 3233 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3234#undef INTRINSICS_LIST 3235#undef OPTIMIZING_INTRINSICS 3236}; 3237std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3238 3239enum IntrinsicNeedsEnvironmentOrCache { 3240 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3241 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3242}; 3243 3244class HInvoke : public HInstruction { 3245 public: 3246 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3247 3248 bool NeedsEnvironment() const OVERRIDE; 3249 3250 void SetArgumentAt(size_t index, HInstruction* argument) { 3251 SetRawInputAt(index, argument); 3252 } 3253 3254 // Return the number of arguments. This number can be lower than 3255 // the number of inputs returned by InputCount(), as some invoke 3256 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3257 // inputs at the end of their list of inputs. 
3258 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3259 3260 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3261 3262 3263 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3264 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3265 3266 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3267 3268 Intrinsics GetIntrinsic() const { 3269 return intrinsic_; 3270 } 3271 3272 void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache); 3273 3274 bool IsFromInlinedInvoke() const { 3275 return GetEnvironment()->IsFromInlinedInvoke(); 3276 } 3277 3278 bool CanThrow() const OVERRIDE { return true; } 3279 3280 uint32_t* GetIntrinsicOptimizations() { 3281 return &intrinsic_optimizations_; 3282 } 3283 3284 const uint32_t* GetIntrinsicOptimizations() const { 3285 return &intrinsic_optimizations_; 3286 } 3287 3288 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3289 3290 DECLARE_INSTRUCTION(Invoke); 3291 3292 protected: 3293 HInvoke(ArenaAllocator* arena, 3294 uint32_t number_of_arguments, 3295 uint32_t number_of_other_inputs, 3296 Primitive::Type return_type, 3297 uint32_t dex_pc, 3298 uint32_t dex_method_index, 3299 InvokeType original_invoke_type) 3300 : HInstruction( 3301 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 
3302 number_of_arguments_(number_of_arguments), 3303 inputs_(number_of_arguments + number_of_other_inputs, 3304 arena->Adapter(kArenaAllocInvokeInputs)), 3305 return_type_(return_type), 3306 dex_method_index_(dex_method_index), 3307 original_invoke_type_(original_invoke_type), 3308 intrinsic_(Intrinsics::kNone), 3309 intrinsic_optimizations_(0) { 3310 } 3311 3312 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3313 return inputs_[index]; 3314 } 3315 3316 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3317 inputs_[index] = input; 3318 } 3319 3320 uint32_t number_of_arguments_; 3321 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3322 const Primitive::Type return_type_; 3323 const uint32_t dex_method_index_; 3324 const InvokeType original_invoke_type_; 3325 Intrinsics intrinsic_; 3326 3327 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3328 uint32_t intrinsic_optimizations_; 3329 3330 private: 3331 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3332}; 3333 3334class HInvokeUnresolved : public HInvoke { 3335 public: 3336 HInvokeUnresolved(ArenaAllocator* arena, 3337 uint32_t number_of_arguments, 3338 Primitive::Type return_type, 3339 uint32_t dex_pc, 3340 uint32_t dex_method_index, 3341 InvokeType invoke_type) 3342 : HInvoke(arena, 3343 number_of_arguments, 3344 0u /* number_of_other_inputs */, 3345 return_type, 3346 dex_pc, 3347 dex_method_index, 3348 invoke_type) { 3349 } 3350 3351 DECLARE_INSTRUCTION(InvokeUnresolved); 3352 3353 private: 3354 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3355}; 3356 3357class HInvokeStaticOrDirect : public HInvoke { 3358 public: 3359 // Requirements of this method call regarding the class 3360 // initialization (clinit) check of its declaring class. 3361 enum class ClinitCheckRequirement { 3362 kNone, // Class already initialized. 3363 kExplicit, // Static call having explicit clinit check as last input. 
3364 kImplicit, // Static call implicitly requiring a clinit check. 3365 }; 3366 3367 // Determines how to load the target ArtMethod*. 3368 enum class MethodLoadKind { 3369 // Use a String init ArtMethod* loaded from Thread entrypoints. 3370 kStringInit, 3371 3372 // Use the method's own ArtMethod* loaded by the register allocator. 3373 kRecursive, 3374 3375 // Use ArtMethod* at a known address, embed the direct address in the code. 3376 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3377 kDirectAddress, 3378 3379 // Use ArtMethod* at an address that will be known at link time, embed the direct 3380 // address in the code. If the image is relocatable, emit .patch_oat entry. 3381 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3382 // the image relocatable or not. 3383 kDirectAddressWithFixup, 3384 3385 // Load from resoved methods array in the dex cache using a PC-relative load. 3386 // Used when we need to use the dex cache, for example for invoke-static that 3387 // may cause class initialization (the entry may point to a resolution method), 3388 // and we know that we can access the dex cache arrays using a PC-relative load. 3389 kDexCachePcRelative, 3390 3391 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3392 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3393 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3394 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3395 kDexCacheViaMethod, 3396 }; 3397 3398 // Determines the location of the code pointer. 3399 enum class CodePtrLocation { 3400 // Recursive call, use local PC-relative call instruction. 3401 kCallSelf, 3402 3403 // Use PC-relative call instruction patched at link time. 3404 // Used for calls within an oat file, boot->boot or app->app. 
3405 kCallPCRelative, 3406 3407 // Call to a known target address, embed the direct address in code. 3408 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3409 kCallDirect, 3410 3411 // Call to a target address that will be known at link time, embed the direct 3412 // address in code. If the image is relocatable, emit .patch_oat entry. 3413 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3414 // the image relocatable or not. 3415 kCallDirectWithFixup, 3416 3417 // Use code pointer from the ArtMethod*. 3418 // Used when we don't know the target code. This is also the last-resort-kind used when 3419 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3420 kCallArtMethod, 3421 }; 3422 3423 struct DispatchInfo { 3424 MethodLoadKind method_load_kind; 3425 CodePtrLocation code_ptr_location; 3426 // The method load data holds 3427 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3428 // Note that there are multiple string init methods, each having its own offset. 3429 // - the method address for kDirectAddress 3430 // - the dex cache arrays offset for kDexCachePcRel. 3431 uint64_t method_load_data; 3432 uint64_t direct_code_ptr; 3433 }; 3434 3435 HInvokeStaticOrDirect(ArenaAllocator* arena, 3436 uint32_t number_of_arguments, 3437 Primitive::Type return_type, 3438 uint32_t dex_pc, 3439 uint32_t method_index, 3440 MethodReference target_method, 3441 DispatchInfo dispatch_info, 3442 InvokeType original_invoke_type, 3443 InvokeType optimized_invoke_type, 3444 ClinitCheckRequirement clinit_check_requirement) 3445 : HInvoke(arena, 3446 number_of_arguments, 3447 // There is potentially one extra argument for the HCurrentMethod node, and 3448 // potentially one other if the clinit check is explicit, and potentially 3449 // one other if the method is a string factory. 3450 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 
1u : 0u) + 3451 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) + 3452 (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3453 return_type, 3454 dex_pc, 3455 method_index, 3456 original_invoke_type), 3457 optimized_invoke_type_(optimized_invoke_type), 3458 clinit_check_requirement_(clinit_check_requirement), 3459 target_method_(target_method), 3460 dispatch_info_(dispatch_info) { } 3461 3462 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 3463 bool had_current_method_input = HasCurrentMethodInput(); 3464 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind); 3465 3466 // Using the current method is the default and once we find a better 3467 // method load kind, we should not go back to using the current method. 3468 DCHECK(had_current_method_input || !needs_current_method_input); 3469 3470 if (had_current_method_input && !needs_current_method_input) { 3471 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod()); 3472 RemoveInputAt(GetSpecialInputIndex()); 3473 } 3474 dispatch_info_ = dispatch_info; 3475 } 3476 3477 void AddSpecialInput(HInstruction* input) { 3478 // We allow only one special input. 3479 DCHECK(!IsStringInit() && !HasCurrentMethodInput()); 3480 DCHECK(InputCount() == GetSpecialInputIndex() || 3481 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck())); 3482 InsertInputAt(GetSpecialInputIndex(), input); 3483 } 3484 3485 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3486 // We access the method via the dex cache so we can't do an implicit null check. 3487 // TODO: for intrinsics we can generate implicit null checks. 3488 return false; 3489 } 3490 3491 bool CanBeNull() const OVERRIDE { 3492 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3493 } 3494 3495 // Get the index of the special input, if any. 
3496 // 3497 // If the invoke IsStringInit(), it initially has a HFakeString special argument 3498 // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(), 3499 // the "special input" is the current method pointer; otherwise there may be one 3500 // platform-specific special input, such as PC-relative addressing base. 3501 uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); } 3502 3503 InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; } 3504 void SetOptimizedInvokeType(InvokeType invoke_type) { 3505 optimized_invoke_type_ = invoke_type; 3506 } 3507 3508 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3509 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3510 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3511 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE; 3512 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3513 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3514 bool HasPcRelativeDexCache() const { 3515 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; 3516 } 3517 bool HasCurrentMethodInput() const { 3518 // This function can be called only after the invoke has been fully initialized by the builder. 
3519 if (NeedsCurrentMethodInput(GetMethodLoadKind())) { 3520 DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod()); 3521 return true; 3522 } else { 3523 DCHECK(InputCount() == GetSpecialInputIndex() || 3524 !InputAt(GetSpecialInputIndex())->IsCurrentMethod()); 3525 return false; 3526 } 3527 } 3528 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3529 MethodReference GetTargetMethod() const { return target_method_; } 3530 void SetTargetMethod(MethodReference method) { target_method_ = method; } 3531 3532 int32_t GetStringInitOffset() const { 3533 DCHECK(IsStringInit()); 3534 return dispatch_info_.method_load_data; 3535 } 3536 3537 uint64_t GetMethodAddress() const { 3538 DCHECK(HasMethodAddress()); 3539 return dispatch_info_.method_load_data; 3540 } 3541 3542 uint32_t GetDexCacheArrayOffset() const { 3543 DCHECK(HasPcRelativeDexCache()); 3544 return dispatch_info_.method_load_data; 3545 } 3546 3547 uint64_t GetDirectCodePtr() const { 3548 DCHECK(HasDirectCodePtr()); 3549 return dispatch_info_.direct_code_ptr; 3550 } 3551 3552 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3553 3554 // Is this instruction a call to a static method? 3555 bool IsStatic() const { 3556 return GetOriginalInvokeType() == kStatic; 3557 } 3558 3559 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by 3560 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck) 3561 // instruction; only relevant for static calls with explicit clinit check. 
3562 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) { 3563 DCHECK(IsStaticWithExplicitClinitCheck()); 3564 size_t last_input_index = InputCount() - 1; 3565 HInstruction* last_input = InputAt(last_input_index); 3566 DCHECK(last_input != nullptr); 3567 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName(); 3568 RemoveAsUserOfInput(last_input_index); 3569 inputs_.pop_back(); 3570 clinit_check_requirement_ = new_requirement; 3571 DCHECK(!IsStaticWithExplicitClinitCheck()); 3572 } 3573 3574 bool IsStringFactoryFor(HFakeString* str) const { 3575 if (!IsStringInit()) return false; 3576 DCHECK(!HasCurrentMethodInput()); 3577 if (InputCount() == (number_of_arguments_)) return false; 3578 return InputAt(InputCount() - 1)->AsFakeString() == str; 3579 } 3580 3581 void RemoveFakeStringArgumentAsLastInput() { 3582 DCHECK(IsStringInit()); 3583 size_t last_input_index = InputCount() - 1; 3584 HInstruction* last_input = InputAt(last_input_index); 3585 DCHECK(last_input != nullptr); 3586 DCHECK(last_input->IsFakeString()) << last_input->DebugName(); 3587 RemoveAsUserOfInput(last_input_index); 3588 inputs_.pop_back(); 3589 } 3590 3591 // Is this a call to a static method whose declaring class has an 3592 // explicit initialization check in the graph? 3593 bool IsStaticWithExplicitClinitCheck() const { 3594 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3595 } 3596 3597 // Is this a call to a static method whose declaring class has an 3598 // implicit intialization check requirement? 3599 bool IsStaticWithImplicitClinitCheck() const { 3600 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3601 } 3602 3603 // Does this method load kind need the current method as an input? 
3604 static bool NeedsCurrentMethodInput(MethodLoadKind kind) { 3605 return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod; 3606 } 3607 3608 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3609 3610 protected: 3611 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3612 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3613 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3614 HInstruction* input = input_record.GetInstruction(); 3615 // `input` is the last input of a static invoke marked as having 3616 // an explicit clinit check. It must either be: 3617 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3618 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3619 DCHECK(input != nullptr); 3620 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3621 } 3622 return input_record; 3623 } 3624 3625 void InsertInputAt(size_t index, HInstruction* input); 3626 void RemoveInputAt(size_t index); 3627 3628 private: 3629 InvokeType optimized_invoke_type_; 3630 ClinitCheckRequirement clinit_check_requirement_; 3631 // The target method may refer to different dex file or method index than the original 3632 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3633 // in derived class to increase visibility. 
3634 MethodReference target_method_; 3635 DispatchInfo dispatch_info_; 3636 3637 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3638}; 3639std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs); 3640std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs); 3641 3642class HInvokeVirtual : public HInvoke { 3643 public: 3644 HInvokeVirtual(ArenaAllocator* arena, 3645 uint32_t number_of_arguments, 3646 Primitive::Type return_type, 3647 uint32_t dex_pc, 3648 uint32_t dex_method_index, 3649 uint32_t vtable_index) 3650 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3651 vtable_index_(vtable_index) {} 3652 3653 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3654 // TODO: Add implicit null checks in intrinsics. 3655 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3656 } 3657 3658 uint32_t GetVTableIndex() const { return vtable_index_; } 3659 3660 DECLARE_INSTRUCTION(InvokeVirtual); 3661 3662 private: 3663 const uint32_t vtable_index_; 3664 3665 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3666}; 3667 3668class HInvokeInterface : public HInvoke { 3669 public: 3670 HInvokeInterface(ArenaAllocator* arena, 3671 uint32_t number_of_arguments, 3672 Primitive::Type return_type, 3673 uint32_t dex_pc, 3674 uint32_t dex_method_index, 3675 uint32_t imt_index) 3676 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3677 imt_index_(imt_index) {} 3678 3679 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3680 // TODO: Add implicit null checks in intrinsics. 
3681 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3682 } 3683 3684 uint32_t GetImtIndex() const { return imt_index_; } 3685 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3686 3687 DECLARE_INSTRUCTION(InvokeInterface); 3688 3689 private: 3690 const uint32_t imt_index_; 3691 3692 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3693}; 3694 3695class HNewInstance : public HExpression<2> { 3696 public: 3697 HNewInstance(HInstruction* cls, 3698 HCurrentMethod* current_method, 3699 uint32_t dex_pc, 3700 uint16_t type_index, 3701 const DexFile& dex_file, 3702 bool can_throw, 3703 bool finalizable, 3704 QuickEntrypointEnum entrypoint) 3705 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3706 type_index_(type_index), 3707 dex_file_(dex_file), 3708 can_throw_(can_throw), 3709 finalizable_(finalizable), 3710 entrypoint_(entrypoint) { 3711 SetRawInputAt(0, cls); 3712 SetRawInputAt(1, current_method); 3713 } 3714 3715 uint16_t GetTypeIndex() const { return type_index_; } 3716 const DexFile& GetDexFile() const { return dex_file_; } 3717 3718 // Calls runtime so needs an environment. 3719 bool NeedsEnvironment() const OVERRIDE { return true; } 3720 3721 // It may throw when called on type that's not instantiable/accessible. 3722 // It can throw OOME. 3723 // TODO: distinguish between the two cases so we can for example allow allocation elimination. 
3724 bool CanThrow() const OVERRIDE { return can_throw_ || true; } 3725 3726 bool IsFinalizable() const { return finalizable_; } 3727 3728 bool CanBeNull() const OVERRIDE { return false; } 3729 3730 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3731 3732 void SetEntrypoint(QuickEntrypointEnum entrypoint) { 3733 entrypoint_ = entrypoint; 3734 } 3735 3736 DECLARE_INSTRUCTION(NewInstance); 3737 3738 private: 3739 const uint16_t type_index_; 3740 const DexFile& dex_file_; 3741 const bool can_throw_; 3742 const bool finalizable_; 3743 QuickEntrypointEnum entrypoint_; 3744 3745 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3746}; 3747 3748class HNeg : public HUnaryOperation { 3749 public: 3750 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3751 : HUnaryOperation(result_type, input, dex_pc) {} 3752 3753 template <typename T> T Compute(T x) const { return -x; } 3754 3755 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3756 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3757 } 3758 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3759 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3760 } 3761 3762 DECLARE_INSTRUCTION(Neg); 3763 3764 private: 3765 DISALLOW_COPY_AND_ASSIGN(HNeg); 3766}; 3767 3768class HNewArray : public HExpression<2> { 3769 public: 3770 HNewArray(HInstruction* length, 3771 HCurrentMethod* current_method, 3772 uint32_t dex_pc, 3773 uint16_t type_index, 3774 const DexFile& dex_file, 3775 QuickEntrypointEnum entrypoint) 3776 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3777 type_index_(type_index), 3778 dex_file_(dex_file), 3779 entrypoint_(entrypoint) { 3780 SetRawInputAt(0, length); 3781 SetRawInputAt(1, current_method); 3782 } 3783 3784 uint16_t GetTypeIndex() const { return type_index_; } 3785 const DexFile& GetDexFile() const { return dex_file_; } 3786 3787 // Calls runtime so needs an 
environment. 3788 bool NeedsEnvironment() const OVERRIDE { return true; } 3789 3790 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 3791 bool CanThrow() const OVERRIDE { return true; } 3792 3793 bool CanBeNull() const OVERRIDE { return false; } 3794 3795 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3796 3797 DECLARE_INSTRUCTION(NewArray); 3798 3799 private: 3800 const uint16_t type_index_; 3801 const DexFile& dex_file_; 3802 const QuickEntrypointEnum entrypoint_; 3803 3804 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3805}; 3806 3807class HAdd : public HBinaryOperation { 3808 public: 3809 HAdd(Primitive::Type result_type, 3810 HInstruction* left, 3811 HInstruction* right, 3812 uint32_t dex_pc = kNoDexPc) 3813 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3814 3815 bool IsCommutative() const OVERRIDE { return true; } 3816 3817 template <typename T> T Compute(T x, T y) const { return x + y; } 3818 3819 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3820 return GetBlock()->GetGraph()->GetIntConstant( 3821 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3822 } 3823 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3824 return GetBlock()->GetGraph()->GetLongConstant( 3825 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3826 } 3827 3828 DECLARE_INSTRUCTION(Add); 3829 3830 private: 3831 DISALLOW_COPY_AND_ASSIGN(HAdd); 3832}; 3833 3834class HSub : public HBinaryOperation { 3835 public: 3836 HSub(Primitive::Type result_type, 3837 HInstruction* left, 3838 HInstruction* right, 3839 uint32_t dex_pc = kNoDexPc) 3840 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3841 3842 template <typename T> T Compute(T x, T y) const { return x - y; } 3843 3844 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3845 return GetBlock()->GetGraph()->GetIntConstant( 3846 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3847 } 3848 
HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3849 return GetBlock()->GetGraph()->GetLongConstant( 3850 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3851 } 3852 3853 DECLARE_INSTRUCTION(Sub); 3854 3855 private: 3856 DISALLOW_COPY_AND_ASSIGN(HSub); 3857}; 3858 3859class HMul : public HBinaryOperation { 3860 public: 3861 HMul(Primitive::Type result_type, 3862 HInstruction* left, 3863 HInstruction* right, 3864 uint32_t dex_pc = kNoDexPc) 3865 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3866 3867 bool IsCommutative() const OVERRIDE { return true; } 3868 3869 template <typename T> T Compute(T x, T y) const { return x * y; } 3870 3871 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3872 return GetBlock()->GetGraph()->GetIntConstant( 3873 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3874 } 3875 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3876 return GetBlock()->GetGraph()->GetLongConstant( 3877 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3878 } 3879 3880 DECLARE_INSTRUCTION(Mul); 3881 3882 private: 3883 DISALLOW_COPY_AND_ASSIGN(HMul); 3884}; 3885 3886class HDiv : public HBinaryOperation { 3887 public: 3888 HDiv(Primitive::Type result_type, 3889 HInstruction* left, 3890 HInstruction* right, 3891 uint32_t dex_pc) 3892 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3893 3894 template <typename T> 3895 T Compute(T x, T y) const { 3896 // Our graph structure ensures we never have 0 for `y` during 3897 // constant folding. 3898 DCHECK_NE(y, 0); 3899 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3900 return (y == -1) ? 
-x : x / y; 3901 } 3902 3903 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3904 return GetBlock()->GetGraph()->GetIntConstant( 3905 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3906 } 3907 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3908 return GetBlock()->GetGraph()->GetLongConstant( 3909 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3910 } 3911 3912 static SideEffects SideEffectsForArchRuntimeCalls() { 3913 // The generated code can use a runtime call. 3914 return SideEffects::CanTriggerGC(); 3915 } 3916 3917 DECLARE_INSTRUCTION(Div); 3918 3919 private: 3920 DISALLOW_COPY_AND_ASSIGN(HDiv); 3921}; 3922 3923class HRem : public HBinaryOperation { 3924 public: 3925 HRem(Primitive::Type result_type, 3926 HInstruction* left, 3927 HInstruction* right, 3928 uint32_t dex_pc) 3929 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3930 3931 template <typename T> 3932 T Compute(T x, T y) const { 3933 // Our graph structure ensures we never have 0 for `y` during 3934 // constant folding. 3935 DCHECK_NE(y, 0); 3936 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3937 return (y == -1) ? 
0 : x % y; 3938 } 3939 3940 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3941 return GetBlock()->GetGraph()->GetIntConstant( 3942 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3943 } 3944 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3945 return GetBlock()->GetGraph()->GetLongConstant( 3946 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3947 } 3948 3949 3950 static SideEffects SideEffectsForArchRuntimeCalls() { 3951 return SideEffects::CanTriggerGC(); 3952 } 3953 3954 DECLARE_INSTRUCTION(Rem); 3955 3956 private: 3957 DISALLOW_COPY_AND_ASSIGN(HRem); 3958}; 3959 3960class HDivZeroCheck : public HExpression<1> { 3961 public: 3962 HDivZeroCheck(HInstruction* value, uint32_t dex_pc) 3963 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 3964 SetRawInputAt(0, value); 3965 } 3966 3967 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); } 3968 3969 bool CanBeMoved() const OVERRIDE { return true; } 3970 3971 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 3972 return true; 3973 } 3974 3975 bool NeedsEnvironment() const OVERRIDE { return true; } 3976 bool CanThrow() const OVERRIDE { return true; } 3977 3978 DECLARE_INSTRUCTION(DivZeroCheck); 3979 3980 private: 3981 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck); 3982}; 3983 3984class HShl : public HBinaryOperation { 3985 public: 3986 HShl(Primitive::Type result_type, 3987 HInstruction* left, 3988 HInstruction* right, 3989 uint32_t dex_pc = kNoDexPc) 3990 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3991 3992 template <typename T, typename U, typename V> 3993 T Compute(T x, U y, V max_shift_value) const { 3994 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3995 "V is not the unsigned integer type corresponding to T"); 3996 return x << (y & max_shift_value); 3997 } 3998 3999 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 
4000 return GetBlock()->GetGraph()->GetIntConstant( 4001 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4002 } 4003 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4004 // case is handled as `x << static_cast<int>(y)`. 4005 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4006 return GetBlock()->GetGraph()->GetLongConstant( 4007 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4008 } 4009 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4010 return GetBlock()->GetGraph()->GetLongConstant( 4011 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4012 } 4013 4014 DECLARE_INSTRUCTION(Shl); 4015 4016 private: 4017 DISALLOW_COPY_AND_ASSIGN(HShl); 4018}; 4019 4020class HShr : public HBinaryOperation { 4021 public: 4022 HShr(Primitive::Type result_type, 4023 HInstruction* left, 4024 HInstruction* right, 4025 uint32_t dex_pc = kNoDexPc) 4026 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4027 4028 template <typename T, typename U, typename V> 4029 T Compute(T x, U y, V max_shift_value) const { 4030 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4031 "V is not the unsigned integer type corresponding to T"); 4032 return x >> (y & max_shift_value); 4033 } 4034 4035 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4036 return GetBlock()->GetGraph()->GetIntConstant( 4037 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4038 } 4039 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4040 // case is handled as `x >> static_cast<int>(y)`. 
4041 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4042 return GetBlock()->GetGraph()->GetLongConstant( 4043 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4044 } 4045 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4046 return GetBlock()->GetGraph()->GetLongConstant( 4047 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4048 } 4049 4050 DECLARE_INSTRUCTION(Shr); 4051 4052 private: 4053 DISALLOW_COPY_AND_ASSIGN(HShr); 4054}; 4055 4056class HUShr : public HBinaryOperation { 4057 public: 4058 HUShr(Primitive::Type result_type, 4059 HInstruction* left, 4060 HInstruction* right, 4061 uint32_t dex_pc = kNoDexPc) 4062 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4063 4064 template <typename T, typename U, typename V> 4065 T Compute(T x, U y, V max_shift_value) const { 4066 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4067 "V is not the unsigned integer type corresponding to T"); 4068 V ux = static_cast<V>(x); 4069 return static_cast<T>(ux >> (y & max_shift_value)); 4070 } 4071 4072 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4073 return GetBlock()->GetGraph()->GetIntConstant( 4074 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4075 } 4076 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4077 // case is handled as `x >>> static_cast<int>(y)`. 
4078 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4079 return GetBlock()->GetGraph()->GetLongConstant( 4080 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4081 } 4082 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4083 return GetBlock()->GetGraph()->GetLongConstant( 4084 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4085 } 4086 4087 DECLARE_INSTRUCTION(UShr); 4088 4089 private: 4090 DISALLOW_COPY_AND_ASSIGN(HUShr); 4091}; 4092 4093class HAnd : public HBinaryOperation { 4094 public: 4095 HAnd(Primitive::Type result_type, 4096 HInstruction* left, 4097 HInstruction* right, 4098 uint32_t dex_pc = kNoDexPc) 4099 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4100 4101 bool IsCommutative() const OVERRIDE { return true; } 4102 4103 template <typename T, typename U> 4104 auto Compute(T x, U y) const -> decltype(x & y) { return x & y; } 4105 4106 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4107 return GetBlock()->GetGraph()->GetIntConstant( 4108 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4109 } 4110 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4111 return GetBlock()->GetGraph()->GetLongConstant( 4112 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4113 } 4114 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4115 return GetBlock()->GetGraph()->GetLongConstant( 4116 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4117 } 4118 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4119 return GetBlock()->GetGraph()->GetLongConstant( 4120 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4121 } 4122 4123 DECLARE_INSTRUCTION(And); 4124 4125 private: 4126 DISALLOW_COPY_AND_ASSIGN(HAnd); 4127}; 4128 4129class HOr : public HBinaryOperation { 4130 public: 4131 HOr(Primitive::Type result_type, 4132 HInstruction* left, 4133 HInstruction* 
right, 4134 uint32_t dex_pc = kNoDexPc) 4135 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4136 4137 bool IsCommutative() const OVERRIDE { return true; } 4138 4139 template <typename T, typename U> 4140 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 4141 4142 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4143 return GetBlock()->GetGraph()->GetIntConstant( 4144 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4145 } 4146 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4147 return GetBlock()->GetGraph()->GetLongConstant( 4148 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4149 } 4150 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4151 return GetBlock()->GetGraph()->GetLongConstant( 4152 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4153 } 4154 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4155 return GetBlock()->GetGraph()->GetLongConstant( 4156 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4157 } 4158 4159 DECLARE_INSTRUCTION(Or); 4160 4161 private: 4162 DISALLOW_COPY_AND_ASSIGN(HOr); 4163}; 4164 4165class HXor : public HBinaryOperation { 4166 public: 4167 HXor(Primitive::Type result_type, 4168 HInstruction* left, 4169 HInstruction* right, 4170 uint32_t dex_pc = kNoDexPc) 4171 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4172 4173 bool IsCommutative() const OVERRIDE { return true; } 4174 4175 template <typename T, typename U> 4176 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 4177 4178 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4179 return GetBlock()->GetGraph()->GetIntConstant( 4180 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4181 } 4182 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4183 return GetBlock()->GetGraph()->GetLongConstant( 4184 Compute(x->GetValue(), y->GetValue()), 
GetDexPc()); 4185 } 4186 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4187 return GetBlock()->GetGraph()->GetLongConstant( 4188 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4189 } 4190 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4191 return GetBlock()->GetGraph()->GetLongConstant( 4192 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4193 } 4194 4195 DECLARE_INSTRUCTION(Xor); 4196 4197 private: 4198 DISALLOW_COPY_AND_ASSIGN(HXor); 4199}; 4200 4201// The value of a parameter in this method. Its location depends on 4202// the calling convention. 4203class HParameterValue : public HExpression<0> { 4204 public: 4205 HParameterValue(const DexFile& dex_file, 4206 uint16_t type_index, 4207 uint8_t index, 4208 Primitive::Type parameter_type, 4209 bool is_this = false) 4210 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 4211 dex_file_(dex_file), 4212 type_index_(type_index), 4213 index_(index), 4214 is_this_(is_this), 4215 can_be_null_(!is_this) {} 4216 4217 const DexFile& GetDexFile() const { return dex_file_; } 4218 uint16_t GetTypeIndex() const { return type_index_; } 4219 uint8_t GetIndex() const { return index_; } 4220 bool IsThis() const { return is_this_; } 4221 4222 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4223 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4224 4225 DECLARE_INSTRUCTION(ParameterValue); 4226 4227 private: 4228 const DexFile& dex_file_; 4229 const uint16_t type_index_; 4230 // The index of this parameter in the parameters list. Must be less 4231 // than HGraph::number_of_in_vregs_. 4232 const uint8_t index_; 4233 4234 // Whether or not the parameter value corresponds to 'this' argument. 
4235 const bool is_this_; 4236 4237 bool can_be_null_; 4238 4239 DISALLOW_COPY_AND_ASSIGN(HParameterValue); 4240}; 4241 4242class HNot : public HUnaryOperation { 4243 public: 4244 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 4245 : HUnaryOperation(result_type, input, dex_pc) {} 4246 4247 bool CanBeMoved() const OVERRIDE { return true; } 4248 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4249 return true; 4250 } 4251 4252 template <typename T> T Compute(T x) const { return ~x; } 4253 4254 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4255 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4256 } 4257 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 4258 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 4259 } 4260 4261 DECLARE_INSTRUCTION(Not); 4262 4263 private: 4264 DISALLOW_COPY_AND_ASSIGN(HNot); 4265}; 4266 4267class HBooleanNot : public HUnaryOperation { 4268 public: 4269 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc) 4270 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {} 4271 4272 bool CanBeMoved() const OVERRIDE { return true; } 4273 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4274 return true; 4275 } 4276 4277 template <typename T> bool Compute(T x) const { 4278 DCHECK(IsUint<1>(x)); 4279 return !x; 4280 } 4281 4282 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4283 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4284 } 4285 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE { 4286 LOG(FATAL) << DebugName() << " is not defined for long values"; 4287 UNREACHABLE(); 4288 } 4289 4290 DECLARE_INSTRUCTION(BooleanNot); 4291 4292 private: 4293 DISALLOW_COPY_AND_ASSIGN(HBooleanNot); 4294}; 4295 4296class HTypeConversion : public HExpression<1> { 4297 public: 4298 // Instantiate 
a type conversion of `input` to `result_type`. 4299 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc) 4300 : HExpression(result_type, 4301 SideEffectsForArchRuntimeCalls(input->GetType(), result_type), 4302 dex_pc) { 4303 SetRawInputAt(0, input); 4304 DCHECK_NE(input->GetType(), result_type); 4305 } 4306 4307 HInstruction* GetInput() const { return InputAt(0); } 4308 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 4309 Primitive::Type GetResultType() const { return GetType(); } 4310 4311 // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls 4312 // to the runtime. 4313 4314 bool CanBeMoved() const OVERRIDE { return true; } 4315 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; } 4316 4317 // Try to statically evaluate the conversion and return a HConstant 4318 // containing the result. If the input cannot be converted, return nullptr. 4319 HConstant* TryStaticEvaluation() const; 4320 4321 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type, 4322 Primitive::Type result_type) { 4323 // Some architectures may not require the 'GC' side effects, but at this point 4324 // in the compilation process we do not know what architecture we will 4325 // generate code for, so we must be conservative. 
4326 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type)) 4327 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) { 4328 return SideEffects::CanTriggerGC(); 4329 } 4330 return SideEffects::None(); 4331 } 4332 4333 DECLARE_INSTRUCTION(TypeConversion); 4334 4335 private: 4336 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 4337}; 4338 4339static constexpr uint32_t kNoRegNumber = -1; 4340 4341class HPhi : public HInstruction { 4342 public: 4343 HPhi(ArenaAllocator* arena, 4344 uint32_t reg_number, 4345 size_t number_of_inputs, 4346 Primitive::Type type, 4347 uint32_t dex_pc = kNoDexPc) 4348 : HInstruction(SideEffects::None(), dex_pc), 4349 inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)), 4350 reg_number_(reg_number), 4351 type_(ToPhiType(type)), 4352 // Phis are constructed live and marked dead if conflicting or unused. 4353 // Individual steps of SsaBuilder should assume that if a phi has been 4354 // marked dead, it can be ignored and will be removed by SsaPhiElimination. 4355 is_live_(true), 4356 can_be_null_(true) { 4357 DCHECK_NE(type_, Primitive::kPrimVoid); 4358 } 4359 4360 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold. 
4361 static Primitive::Type ToPhiType(Primitive::Type type) { 4362 switch (type) { 4363 case Primitive::kPrimBoolean: 4364 case Primitive::kPrimByte: 4365 case Primitive::kPrimShort: 4366 case Primitive::kPrimChar: 4367 return Primitive::kPrimInt; 4368 default: 4369 return type; 4370 } 4371 } 4372 4373 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); } 4374 4375 size_t InputCount() const OVERRIDE { return inputs_.size(); } 4376 4377 void AddInput(HInstruction* input); 4378 void RemoveInputAt(size_t index); 4379 4380 Primitive::Type GetType() const OVERRIDE { return type_; } 4381 void SetType(Primitive::Type type) { type_ = type; } 4382 4383 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4384 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4385 4386 uint32_t GetRegNumber() const { return reg_number_; } 4387 4388 void SetDead() { is_live_ = false; } 4389 void SetLive() { is_live_ = true; } 4390 bool IsDead() const { return !is_live_; } 4391 bool IsLive() const { return is_live_; } 4392 4393 bool IsVRegEquivalentOf(HInstruction* other) const { 4394 return other != nullptr 4395 && other->IsPhi() 4396 && other->AsPhi()->GetBlock() == GetBlock() 4397 && other->AsPhi()->GetRegNumber() == GetRegNumber(); 4398 } 4399 4400 // Returns the next equivalent phi (starting from the current one) or null if there is none. 4401 // An equivalent phi is a phi having the same dex register and type. 4402 // It assumes that phis with the same dex register are adjacent. 
4403 HPhi* GetNextEquivalentPhiWithSameType() { 4404 HInstruction* next = GetNext(); 4405 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 4406 if (next->GetType() == GetType()) { 4407 return next->AsPhi(); 4408 } 4409 next = next->GetNext(); 4410 } 4411 return nullptr; 4412 } 4413 4414 DECLARE_INSTRUCTION(Phi); 4415 4416 protected: 4417 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 4418 return inputs_[index]; 4419 } 4420 4421 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 4422 inputs_[index] = input; 4423 } 4424 4425 private: 4426 ArenaVector<HUserRecord<HInstruction*> > inputs_; 4427 const uint32_t reg_number_; 4428 Primitive::Type type_; 4429 bool is_live_; 4430 bool can_be_null_; 4431 4432 DISALLOW_COPY_AND_ASSIGN(HPhi); 4433}; 4434 4435class HNullCheck : public HExpression<1> { 4436 public: 4437 HNullCheck(HInstruction* value, uint32_t dex_pc) 4438 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 4439 SetRawInputAt(0, value); 4440 } 4441 4442 bool CanBeMoved() const OVERRIDE { return true; } 4443 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4444 return true; 4445 } 4446 4447 bool NeedsEnvironment() const OVERRIDE { return true; } 4448 4449 bool CanThrow() const OVERRIDE { return true; } 4450 4451 bool CanBeNull() const OVERRIDE { return false; } 4452 4453 4454 DECLARE_INSTRUCTION(NullCheck); 4455 4456 private: 4457 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 4458}; 4459 4460class FieldInfo : public ValueObject { 4461 public: 4462 FieldInfo(MemberOffset field_offset, 4463 Primitive::Type field_type, 4464 bool is_volatile, 4465 uint32_t index, 4466 uint16_t declaring_class_def_index, 4467 const DexFile& dex_file, 4468 Handle<mirror::DexCache> dex_cache) 4469 : field_offset_(field_offset), 4470 field_type_(field_type), 4471 is_volatile_(is_volatile), 4472 index_(index), 4473 
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // Field index into the dex file.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Read of an instance field. Input 0 is the object holding the field.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads must not be reordered/hoisted, so they cannot be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // A load from a null object at an offset within the first page still
    // faults, so the hardware null check covers this access.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    // Mix the field offset into the base hash for GVN bucketing.
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

// Write of an instance field. Input 0 is the object, input 1 the value stored.
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Same rationale as HInstanceFieldGet: offsets below kPageSize fault on null.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  // Called by analyses that prove the stored value is non-null, allowing
  // codegen to skip write-barrier/null-related work.
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

// Read of an array element. Input 0 is the array, input 1 the index.
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Overwrites the recorded result type. NOTE(review): presumably used when
  // type inference refines an array read (e.g. float vs int) — confirm with callers.
  void SetType(Primitive::Type type) { type_ = type; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

// Write of an array element. Inputs: 0 = array, 1 = index, 2 = value.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc),
        expected_component_type_(expected_component_type),
        // Storing a reference may require an ArrayStoreException type check.
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    needs_type_check_ = false;
  }

  void ClearValueCanBeNull() {
    value_can_be_null_ = false;
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    static_type_of_array_is_object_array_ = true;
  }

  bool GetValueCanBeNull() const { return value_can_be_null_; }
  bool NeedsTypeCheck() const { return needs_type_check_; }
  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : expected_component_type_;
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return expected_component_type_;
  }

  // Reference stores may call into the runtime, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  const Primitive::Type expected_component_type_;
  bool needs_type_check_;
  bool value_can_be_null_;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  bool static_type_of_array_is_object_array_;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

// Loads the length of an array. Input 0 is the array reference.
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);  // NOTE(review): see below; this line belongs to HArrayLength in the original -- keep as in source.
 * `index` represents the stack location index of the
 * instruction (the actual offset is computed as index * vreg_size).
 */
class HTemporary : public HTemplateInstruction<0> {
 public:
  explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {}

  size_t GetIndex() const { return index_; }

  Primitive::Type GetType() const OVERRIDE {
    // The previous instruction is the one that will be stored in the temporary location.
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  // Stack location index; see class comment.
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Safepoint at which a thread can yield (e.g. for GC); inserted on loop
// back edges and method entry by the builder.
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of the same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check,
             bool is_in_dex_cache)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        is_in_dex_cache_(is_in_dex_cache),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime();
  }

  bool MustGenerateClinitCheck() const {
    return generate_clinit_check_;
  }

  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    generate_clinit_check_ = generate_clinit_check;
  }

  // Runtime call is needed for clinit, for a class not yet in the dex cache
  // (unless it is the referrer's own class), or for an access check.
  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() ||
        (!is_referrers_class_ && !is_in_dex_cache_) ||
        needs_access_check_;
  }

  bool NeedsAccessCheck() const {
    return needs_access_check_;
  }

  bool CanThrow() const OVERRIDE {
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsInDexCache() const { return is_in_dex_cache_; }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool is_referrers_class_;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  bool generate_clinit_check_;
  const bool needs_access_check_;
  const bool is_in_dex_cache_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};

// Instruction to load a String object from the dex cache.
class HLoadString : public HExpression<1> {
 public:
  HLoadString(HCurrentMethod* current_method,
              uint32_t string_index,
              uint32_t dex_pc,
              bool is_in_dex_cache)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        string_index_(string_index),
        is_in_dex_cache_(is_in_dex_cache) {
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return other->AsLoadString()->string_index_ == string_index_;
  }

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  uint32_t GetStringIndex() const { return string_index_; }

  // TODO: Can we deopt or debug when we resolve a string?
  bool NeedsEnvironment() const OVERRIDE { return false; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; }
  bool CanBeNull() const OVERRIDE { return false; }
  bool IsInDexCache() const { return is_in_dex_cache_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  const uint32_t string_index_;
  const bool is_in_dex_cache_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Read of a static field. Input 0 is the class holding the field.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads must not be reordered/hoisted, so they cannot be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    // Mix the field offset into the base hash for GVN bucketing.
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};

// Write of a static field. Input 0 is the class, input 1 the value stored.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  const FieldInfo field_info_;
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};

// Read of an instance field whose resolution failed at compile time;
// always performed through a runtime call.
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};

// Write of an instance field whose resolution failed at compile time;
// always performed through a runtime call.
class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
};

// Read of a static field whose resolution failed at compile time;
// always performed through a runtime call.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};

// Write of a static field whose resolution failed at compile time;
// always performed through a runtime call.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};

// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};

// Throws the exception object given as input 0; ends the basic block.
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};

/**
 * Implementation strategies for the code generator of a HInstanceOf
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck             // No optimization yet when checking against a generic array.
};

// The `instanceof` operation: input 0 is the tested object, input 1 the
// HLoadClass for the target type; result is a boolean.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return false;
  }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return (check_kind == TypeCheckKind::kExactCheck)
        ? SideEffects::None()
        // Mips currently does runtime calls for any other checks.
        : SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

class HBoundType : public HExpression<1> {
 public:
  // Constructs an HBoundType with the given upper_bound.
  // Ensures that the upper_bound is valid.
  HBoundType(HInstruction* input,
             ReferenceTypeInfo upper_bound,
             bool upper_can_be_null,
             uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(upper_bound),
        upper_can_be_null_(upper_can_be_null),
        can_be_null_(upper_can_be_null) {
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
    SetReferenceTypeInfo(upper_bound_);
  }

  // GetUpper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return upper_can_be_null_; }

  void SetCanBeNull(bool can_be_null) {
    DCHECK(upper_can_be_null_ || !can_be_null);
    can_be_null_ = can_be_null;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  const ReferenceTypeInfo upper_bound_;
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then can_be_null_ cannot be true).
  const bool upper_can_be_null_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// The `checkcast` operation: throws CheckCastError when input 0 is not an
// instance of the type loaded by input 1.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

// Emits a memory barrier of the given kind.
class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
        barrier_kind_(barrier_kind) {}

  MemBarrierKind GetBarrierKind() { return barrier_kind_; }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  const MemBarrierKind barrier_kind_;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};

// monitor-enter / monitor-exit on the object given as input 0.
class HMonitorOperation : public HTemplateInstruction<1> {
 public:
  enum OperationKind {
    kEnter,
    kExit,
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
    : HTemplateInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      kind_(kind) {
    SetRawInputAt(0, object);
  }

  // Instruction may throw a Java exception, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return CanThrow(); }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }


  bool IsEnter() const { return kind_ == kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 private:
  const OperationKind kind_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
};

/**
 * A HInstruction used as a marker for the replacement of new + <init>
 * of a String to a call to a StringFactory. Only baseline will see
 * the node at code generation, where it will be treated as null.
 * When compiling non-baseline, `HFakeString` instructions are being removed
 * in the instruction simplifier.
 */
class HFakeString : public HTemplateInstruction<0> {
 public:
  explicit HFakeString(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; }

  DECLARE_INSTRUCTION(FakeString);

 private:
  DISALLOW_COPY_AND_ASSIGN(HFakeString);
};

// A single source->destination move handled by the parallel move resolver.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return destination_.IsInvalid() && !source_.IsInvalid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must appear to execute simultaneously (register shuffles
// emitted by the register allocator).
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
5565 if ((GetPrevious() == instruction) 5566 || ((GetPrevious() == nullptr) 5567 && instruction->IsPhi() 5568 && instruction->GetBlock() == GetBlock())) { 5569 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5570 << "Doing parallel moves for the same instruction."; 5571 } else { 5572 DCHECK(false) << "Doing parallel moves for the same instruction."; 5573 } 5574 } 5575 } 5576 } 5577 for (const MoveOperands& move : moves_) { 5578 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5579 << "Overlapped destination for two moves in a parallel move: " 5580 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5581 << source << " ==> " << destination; 5582 } 5583 } 5584 moves_.emplace_back(source, destination, type, instruction); 5585 } 5586 5587 MoveOperands* MoveOperandsAt(size_t index) { 5588 return &moves_[index]; 5589 } 5590 5591 size_t NumMoves() const { return moves_.size(); } 5592 5593 DECLARE_INSTRUCTION(ParallelMove); 5594 5595 private: 5596 ArenaVector<MoveOperands> moves_; 5597 5598 DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5599}; 5600 5601} // namespace art 5602 5603#ifdef ART_ENABLE_CODEGEN_arm 5604#include "nodes_arm.h" 5605#endif 5606#ifdef ART_ENABLE_CODEGEN_arm64 5607#include "nodes_arm64.h" 5608#endif 5609#ifdef ART_ENABLE_CODEGEN_x86 5610#include "nodes_x86.h" 5611#endif 5612 5613namespace art { 5614 5615class HGraphVisitor : public ValueObject { 5616 public: 5617 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5618 virtual ~HGraphVisitor() {} 5619 5620 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} 5621 virtual void VisitBasicBlock(HBasicBlock* block); 5622 5623 // Visit the graph following basic block insertion order. 5624 void VisitInsertionOrder(); 5625 5626 // Visit the graph following dominator tree reverse post-order. 5627 void VisitReversePostOrder(); 5628 5629 HGraph* GetGraph() const { return graph_; } 5630 5631 // Visit functions for instruction classes. 
5632#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5633 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5634 5635 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5636 5637#undef DECLARE_VISIT_INSTRUCTION 5638 5639 private: 5640 HGraph* const graph_; 5641 5642 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5643}; 5644 5645class HGraphDelegateVisitor : public HGraphVisitor { 5646 public: 5647 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5648 virtual ~HGraphDelegateVisitor() {} 5649 5650 // Visit functions that delegate to to super class. 5651#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5652 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5653 5654 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5655 5656#undef DECLARE_VISIT_INSTRUCTION 5657 5658 private: 5659 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5660}; 5661 5662class HInsertionOrderIterator : public ValueObject { 5663 public: 5664 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5665 5666 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5667 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5668 void Advance() { ++index_; } 5669 5670 private: 5671 const HGraph& graph_; 5672 size_t index_; 5673 5674 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5675}; 5676 5677class HReversePostOrderIterator : public ValueObject { 5678 public: 5679 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5680 // Check that reverse post order of the graph has been built. 
5681 DCHECK(!graph.GetReversePostOrder().empty()); 5682 } 5683 5684 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5685 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5686 void Advance() { ++index_; } 5687 5688 private: 5689 const HGraph& graph_; 5690 size_t index_; 5691 5692 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5693}; 5694 5695class HPostOrderIterator : public ValueObject { 5696 public: 5697 explicit HPostOrderIterator(const HGraph& graph) 5698 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5699 // Check that reverse post order of the graph has been built. 5700 DCHECK(!graph.GetReversePostOrder().empty()); 5701 } 5702 5703 bool Done() const { return index_ == 0; } 5704 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5705 void Advance() { --index_; } 5706 5707 private: 5708 const HGraph& graph_; 5709 size_t index_; 5710 5711 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5712}; 5713 5714class HLinearPostOrderIterator : public ValueObject { 5715 public: 5716 explicit HLinearPostOrderIterator(const HGraph& graph) 5717 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5718 5719 bool Done() const { return index_ == 0; } 5720 5721 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5722 5723 void Advance() { 5724 --index_; 5725 DCHECK_GE(index_, 0U); 5726 } 5727 5728 private: 5729 const ArenaVector<HBasicBlock*>& order_; 5730 size_t index_; 5731 5732 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5733}; 5734 5735class HLinearOrderIterator : public ValueObject { 5736 public: 5737 explicit HLinearOrderIterator(const HGraph& graph) 5738 : order_(graph.GetLinearOrder()), index_(0) {} 5739 5740 bool Done() const { return index_ == order_.size(); } 5741 HBasicBlock* Current() const { return order_[index_]; } 5742 void Advance() { ++index_; } 5743 5744 private: 5745 const ArenaVector<HBasicBlock*>& order_; 5746 
size_t index_; 5747 5748 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5749}; 5750 5751// Iterator over the blocks that art part of the loop. Includes blocks part 5752// of an inner loop. The order in which the blocks are iterated is on their 5753// block id. 5754class HBlocksInLoopIterator : public ValueObject { 5755 public: 5756 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5757 : blocks_in_loop_(info.GetBlocks()), 5758 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5759 index_(0) { 5760 if (!blocks_in_loop_.IsBitSet(index_)) { 5761 Advance(); 5762 } 5763 } 5764 5765 bool Done() const { return index_ == blocks_.size(); } 5766 HBasicBlock* Current() const { return blocks_[index_]; } 5767 void Advance() { 5768 ++index_; 5769 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5770 if (blocks_in_loop_.IsBitSet(index_)) { 5771 break; 5772 } 5773 } 5774 } 5775 5776 private: 5777 const BitVector& blocks_in_loop_; 5778 const ArenaVector<HBasicBlock*>& blocks_; 5779 size_t index_; 5780 5781 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5782}; 5783 5784// Iterator over the blocks that art part of the loop. Includes blocks part 5785// of an inner loop. The order in which the blocks are iterated is reverse 5786// post order. 
5787class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5788 public: 5789 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5790 : blocks_in_loop_(info.GetBlocks()), 5791 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5792 index_(0) { 5793 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5794 Advance(); 5795 } 5796 } 5797 5798 bool Done() const { return index_ == blocks_.size(); } 5799 HBasicBlock* Current() const { return blocks_[index_]; } 5800 void Advance() { 5801 ++index_; 5802 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5803 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5804 break; 5805 } 5806 } 5807 } 5808 5809 private: 5810 const BitVector& blocks_in_loop_; 5811 const ArenaVector<HBasicBlock*>& blocks_; 5812 size_t index_; 5813 5814 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5815}; 5816 5817inline int64_t Int64FromConstant(HConstant* constant) { 5818 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5819 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5820 : constant->AsLongConstant()->GetValue(); 5821} 5822 5823inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5824 // For the purposes of the compiler, the dex files must actually be the same object 5825 // if we want to safely treat them as the same. This is especially important for JIT 5826 // as custom class loaders can open the same underlying file (or memory) multiple 5827 // times and provide different class resolution but no two class loaders should ever 5828 // use the same DexFile object - doing so is an unsupported hack that can lead to 5829 // all sorts of weird failures. 5830 return &lhs == &rhs; 5831} 5832 5833} // namespace art 5834 5835#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5836