nodes.h revision 98893e146b0ff0e1fd1d7c29252f1d1e75a163f2
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_containers.h" 25#include "base/arena_object.h" 26#include "base/stl_util.h" 27#include "dex/compiler_enums.h" 28#include "entrypoints/quick/quick_entrypoints_enum.h" 29#include "handle.h" 30#include "handle_scope.h" 31#include "invoke_type.h" 32#include "locations.h" 33#include "method_reference.h" 34#include "mirror/class.h" 35#include "offsets.h" 36#include "primitive.h" 37#include "utils/arena_bit_vector.h" 38 39namespace art { 40 41class GraphChecker; 42class HBasicBlock; 43class HCurrentMethod; 44class HDoubleConstant; 45class HEnvironment; 46class HFakeString; 47class HFloatConstant; 48class HGraphBuilder; 49class HGraphVisitor; 50class HInstruction; 51class HIntConstant; 52class HInvoke; 53class HLongConstant; 54class HNullConstant; 55class HPhi; 56class HSuspendCheck; 57class HTryBoundary; 58class LiveInterval; 59class LocationSummary; 60class SlowPathCode; 61class SsaBuilder; 62 63namespace mirror { 64class DexCache; 65} // namespace mirror 66 67static const int kDefaultNumberOfBlocks = 8; 68static const int kDefaultNumberOfSuccessors = 2; 69static const int kDefaultNumberOfPredecessors = 2; 70static const int kDefaultNumberOfExceptionalPredecessors = 0; 71static const int kDefaultNumberOfDominatedBlocks = 1; 72static const int kDefaultNumberOfBackEdges = 1; 73 74static constexpr uint32_t kMaxIntShiftValue = 0x1f; 75static constexpr uint64_t kMaxLongShiftValue = 0x3f; 76 77static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 78 79static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 80 81static constexpr uint32_t kNoDexPc = -1; 82 83enum IfCondition { 84 kCondEQ, 85 kCondNE, 86 kCondLT, 87 kCondLE, 88 kCondGT, 89 kCondGE, 90}; 91 92class HInstructionList : public ValueObject { 93 public: 94 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 95 96 void AddInstruction(HInstruction* instruction); 97 void RemoveInstruction(HInstruction* instruction); 98 99 // Insert `instruction` before/after an existing instruction `cursor`. 100 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 101 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 102 103 // Return true if this list contains `instruction`. 104 bool Contains(HInstruction* instruction) const; 105 106 // Return true if `instruction1` is found before `instruction2` in 107 // this instruction list and false otherwise. Abort if none 108 // of these instructions is found. 109 bool FoundBefore(const HInstruction* instruction1, 110 const HInstruction* instruction2) const; 111 112 bool IsEmpty() const { return first_instruction_ == nullptr; } 113 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 114 115 // Update the block of all instructions to be `block`. 116 void SetBlockOfInstructions(HBasicBlock* block) const; 117 118 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 119 void Add(const HInstructionList& instruction_list); 120 121 // Return the number of instructions in the list. This is an expensive operation. 122 size_t CountSize() const; 123 124 private: 125 HInstruction* first_instruction_; 126 HInstruction* last_instruction_; 127 128 friend class HBasicBlock; 129 friend class HGraph; 130 friend class HInstruction; 131 friend class HInstructionIterator; 132 friend class HBackwardInstructionIterator; 133 134 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 135}; 136 137// Control-flow graph of a method. Contains a list of basic blocks. 138class HGraph : public ArenaObject<kArenaAllocGraph> { 139 public: 140 HGraph(ArenaAllocator* arena, 141 const DexFile& dex_file, 142 uint32_t method_idx, 143 bool should_generate_constructor_barrier, 144 InstructionSet instruction_set, 145 InvokeType invoke_type = kInvalidInvokeType, 146 bool debuggable = false, 147 int start_instruction_id = 0) 148 : arena_(arena), 149 blocks_(arena->Adapter(kArenaAllocBlockList)), 150 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 151 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 152 entry_block_(nullptr), 153 exit_block_(nullptr), 154 maximum_number_of_out_vregs_(0), 155 number_of_vregs_(0), 156 number_of_in_vregs_(0), 157 temporaries_vreg_slots_(0), 158 has_bounds_checks_(false), 159 has_try_catch_(false), 160 debuggable_(debuggable), 161 current_instruction_id_(start_instruction_id), 162 dex_file_(dex_file), 163 method_idx_(method_idx), 164 invoke_type_(invoke_type), 165 in_ssa_form_(false), 166 should_generate_constructor_barrier_(should_generate_constructor_barrier), 167 instruction_set_(instruction_set), 168 cached_null_constant_(nullptr), 169 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 170 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 171 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 172 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 173 cached_current_method_(nullptr) { 174 blocks_.reserve(kDefaultNumberOfBlocks); 175 } 176 177 ArenaAllocator* GetArena() const { return arena_; } 178 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 179 180 HBasicBlock* GetBlock(size_t id) const { 181 DCHECK_LT(id, blocks_.size()); 182 return blocks_[id]; 183 } 184 185 bool IsInSsaForm() const { return in_ssa_form_; } 186 187 HBasicBlock* GetEntryBlock() const { return entry_block_; } 188 HBasicBlock* GetExitBlock() const { return exit_block_; } 189 bool HasExitBlock() const { return exit_block_ != nullptr; } 190 191 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 192 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 193 194 void AddBlock(HBasicBlock* block); 195 196 // Try building the SSA form of this graph, with dominance computation and loop 197 // recognition. Returns whether it was successful in doing all these steps. 198 bool TryBuildingSsa() { 199 BuildDominatorTree(); 200 // The SSA builder requires loops to all be natural. Specifically, the dead phi 201 // elimination phase checks the consistency of the graph when doing a post-order 202 // visit for eliminating dead phis: a dead phi can only have loop header phi 203 // users remaining when being visited. 204 if (!AnalyzeNaturalLoops()) return false; 205 // Precompute per-block try membership before entering the SSA builder, 206 // which needs the information to build catch block phis from values of 207 // locals at throwing instructions inside try blocks. 208 ComputeTryBlockInformation(); 209 TransformToSsa(); 210 in_ssa_form_ = true; 211 return true; 212 } 213 214 void ComputeDominanceInformation(); 215 void ClearDominanceInformation(); 216 217 void BuildDominatorTree(); 218 void TransformToSsa(); 219 void SimplifyCFG(); 220 void SimplifyCatchBlocks(); 221 222 // Analyze all natural loops in this graph. Returns false if one 223 // loop is not natural, that is the header does not dominate the 224 // back edge. 225 bool AnalyzeNaturalLoops() const; 226 227 // Iterate over blocks to compute try block membership. Needs reverse post 228 // order and loop information. 229 void ComputeTryBlockInformation(); 230 231 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 232 // Returns the instruction used to replace the invoke expression or null if the 233 // invoke is for a void method. 234 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 235 236 // Need to add a couple of blocks to test if the loop body is entered and 237 // put deoptimization instructions, etc. 238 void TransformLoopHeaderForBCE(HBasicBlock* header); 239 240 // Removes `block` from the graph. 241 void DeleteDeadBlock(HBasicBlock* block); 242 243 // Splits the edge between `block` and `successor` while preserving the 244 // indices in the predecessor/successor lists. If there are multiple edges 245 // between the blocks, the lowest indices are used. 246 // Returns the new block which is empty and has the same dex pc as `successor`. 247 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 248 249 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 250 void SimplifyLoop(HBasicBlock* header); 251 252 int32_t GetNextInstructionId() { 253 DCHECK_NE(current_instruction_id_, INT32_MAX); 254 return current_instruction_id_++; 255 } 256 257 int32_t GetCurrentInstructionId() const { 258 return current_instruction_id_; 259 } 260 261 void SetCurrentInstructionId(int32_t id) { 262 current_instruction_id_ = id; 263 } 264 265 uint16_t GetMaximumNumberOfOutVRegs() const { 266 return maximum_number_of_out_vregs_; 267 } 268 269 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 270 maximum_number_of_out_vregs_ = new_value; 271 } 272 273 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 274 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 275 } 276 277 void UpdateTemporariesVRegSlots(size_t slots) { 278 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 279 } 280 281 size_t GetTemporariesVRegSlots() const { 282 DCHECK(!in_ssa_form_); 283 return temporaries_vreg_slots_; 284 } 285 286 void SetNumberOfVRegs(uint16_t number_of_vregs) { 287 number_of_vregs_ = number_of_vregs; 288 } 289 290 uint16_t GetNumberOfVRegs() const { 291 return number_of_vregs_; 292 } 293 294 void SetNumberOfInVRegs(uint16_t value) { 295 number_of_in_vregs_ = value; 296 } 297 298 uint16_t GetNumberOfLocalVRegs() const { 299 DCHECK(!in_ssa_form_); 300 return number_of_vregs_ - number_of_in_vregs_; 301 } 302 303 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 304 return reverse_post_order_; 305 } 306 307 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 308 return linear_order_; 309 } 310 311 bool HasBoundsChecks() const { 312 return has_bounds_checks_; 313 } 314 315 void SetHasBoundsChecks(bool value) { 316 has_bounds_checks_ = value; 317 } 318 319 bool ShouldGenerateConstructorBarrier() const { 320 return should_generate_constructor_barrier_; 321 } 322 323 bool IsDebuggable() const { return debuggable_; } 324 325 // Returns a constant of the given type and value. If it does not exist 326 // already, it is created and inserted into the graph. This method is only for 327 // integral types. 328 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 329 330 // TODO: This is problematic for the consistency of reference type propagation 331 // because it can be created anytime after the pass and thus it will be left 332 // with an invalid type. 333 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 334 335 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 336 return CreateConstant(value, &cached_int_constants_, dex_pc); 337 } 338 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 339 return CreateConstant(value, &cached_long_constants_, dex_pc); 340 } 341 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 342 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 343 } 344 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 345 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 346 } 347 348 HCurrentMethod* GetCurrentMethod(); 349 350 HBasicBlock* FindCommonDominator(HBasicBlock* first, HBasicBlock* second) const; 351 352 const DexFile& GetDexFile() const { 353 return dex_file_; 354 } 355 356 uint32_t GetMethodIdx() const { 357 return method_idx_; 358 } 359 360 InvokeType GetInvokeType() const { 361 return invoke_type_; 362 } 363 364 InstructionSet GetInstructionSet() const { 365 return instruction_set_; 366 } 367 368 bool HasTryCatch() const { return has_try_catch_; } 369 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 370 371 private: 372 void FindBackEdges(ArenaBitVector* visited); 373 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 374 void RemoveDeadBlocks(const ArenaBitVector& visited); 375 376 template <class InstructionType, typename ValueType> 377 InstructionType* CreateConstant(ValueType value, 378 ArenaSafeMap<ValueType, InstructionType*>* cache, 379 uint32_t dex_pc = kNoDexPc) { 380 // Try to find an existing constant of the given value. 381 InstructionType* constant = nullptr; 382 auto cached_constant = cache->find(value); 383 if (cached_constant != cache->end()) { 384 constant = cached_constant->second; 385 } 386 387 // If not found or previously deleted, create and cache a new instruction. 388 // Don't bother reviving a previously deleted instruction, for simplicity. 389 if (constant == nullptr || constant->GetBlock() == nullptr) { 390 constant = new (arena_) InstructionType(value, dex_pc); 391 cache->Overwrite(value, constant); 392 InsertConstant(constant); 393 } 394 return constant; 395 } 396 397 void InsertConstant(HConstant* instruction); 398 399 // Cache a float constant into the graph. This method should only be 400 // called by the SsaBuilder when creating "equivalent" instructions. 401 void CacheFloatConstant(HFloatConstant* constant); 402 403 // See CacheFloatConstant comment. 404 void CacheDoubleConstant(HDoubleConstant* constant); 405 406 ArenaAllocator* const arena_; 407 408 // List of blocks in insertion order. 409 ArenaVector<HBasicBlock*> blocks_; 410 411 // List of blocks to perform a reverse post order tree traversal. 412 ArenaVector<HBasicBlock*> reverse_post_order_; 413 414 // List of blocks to perform a linear order tree traversal. 415 ArenaVector<HBasicBlock*> linear_order_; 416 417 HBasicBlock* entry_block_; 418 HBasicBlock* exit_block_; 419 420 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 421 uint16_t maximum_number_of_out_vregs_; 422 423 // The number of virtual registers in this method. Contains the parameters. 424 uint16_t number_of_vregs_; 425 426 // The number of virtual registers used by parameters of this method. 427 uint16_t number_of_in_vregs_; 428 429 // Number of vreg size slots that the temporaries use (used in baseline compiler). 430 size_t temporaries_vreg_slots_; 431 432 // Has bounds checks. We can totally skip BCE if it's false. 433 bool has_bounds_checks_; 434 435 // Flag whether there are any try/catch blocks in the graph. We will skip 436 // try/catch-related passes if false. 437 bool has_try_catch_; 438 439 // Indicates whether the graph should be compiled in a way that 440 // ensures full debuggability. If false, we can apply more 441 // aggressive optimizations that may limit the level of debugging. 442 const bool debuggable_; 443 444 // The current id to assign to a newly added instruction. See HInstruction.id_. 445 int32_t current_instruction_id_; 446 447 // The dex file from which the method is from. 448 const DexFile& dex_file_; 449 450 // The method index in the dex file. 451 const uint32_t method_idx_; 452 453 // If inlined, this encodes how the callee is being invoked. 454 const InvokeType invoke_type_; 455 456 // Whether the graph has been transformed to SSA form. Only used 457 // in debug mode to ensure we are not using properties only valid 458 // for non-SSA form (like the number of temporaries). 459 bool in_ssa_form_; 460 461 const bool should_generate_constructor_barrier_; 462 463 const InstructionSet instruction_set_; 464 465 // Cached constants. 466 HNullConstant* cached_null_constant_; 467 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 468 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 469 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 470 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 471 472 HCurrentMethod* cached_current_method_; 473 474 friend class SsaBuilder; // For caching constants. 475 friend class SsaLivenessAnalysis; // For the linear order. 476 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 477 DISALLOW_COPY_AND_ASSIGN(HGraph); 478}; 479 480class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 481 public: 482 HLoopInformation(HBasicBlock* header, HGraph* graph) 483 : header_(header), 484 suspend_check_(nullptr), 485 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 486 // Make bit vector growable, as the number of blocks may change. 487 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 488 back_edges_.reserve(kDefaultNumberOfBackEdges); 489 } 490 491 HBasicBlock* GetHeader() const { 492 return header_; 493 } 494 495 void SetHeader(HBasicBlock* block) { 496 header_ = block; 497 } 498 499 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 500 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 501 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 502 503 void AddBackEdge(HBasicBlock* back_edge) { 504 back_edges_.push_back(back_edge); 505 } 506 507 void RemoveBackEdge(HBasicBlock* back_edge) { 508 RemoveElement(back_edges_, back_edge); 509 } 510 511 bool IsBackEdge(const HBasicBlock& block) const { 512 return ContainsElement(back_edges_, &block); 513 } 514 515 size_t NumberOfBackEdges() const { 516 return back_edges_.size(); 517 } 518 519 HBasicBlock* GetPreHeader() const; 520 521 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 522 return back_edges_; 523 } 524 525 // Returns the lifetime position of the back edge that has the 526 // greatest lifetime position. 527 size_t GetLifetimeEnd() const; 528 529 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 530 ReplaceElement(back_edges_, existing, new_back_edge); 531 } 532 533 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 534 // that is the header dominates the back edge. 535 bool Populate(); 536 537 // Reanalyzes the loop by removing loop info from its blocks and re-running 538 // Populate(). If there are no back edges left, the loop info is completely 539 // removed as well as its SuspendCheck instruction. It must be run on nested 540 // inner loops first. 541 void Update(); 542 543 // Returns whether this loop information contains `block`. 544 // Note that this loop information *must* be populated before entering this function. 545 bool Contains(const HBasicBlock& block) const; 546 547 // Returns whether this loop information is an inner loop of `other`. 548 // Note that `other` *must* be populated before entering this function. 549 bool IsIn(const HLoopInformation& other) const; 550 551 const ArenaBitVector& GetBlocks() const { return blocks_; } 552 553 void Add(HBasicBlock* block); 554 void Remove(HBasicBlock* block); 555 556 private: 557 // Internal recursive implementation of `Populate`. 558 void PopulateRecursive(HBasicBlock* block); 559 560 HBasicBlock* header_; 561 HSuspendCheck* suspend_check_; 562 ArenaVector<HBasicBlock*> back_edges_; 563 ArenaBitVector blocks_; 564 565 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 566}; 567 568// Stores try/catch information for basic blocks. 569// Note that HGraph is constructed so that catch blocks cannot simultaneously 570// be try blocks. 571class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 572 public: 573 // Try block information constructor. 574 explicit TryCatchInformation(const HTryBoundary& try_entry) 575 : try_entry_(&try_entry), 576 catch_dex_file_(nullptr), 577 catch_type_index_(DexFile::kDexNoIndex16) { 578 DCHECK(try_entry_ != nullptr); 579 } 580 581 // Catch block information constructor. 582 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 583 : try_entry_(nullptr), 584 catch_dex_file_(&dex_file), 585 catch_type_index_(catch_type_index) {} 586 587 bool IsTryBlock() const { return try_entry_ != nullptr; } 588 589 const HTryBoundary& GetTryEntry() const { 590 DCHECK(IsTryBlock()); 591 return *try_entry_; 592 } 593 594 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 595 596 bool IsCatchAllTypeIndex() const { 597 DCHECK(IsCatchBlock()); 598 return catch_type_index_ == DexFile::kDexNoIndex16; 599 } 600 601 uint16_t GetCatchTypeIndex() const { 602 DCHECK(IsCatchBlock()); 603 return catch_type_index_; 604 } 605 606 const DexFile& GetCatchDexFile() const { 607 DCHECK(IsCatchBlock()); 608 return *catch_dex_file_; 609 } 610 611 private: 612 // One of possibly several TryBoundary instructions entering the block's try. 613 // Only set for try blocks. 614 const HTryBoundary* try_entry_; 615 616 // Exception type information. Only set for catch blocks. 617 const DexFile* catch_dex_file_; 618 const uint16_t catch_type_index_; 619}; 620 621static constexpr size_t kNoLifetime = -1; 622static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 623 624// A block in a method. Contains the list of instructions represented 625// as a double linked list. Each block knows its predecessors and 626// successors. 627 628class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 629 public: 630 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 631 : graph_(graph), 632 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 633 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 634 loop_information_(nullptr), 635 dominator_(nullptr), 636 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 637 block_id_(kInvalidBlockId), 638 dex_pc_(dex_pc), 639 lifetime_start_(kNoLifetime), 640 lifetime_end_(kNoLifetime), 641 try_catch_information_(nullptr) { 642 predecessors_.reserve(kDefaultNumberOfPredecessors); 643 successors_.reserve(kDefaultNumberOfSuccessors); 644 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 645 } 646 647 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 648 return predecessors_; 649 } 650 651 HBasicBlock* GetPredecessor(size_t pred_idx) const { 652 DCHECK_LT(pred_idx, predecessors_.size()); 653 return predecessors_[pred_idx]; 654 } 655 656 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 657 return successors_; 658 } 659 660 HBasicBlock* GetSuccessor(size_t succ_idx) const { 661 DCHECK_LT(succ_idx, successors_.size()); 662 return successors_[succ_idx]; 663 } 664 665 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 666 return ContainsElement(successors_, block, start_from); 667 } 668 669 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 670 return dominated_blocks_; 671 } 672 673 bool IsEntryBlock() const { 674 return graph_->GetEntryBlock() == this; 675 } 676 677 bool IsExitBlock() const { 678 return graph_->GetExitBlock() == this; 679 } 680 681 bool IsSingleGoto() const; 682 bool IsSingleTryBoundary() const; 683 684 // Returns true if this block emits nothing but a jump. 685 bool IsSingleJump() const { 686 HLoopInformation* loop_info = GetLoopInformation(); 687 return (IsSingleGoto() || IsSingleTryBoundary()) 688 // Back edges generate a suspend check. 689 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 690 } 691 692 void AddBackEdge(HBasicBlock* back_edge) { 693 if (loop_information_ == nullptr) { 694 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 695 } 696 DCHECK_EQ(loop_information_->GetHeader(), this); 697 loop_information_->AddBackEdge(back_edge); 698 } 699 700 HGraph* GetGraph() const { return graph_; } 701 void SetGraph(HGraph* graph) { graph_ = graph; } 702 703 uint32_t GetBlockId() const { return block_id_; } 704 void SetBlockId(int id) { block_id_ = id; } 705 uint32_t GetDexPc() const { return dex_pc_; } 706 707 HBasicBlock* GetDominator() const { return dominator_; } 708 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 709 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 710 711 void RemoveDominatedBlock(HBasicBlock* block) { 712 RemoveElement(dominated_blocks_, block); 713 } 714 715 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 716 ReplaceElement(dominated_blocks_, existing, new_block); 717 } 718 719 void ClearDominanceInformation(); 720 721 int NumberOfBackEdges() const { 722 return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0; 723 } 724 725 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 726 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 727 const HInstructionList& GetInstructions() const { return instructions_; } 728 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 729 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 730 const HInstructionList& GetPhis() const { return phis_; } 731 732 void AddSuccessor(HBasicBlock* block) { 733 successors_.push_back(block); 734 block->predecessors_.push_back(this); 735 } 736 737 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 738 size_t successor_index = GetSuccessorIndexOf(existing); 739 existing->RemovePredecessor(this); 740 new_block->predecessors_.push_back(this); 741 successors_[successor_index] = new_block; 742 } 743 744 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 745 size_t predecessor_index = GetPredecessorIndexOf(existing); 746 existing->RemoveSuccessor(this); 747 new_block->successors_.push_back(this); 748 predecessors_[predecessor_index] = new_block; 749 } 750 751 // Insert `this` between `predecessor` and `successor. This method 752 // preserves the indicies, and will update the first edge found between 753 // `predecessor` and `successor`. 754 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 755 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 756 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 757 successor->predecessors_[predecessor_index] = this; 758 predecessor->successors_[successor_index] = this; 759 successors_.push_back(successor); 760 predecessors_.push_back(predecessor); 761 } 762 763 void RemovePredecessor(HBasicBlock* block) { 764 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 765 } 766 767 void RemoveSuccessor(HBasicBlock* block) { 768 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 769 } 770 771 void ClearAllPredecessors() { 772 predecessors_.clear(); 773 } 774 775 void AddPredecessor(HBasicBlock* block) { 776 predecessors_.push_back(block); 777 block->successors_.push_back(this); 778 } 779 780 void SwapPredecessors() { 781 DCHECK_EQ(predecessors_.size(), 2u); 782 std::swap(predecessors_[0], predecessors_[1]); 783 } 784 785 void SwapSuccessors() { 786 DCHECK_EQ(successors_.size(), 2u); 787 std::swap(successors_[0], successors_[1]); 788 } 789 790 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 791 return IndexOfElement(predecessors_, predecessor); 792 } 793 794 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 795 return IndexOfElement(successors_, successor); 796 } 797 798 HBasicBlock* GetSinglePredecessor() const { 799 DCHECK_EQ(GetPredecessors().size(), 1u); 800 return GetPredecessor(0); 801 } 802 803 HBasicBlock* GetSingleSuccessor() const { 804 DCHECK_EQ(GetSuccessors().size(), 1u); 805 return GetSuccessor(0); 806 } 807 808 // Returns whether the first occurrence of `predecessor` in the list of 809 // predecessors is at index `idx`. 810 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const { 811 DCHECK_EQ(GetPredecessor(idx), predecessor); 812 return GetPredecessorIndexOf(predecessor) == idx; 813 } 814 815 // Returns the number of non-exceptional successors. SsaChecker ensures that 816 // these are stored at the beginning of the successor list. 817 size_t NumberOfNormalSuccessors() const { 818 return EndsWithTryBoundary() ? 1 : GetSuccessors().size(); 819 } 820 821 // Create a new block between this block and its predecessors. The new block 822 // is added to the graph, all predecessor edges are relinked to it and an edge 823 // is created to `this`. Returns the new empty block. Reverse post order or 824 // loop and try/catch information are not updated. 825 HBasicBlock* CreateImmediateDominator(); 826 827 // Split the block into two blocks just before `cursor`. Returns the newly 828 // created, latter block. Note that this method will add the block to the 829 // graph, create a Goto at the end of the former block and will create an edge 830 // between the blocks. It will not, however, update the reverse post order or 831 // loop and try/catch information. 832 HBasicBlock* SplitBefore(HInstruction* cursor); 833 834 // Split the block into two blocks just after `cursor`. Returns the newly 835 // created block. Note that this method just updates raw block information, 836 // like predecessors, successors, dominators, and instruction list. It does not 837 // update the graph, reverse post order, loop information, nor make sure the 838 // blocks are consistent (for example ending with a control flow instruction). 839 HBasicBlock* SplitAfter(HInstruction* cursor); 840 841 // Merge `other` at the end of `this`. Successors and dominated blocks of 842 // `other` are changed to be successors and dominated blocks of `this`. Note 843 // that this method does not update the graph, reverse post order, loop 844 // information, nor make sure the blocks are consistent (for example ending 845 // with a control flow instruction). 846 void MergeWithInlined(HBasicBlock* other); 847 848 // Replace `this` with `other`. Predecessors, successors, and dominated blocks 849 // of `this` are moved to `other`. 850 // Note that this method does not update the graph, reverse post order, loop 851 // information, nor make sure the blocks are consistent (for example ending 852 // with a control flow instruction). 853 void ReplaceWith(HBasicBlock* other); 854 855 // Merge `other` at the end of `this`. This method updates loops, reverse post 856 // order, links to predecessors, successors, dominators and deletes the block 857 // from the graph. The two blocks must be successive, i.e. `this` the only 858 // predecessor of `other` and vice versa. 859 void MergeWith(HBasicBlock* other); 860 861 // Disconnects `this` from all its predecessors, successors and dominator, 862 // removes it from all loops it is included in and eventually from the graph. 863 // The block must not dominate any other block. Predecessors and successors 864 // are safely updated. 865 void DisconnectAndDelete(); 866 867 void AddInstruction(HInstruction* instruction); 868 // Insert `instruction` before/after an existing instruction `cursor`. 869 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 870 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 871 // Replace instruction `initial` with `replacement` within this block. 872 void ReplaceAndRemoveInstructionWith(HInstruction* initial, 873 HInstruction* replacement); 874 void AddPhi(HPhi* phi); 875 void InsertPhiAfter(HPhi* instruction, HPhi* cursor); 876 // RemoveInstruction and RemovePhi delete a given instruction from the respective 877 // instruction list. With 'ensure_safety' set to true, it verifies that the 878 // instruction is not in use and removes it from the use lists of its inputs. 879 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true); 880 void RemovePhi(HPhi* phi, bool ensure_safety = true); 881 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true); 882 883 bool IsLoopHeader() const { 884 return IsInLoop() && (loop_information_->GetHeader() == this); 885 } 886 887 bool IsLoopPreHeaderFirstPredecessor() const { 888 DCHECK(IsLoopHeader()); 889 return GetPredecessor(0) == GetLoopInformation()->GetPreHeader(); 890 } 891 892 HLoopInformation* GetLoopInformation() const { 893 return loop_information_; 894 } 895 896 // Set the loop_information_ on this block. Overrides the current 897 // loop_information if it is an outer loop of the passed loop information. 898 // Note that this method is called while creating the loop information. 899 void SetInLoop(HLoopInformation* info) { 900 if (IsLoopHeader()) { 901 // Nothing to do. This just means `info` is an outer loop. 902 } else if (!IsInLoop()) { 903 loop_information_ = info; 904 } else if (loop_information_->Contains(*info->GetHeader())) { 905 // Block is currently part of an outer loop. Make it part of this inner loop. 906 // Note that a non loop header having a loop information means this loop information 907 // has already been populated 908 loop_information_ = info; 909 } else { 910 // Block is part of an inner loop. Do not update the loop information. 911 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()` 912 // at this point, because this method is being called while populating `info`. 913 } 914 } 915 916 // Raw update of the loop information. 917 void SetLoopInformation(HLoopInformation* info) { 918 loop_information_ = info; 919 } 920 921 bool IsInLoop() const { return loop_information_ != nullptr; } 922 923 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; } 924 925 void SetTryCatchInformation(TryCatchInformation* try_catch_information) { 926 try_catch_information_ = try_catch_information; 927 } 928 929 bool IsTryBlock() const { 930 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock(); 931 } 932 933 bool IsCatchBlock() const { 934 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock(); 935 } 936 937 // Returns the try entry that this block's successors should have. They will 938 // be in the same try, unless the block ends in a try boundary. In that case, 939 // the appropriate try entry will be returned. 940 const HTryBoundary* ComputeTryEntryOfSuccessors() const; 941 942 bool HasThrowingInstructions() const; 943 944 // Returns whether this block dominates the blocked passed as parameter. 945 bool Dominates(HBasicBlock* block) const; 946 947 size_t GetLifetimeStart() const { return lifetime_start_; } 948 size_t GetLifetimeEnd() const { return lifetime_end_; } 949 950 void SetLifetimeStart(size_t start) { lifetime_start_ = start; } 951 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; } 952 953 bool EndsWithControlFlowInstruction() const; 954 bool EndsWithIf() const; 955 bool EndsWithTryBoundary() const; 956 bool HasSinglePhi() const; 957 958 private: 959 HGraph* graph_; 960 ArenaVector<HBasicBlock*> predecessors_; 961 ArenaVector<HBasicBlock*> successors_; 962 HInstructionList instructions_; 963 HInstructionList phis_; 964 HLoopInformation* loop_information_; 965 HBasicBlock* dominator_; 966 ArenaVector<HBasicBlock*> dominated_blocks_; 967 uint32_t block_id_; 968 // The dex program counter of the first instruction of this block. 969 const uint32_t dex_pc_; 970 size_t lifetime_start_; 971 size_t lifetime_end_; 972 TryCatchInformation* try_catch_information_; 973 974 friend class HGraph; 975 friend class HInstruction; 976 977 DISALLOW_COPY_AND_ASSIGN(HBasicBlock); 978}; 979 980// Iterates over the LoopInformation of all loops which contain 'block' 981// from the innermost to the outermost. 982class HLoopInformationOutwardIterator : public ValueObject { 983 public: 984 explicit HLoopInformationOutwardIterator(const HBasicBlock& block) 985 : current_(block.GetLoopInformation()) {} 986 987 bool Done() const { return current_ == nullptr; } 988 989 void Advance() { 990 DCHECK(!Done()); 991 current_ = current_->GetPreHeader()->GetLoopInformation(); 992 } 993 994 HLoopInformation* Current() const { 995 DCHECK(!Done()); 996 return current_; 997 } 998 999 private: 1000 HLoopInformation* current_; 1001 1002 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator); 1003}; 1004 1005#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1006 M(Add, BinaryOperation) \ 1007 M(And, BinaryOperation) \ 1008 M(ArrayGet, Instruction) \ 1009 M(ArrayLength, Instruction) \ 1010 M(ArraySet, Instruction) \ 1011 M(BooleanNot, UnaryOperation) \ 1012 M(BoundsCheck, Instruction) \ 1013 M(BoundType, Instruction) \ 1014 M(CheckCast, Instruction) \ 1015 M(ClearException, Instruction) \ 1016 M(ClinitCheck, Instruction) \ 1017 M(Compare, BinaryOperation) \ 1018 M(Condition, BinaryOperation) \ 1019 M(CurrentMethod, Instruction) \ 1020 M(Deoptimize, Instruction) \ 1021 M(Div, BinaryOperation) \ 1022 M(DivZeroCheck, Instruction) \ 1023 M(DoubleConstant, Constant) \ 1024 M(Equal, Condition) \ 1025 M(Exit, Instruction) \ 1026 M(FakeString, Instruction) \ 1027 M(FloatConstant, Constant) \ 1028 M(Goto, Instruction) \ 1029 M(GreaterThan, Condition) \ 1030 M(GreaterThanOrEqual, Condition) \ 1031 M(If, Instruction) \ 1032 M(InstanceFieldGet, Instruction) \ 1033 M(InstanceFieldSet, Instruction) \ 1034 M(InstanceOf, Instruction) \ 1035 M(IntConstant, Constant) \ 1036 M(InvokeUnresolved, Invoke) \ 1037 M(InvokeInterface, Invoke) \ 1038 M(InvokeStaticOrDirect, Invoke) \ 1039 M(InvokeVirtual, Invoke) \ 1040 M(LessThan, Condition) \ 1041 M(LessThanOrEqual, Condition) \ 1042 M(LoadClass, Instruction) \ 1043 M(LoadException, Instruction) \ 1044 M(LoadLocal, Instruction) \ 1045 M(LoadString, Instruction) \ 1046 M(Local, Instruction) \ 1047 M(LongConstant, Constant) \ 1048 M(MemoryBarrier, Instruction) \ 1049 M(MonitorOperation, Instruction) \ 1050 M(Mul, BinaryOperation) \ 1051 M(Neg, UnaryOperation) \ 1052 M(NewArray, Instruction) \ 1053 M(NewInstance, Instruction) \ 1054 M(Not, UnaryOperation) \ 1055 M(NotEqual, Condition) \ 1056 M(NullConstant, Instruction) \ 1057 M(NullCheck, Instruction) \ 1058 M(Or, BinaryOperation) \ 1059 M(PackedSwitch, Instruction) \ 1060 M(ParallelMove, Instruction) \ 1061 M(ParameterValue, Instruction) \ 1062 M(Phi, Instruction) \ 1063 M(Rem, BinaryOperation) \ 1064 M(Return, Instruction) \ 1065 M(ReturnVoid, Instruction) \ 1066 M(Shl, BinaryOperation) \ 1067 M(Shr, BinaryOperation) \ 1068 M(StaticFieldGet, Instruction) \ 1069 M(StaticFieldSet, Instruction) \ 1070 M(UnresolvedInstanceFieldGet, Instruction) \ 1071 M(UnresolvedInstanceFieldSet, Instruction) \ 1072 M(UnresolvedStaticFieldGet, Instruction) \ 1073 M(UnresolvedStaticFieldSet, Instruction) \ 1074 M(StoreLocal, Instruction) \ 1075 M(Sub, BinaryOperation) \ 1076 M(SuspendCheck, Instruction) \ 1077 M(Temporary, Instruction) \ 1078 M(Throw, Instruction) \ 1079 M(TryBoundary, Instruction) \ 1080 M(TypeConversion, Instruction) \ 1081 M(UShr, BinaryOperation) \ 1082 M(Xor, BinaryOperation) \ 1083 1084#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) 1085 1086#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) 1087 1088#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) 1089 1090#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1091 M(X86ComputeBaseMethodAddress, Instruction) \ 1092 M(X86LoadFromConstantTable, Instruction) 1093 1094#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1095 1096#define FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1097 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1098 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \ 1099 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1100 FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \ 1101 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1102 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1103 1104#define FOR_EACH_INSTRUCTION(M) \ 1105 FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1106 M(Constant, Instruction) \ 1107 M(UnaryOperation, Instruction) \ 1108 M(BinaryOperation, Instruction) \ 1109 M(Invoke, Instruction) 1110 1111#define FORWARD_DECLARATION(type, super) class H##type; 1112FOR_EACH_INSTRUCTION(FORWARD_DECLARATION) 1113#undef FORWARD_DECLARATION 1114 1115#define DECLARE_INSTRUCTION(type) \ 1116 InstructionKind GetKind() const OVERRIDE { return k##type; } \ 1117 const char* DebugName() const OVERRIDE { return #type; } \ 1118 const H##type* As##type() const OVERRIDE { return this; } \ 1119 H##type* As##type() OVERRIDE { return this; } \ 1120 bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \ 1121 return other->Is##type(); \ 1122 } \ 1123 void Accept(HGraphVisitor* visitor) OVERRIDE 1124 1125template <typename T> class HUseList; 1126 1127template <typename T> 1128class HUseListNode : public ArenaObject<kArenaAllocUseListNode> { 1129 public: 1130 HUseListNode* GetPrevious() const { return prev_; } 1131 HUseListNode* GetNext() const { return next_; } 1132 T GetUser() const { return user_; } 1133 size_t GetIndex() const { return index_; } 1134 void SetIndex(size_t index) { index_ = index; } 1135 1136 private: 1137 HUseListNode(T user, size_t index) 1138 : user_(user), index_(index), prev_(nullptr), next_(nullptr) {} 1139 1140 T const user_; 1141 size_t index_; 1142 HUseListNode<T>* prev_; 1143 HUseListNode<T>* next_; 1144 1145 friend class HUseList<T>; 1146 1147 DISALLOW_COPY_AND_ASSIGN(HUseListNode); 1148}; 1149 1150template <typename T> 1151class HUseList : public ValueObject { 1152 public: 1153 HUseList() : first_(nullptr) {} 1154 1155 void Clear() { 1156 first_ = nullptr; 1157 } 1158 1159 // Adds a new entry at the beginning of the use list and returns 1160 // the newly created node. 1161 HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) { 1162 HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index); 1163 if (IsEmpty()) { 1164 first_ = new_node; 1165 } else { 1166 first_->prev_ = new_node; 1167 new_node->next_ = first_; 1168 first_ = new_node; 1169 } 1170 return new_node; 1171 } 1172 1173 HUseListNode<T>* GetFirst() const { 1174 return first_; 1175 } 1176 1177 void Remove(HUseListNode<T>* node) { 1178 DCHECK(node != nullptr); 1179 DCHECK(Contains(node)); 1180 1181 if (node->prev_ != nullptr) { 1182 node->prev_->next_ = node->next_; 1183 } 1184 if (node->next_ != nullptr) { 1185 node->next_->prev_ = node->prev_; 1186 } 1187 if (node == first_) { 1188 first_ = node->next_; 1189 } 1190 } 1191 1192 bool Contains(const HUseListNode<T>* node) const { 1193 if (node == nullptr) { 1194 return false; 1195 } 1196 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1197 if (current == node) { 1198 return true; 1199 } 1200 } 1201 return false; 1202 } 1203 1204 bool IsEmpty() const { 1205 return first_ == nullptr; 1206 } 1207 1208 bool HasOnlyOneUse() const { 1209 return first_ != nullptr && first_->next_ == nullptr; 1210 } 1211 1212 size_t SizeSlow() const { 1213 size_t count = 0; 1214 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1215 ++count; 1216 } 1217 return count; 1218 } 1219 1220 private: 1221 HUseListNode<T>* first_; 1222}; 1223 1224template<typename T> 1225class HUseIterator : public ValueObject { 1226 public: 1227 explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {} 1228 1229 bool Done() const { return current_ == nullptr; } 1230 1231 void Advance() { 1232 DCHECK(!Done()); 1233 current_ = current_->GetNext(); 1234 } 1235 1236 HUseListNode<T>* Current() const { 1237 DCHECK(!Done()); 1238 return current_; 1239 } 1240 1241 private: 1242 HUseListNode<T>* current_; 1243 1244 friend class HValue; 1245}; 1246 1247// This class is used by HEnvironment and HInstruction classes to record the 1248// instructions they use and pointers to the corresponding HUseListNodes kept 1249// by the used instructions. 1250template <typename T> 1251class HUserRecord : public ValueObject { 1252 public: 1253 HUserRecord() : instruction_(nullptr), use_node_(nullptr) {} 1254 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {} 1255 1256 HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node) 1257 : instruction_(old_record.instruction_), use_node_(use_node) { 1258 DCHECK(instruction_ != nullptr); 1259 DCHECK(use_node_ != nullptr); 1260 DCHECK(old_record.use_node_ == nullptr); 1261 } 1262 1263 HInstruction* GetInstruction() const { return instruction_; } 1264 HUseListNode<T>* GetUseNode() const { return use_node_; } 1265 1266 private: 1267 // Instruction used by the user. 1268 HInstruction* instruction_; 1269 1270 // Corresponding entry in the use list kept by 'instruction_'. 1271 HUseListNode<T>* use_node_; 1272}; 1273 1274/** 1275 * Side-effects representation. 1276 * 1277 * For write/read dependences on fields/arrays, the dependence analysis uses 1278 * type disambiguation (e.g. a float field write cannot modify the value of an 1279 * integer field read) and the access type (e.g. a reference array write cannot 1280 * modify the value of a reference field read [although it may modify the 1281 * reference fetch prior to reading the field, which is represented by its own 1282 * write/read dependence]). The analysis makes conservative points-to 1283 * assumptions on reference types (e.g. two same typed arrays are assumed to be 1284 * the same, and any reference read depends on any reference read without 1285 * further regard of its type). 1286 * 1287 * The internal representation uses 38-bit and is described in the table below. 1288 * The first line indicates the side effect, and for field/array accesses the 1289 * second line indicates the type of the access (in the order of the 1290 * Primitive::Type enum). 1291 * The two numbered lines below indicate the bit position in the bitfield (read 1292 * vertically). 1293 * 1294 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W | 1295 * +-------------+---------+---------+--------------+---------+---------+ 1296 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL| 1297 * | 3 |333333322|222222221| 1 |111111110|000000000| 1298 * | 7 |654321098|765432109| 8 |765432109|876543210| 1299 * 1300 * Note that, to ease the implementation, 'changes' bits are least significant 1301 * bits, while 'dependency' bits are most significant bits. 1302 */ 1303class SideEffects : public ValueObject { 1304 public: 1305 SideEffects() : flags_(0) {} 1306 1307 static SideEffects None() { 1308 return SideEffects(0); 1309 } 1310 1311 static SideEffects All() { 1312 return SideEffects(kAllChangeBits | kAllDependOnBits); 1313 } 1314 1315 static SideEffects AllChanges() { 1316 return SideEffects(kAllChangeBits); 1317 } 1318 1319 static SideEffects AllDependencies() { 1320 return SideEffects(kAllDependOnBits); 1321 } 1322 1323 static SideEffects AllExceptGCDependency() { 1324 return AllWritesAndReads().Union(SideEffects::CanTriggerGC()); 1325 } 1326 1327 static SideEffects AllWritesAndReads() { 1328 return SideEffects(kAllWrites | kAllReads); 1329 } 1330 1331 static SideEffects AllWrites() { 1332 return SideEffects(kAllWrites); 1333 } 1334 1335 static SideEffects AllReads() { 1336 return SideEffects(kAllReads); 1337 } 1338 1339 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) { 1340 return is_volatile 1341 ? AllWritesAndReads() 1342 : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset)); 1343 } 1344 1345 static SideEffects ArrayWriteOfType(Primitive::Type type) { 1346 return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset)); 1347 } 1348 1349 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) { 1350 return is_volatile 1351 ? AllWritesAndReads() 1352 : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset)); 1353 } 1354 1355 static SideEffects ArrayReadOfType(Primitive::Type type) { 1356 return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset)); 1357 } 1358 1359 static SideEffects CanTriggerGC() { 1360 return SideEffects(1ULL << kCanTriggerGCBit); 1361 } 1362 1363 static SideEffects DependsOnGC() { 1364 return SideEffects(1ULL << kDependsOnGCBit); 1365 } 1366 1367 // Combines the side-effects of this and the other. 1368 SideEffects Union(SideEffects other) const { 1369 return SideEffects(flags_ | other.flags_); 1370 } 1371 1372 SideEffects Exclusion(SideEffects other) const { 1373 return SideEffects(flags_ & ~other.flags_); 1374 } 1375 1376 bool Includes(SideEffects other) const { 1377 return (other.flags_ & flags_) == other.flags_; 1378 } 1379 1380 bool HasSideEffects() const { 1381 return (flags_ & kAllChangeBits); 1382 } 1383 1384 bool HasDependencies() const { 1385 return (flags_ & kAllDependOnBits); 1386 } 1387 1388 // Returns true if there are no side effects or dependencies. 1389 bool DoesNothing() const { 1390 return flags_ == 0; 1391 } 1392 1393 // Returns true if something is written. 1394 bool DoesAnyWrite() const { 1395 return (flags_ & kAllWrites); 1396 } 1397 1398 // Returns true if something is read. 1399 bool DoesAnyRead() const { 1400 return (flags_ & kAllReads); 1401 } 1402 1403 // Returns true if potentially everything is written and read 1404 // (every type and every kind of access). 1405 bool DoesAllReadWrite() const { 1406 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads); 1407 } 1408 1409 bool DoesAll() const { 1410 return flags_ == (kAllChangeBits | kAllDependOnBits); 1411 } 1412 1413 // Returns true if this may read something written by other. 1414 bool MayDependOn(SideEffects other) const { 1415 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits; 1416 return (other.flags_ & depends_on_flags); 1417 } 1418 1419 // Returns string representation of flags (for debugging only). 1420 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL| 1421 std::string ToString() const { 1422 std::string flags = "|"; 1423 for (int s = kLastBit; s >= 0; s--) { 1424 bool current_bit_is_set = ((flags_ >> s) & 1) != 0; 1425 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) { 1426 // This is a bit for the GC side effect. 1427 if (current_bit_is_set) { 1428 flags += "GC"; 1429 } 1430 flags += "|"; 1431 } else { 1432 // This is a bit for the array/field analysis. 1433 // The underscore character stands for the 'can trigger GC' bit. 1434 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD"; 1435 if (current_bit_is_set) { 1436 flags += kDebug[s]; 1437 } 1438 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) || 1439 (s == kFieldReadOffset) || (s == kArrayReadOffset)) { 1440 flags += "|"; 1441 } 1442 } 1443 } 1444 return flags; 1445 } 1446 1447 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; } 1448 1449 private: 1450 static constexpr int kFieldArrayAnalysisBits = 9; 1451 1452 static constexpr int kFieldWriteOffset = 0; 1453 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits; 1454 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1; 1455 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1; 1456 1457 static constexpr int kChangeBits = kCanTriggerGCBit + 1; 1458 1459 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1; 1460 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits; 1461 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1; 1462 static constexpr int kDependsOnGCBit = kLastBitForReads + 1; 1463 1464 static constexpr int kLastBit = kDependsOnGCBit; 1465 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits; 1466 1467 // Aliases. 1468 1469 static_assert(kChangeBits == kDependOnBits, 1470 "the 'change' bits should match the 'depend on' bits."); 1471 1472 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1); 1473 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits; 1474 static constexpr uint64_t kAllWrites = 1475 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset; 1476 static constexpr uint64_t kAllReads = 1477 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset; 1478 1479 // Work around the fact that HIR aliases I/F and J/D. 1480 // TODO: remove this interceptor once HIR types are clean 1481 static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) { 1482 switch (type) { 1483 case Primitive::kPrimInt: 1484 case Primitive::kPrimFloat: 1485 return TypeFlag(Primitive::kPrimInt, offset) | 1486 TypeFlag(Primitive::kPrimFloat, offset); 1487 case Primitive::kPrimLong: 1488 case Primitive::kPrimDouble: 1489 return TypeFlag(Primitive::kPrimLong, offset) | 1490 TypeFlag(Primitive::kPrimDouble, offset); 1491 default: 1492 return TypeFlag(type, offset); 1493 } 1494 } 1495 1496 // Translates type to bit flag. 1497 static uint64_t TypeFlag(Primitive::Type type, int offset) { 1498 CHECK_NE(type, Primitive::kPrimVoid); 1499 const uint64_t one = 1; 1500 const int shift = type; // 0-based consecutive enum 1501 DCHECK_LE(kFieldWriteOffset, shift); 1502 DCHECK_LT(shift, kArrayWriteOffset); 1503 return one << (type + offset); 1504 } 1505 1506 // Private constructor on direct flags value. 1507 explicit SideEffects(uint64_t flags) : flags_(flags) {} 1508 1509 uint64_t flags_; 1510}; 1511 1512// A HEnvironment object contains the values of virtual registers at a given location. 1513class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1514 public: 1515 HEnvironment(ArenaAllocator* arena, 1516 size_t number_of_vregs, 1517 const DexFile& dex_file, 1518 uint32_t method_idx, 1519 uint32_t dex_pc, 1520 InvokeType invoke_type, 1521 HInstruction* holder) 1522 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1523 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1524 parent_(nullptr), 1525 dex_file_(dex_file), 1526 method_idx_(method_idx), 1527 dex_pc_(dex_pc), 1528 invoke_type_(invoke_type), 1529 holder_(holder) { 1530 } 1531 1532 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1533 : HEnvironment(arena, 1534 to_copy.Size(), 1535 to_copy.GetDexFile(), 1536 to_copy.GetMethodIdx(), 1537 to_copy.GetDexPc(), 1538 to_copy.GetInvokeType(), 1539 holder) {} 1540 1541 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1542 if (parent_ != nullptr) { 1543 parent_->SetAndCopyParentChain(allocator, parent); 1544 } else { 1545 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1546 parent_->CopyFrom(parent); 1547 if (parent->GetParent() != nullptr) { 1548 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1549 } 1550 } 1551 } 1552 1553 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1554 void CopyFrom(HEnvironment* environment); 1555 1556 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1557 // input to the loop phi instead. This is for inserting instructions that 1558 // require an environment (like HDeoptimization) in the loop pre-header. 1559 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1560 1561 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1562 DCHECK_LT(index, Size()); 1563 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1564 } 1565 1566 HInstruction* GetInstructionAt(size_t index) const { 1567 DCHECK_LT(index, Size()); 1568 return vregs_[index].GetInstruction(); 1569 } 1570 1571 void RemoveAsUserOfInput(size_t index) const; 1572 1573 size_t Size() const { return vregs_.size(); } 1574 1575 HEnvironment* GetParent() const { return parent_; } 1576 1577 void SetLocationAt(size_t index, Location location) { 1578 DCHECK_LT(index, Size()); 1579 locations_[index] = location; 1580 } 1581 1582 Location GetLocationAt(size_t index) const { 1583 DCHECK_LT(index, Size()); 1584 return locations_[index]; 1585 } 1586 1587 uint32_t GetDexPc() const { 1588 return dex_pc_; 1589 } 1590 1591 uint32_t GetMethodIdx() const { 1592 return method_idx_; 1593 } 1594 1595 InvokeType GetInvokeType() const { 1596 return invoke_type_; 1597 } 1598 1599 const DexFile& GetDexFile() const { 1600 return dex_file_; 1601 } 1602 1603 HInstruction* GetHolder() const { 1604 return holder_; 1605 } 1606 1607 private: 1608 // Record instructions' use entries of this environment for constant-time removal. 1609 // It should only be called by HInstruction when a new environment use is added. 1610 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1611 DCHECK(env_use->GetUser() == this); 1612 size_t index = env_use->GetIndex(); 1613 DCHECK_LT(index, Size()); 1614 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1615 } 1616 1617 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1618 ArenaVector<Location> locations_; 1619 HEnvironment* parent_; 1620 const DexFile& dex_file_; 1621 const uint32_t method_idx_; 1622 const uint32_t dex_pc_; 1623 const InvokeType invoke_type_; 1624 1625 // The instruction that holds this environment. 1626 HInstruction* const holder_; 1627 1628 friend class HInstruction; 1629 1630 DISALLOW_COPY_AND_ASSIGN(HEnvironment); 1631}; 1632 1633class ReferenceTypeInfo : ValueObject { 1634 public: 1635 typedef Handle<mirror::Class> TypeHandle; 1636 1637 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) { 1638 // The constructor will check that the type_handle is valid. 1639 return ReferenceTypeInfo(type_handle, is_exact); 1640 } 1641 1642 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); } 1643 1644 static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) { 1645 return handle.GetReference() != nullptr; 1646 } 1647 1648 bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) { 1649 return IsValidHandle(type_handle_); 1650 } 1651 1652 bool IsExact() const { return is_exact_; } 1653 1654 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1655 DCHECK(IsValid()); 1656 return GetTypeHandle()->IsObjectClass(); 1657 } 1658 1659 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) { 1660 DCHECK(IsValid()); 1661 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass(); 1662 } 1663 1664 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) { 1665 DCHECK(IsValid()); 1666 return GetTypeHandle()->IsInterface(); 1667 } 1668 1669 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 1670 return GetTypeHandle()->IsArrayClass(); 1671 } 1672 1673 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1674 if (!IsExact()) return false; 1675 if (!IsArrayClass()) return false; 1676 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1677 } 1678 1679 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 1680 1681 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 1682 DCHECK(IsValid()); 1683 DCHECK(rti.IsValid()); 1684 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 1685 } 1686 1687 // Returns true if the type information provide the same amount of details. 1688 // Note that it does not mean that the instructions have the same actual type 1689 // (because the type can be the result of a merge). 1690 bool IsEqual(ReferenceTypeInfo rti) SHARED_REQUIRES(Locks::mutator_lock_) { 1691 if (!IsValid() && !rti.IsValid()) { 1692 // Invalid types are equal. 1693 return true; 1694 } 1695 if (!IsValid() || !rti.IsValid()) { 1696 // One is valid, the other not. 1697 return false; 1698 } 1699 return IsExact() == rti.IsExact() 1700 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 1701 } 1702 1703 private: 1704 ReferenceTypeInfo(); 1705 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 1706 1707 // The class of the object. 1708 TypeHandle type_handle_; 1709 // Whether or not the type is exact or a superclass of the actual type. 1710 // Whether or not we have any information about this type. 1711 bool is_exact_; 1712}; 1713 1714std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 1715 1716class HInstruction : public ArenaObject<kArenaAllocInstruction> { 1717 public: 1718 HInstruction(SideEffects side_effects, uint32_t dex_pc = kNoDexPc) 1719 : previous_(nullptr), 1720 next_(nullptr), 1721 block_(nullptr), 1722 dex_pc_(dex_pc), 1723 id_(-1), 1724 ssa_index_(-1), 1725 environment_(nullptr), 1726 locations_(nullptr), 1727 live_interval_(nullptr), 1728 lifetime_position_(kNoLifetime), 1729 side_effects_(side_effects), 1730 reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {} 1731 1732 virtual ~HInstruction() {} 1733 1734#define DECLARE_KIND(type, super) k##type, 1735 enum InstructionKind { 1736 FOR_EACH_INSTRUCTION(DECLARE_KIND) 1737 }; 1738#undef DECLARE_KIND 1739 1740 HInstruction* GetNext() const { return next_; } 1741 HInstruction* GetPrevious() const { return previous_; } 1742 1743 HInstruction* GetNextDisregardingMoves() const; 1744 HInstruction* GetPreviousDisregardingMoves() const; 1745 1746 HBasicBlock* GetBlock() const { return block_; } 1747 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); } 1748 void SetBlock(HBasicBlock* block) { block_ = block; } 1749 bool IsInBlock() const { return block_ != nullptr; } 1750 bool IsInLoop() const { return block_->IsInLoop(); } 1751 bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); } 1752 1753 virtual size_t InputCount() const = 0; 1754 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); } 1755 1756 virtual void Accept(HGraphVisitor* visitor) = 0; 1757 virtual const char* DebugName() const = 0; 1758 1759 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1760 void SetRawInputAt(size_t index, HInstruction* input) { 1761 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1762 } 1763 1764 virtual bool NeedsEnvironment() const { return false; } 1765 1766 uint32_t GetDexPc() const { return dex_pc_; } 1767 1768 virtual bool IsControlFlow() const { return false; } 1769 1770 virtual bool CanThrow() const { return false; } 1771 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); } 1772 1773 bool HasSideEffects() const { return side_effects_.HasSideEffects(); } 1774 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); } 1775 1776 // Does not apply for all instructions, but having this at top level greatly 1777 // simplifies the null check elimination. 1778 // TODO: Consider merging can_be_null into ReferenceTypeInfo. 1779 virtual bool CanBeNull() const { 1780 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types"; 1781 return true; 1782 } 1783 1784 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj) const { 1785 UNUSED(obj); 1786 return false; 1787 } 1788 1789 void SetReferenceTypeInfo(ReferenceTypeInfo rti); 1790 1791 ReferenceTypeInfo GetReferenceTypeInfo() const { 1792 DCHECK_EQ(GetType(), Primitive::kPrimNot); 1793 return reference_type_info_; 1794 } 1795 1796 void AddUseAt(HInstruction* user, size_t index) { 1797 DCHECK(user != nullptr); 1798 HUseListNode<HInstruction*>* use = 1799 uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1800 user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use)); 1801 } 1802 1803 void AddEnvUseAt(HEnvironment* user, size_t index) { 1804 DCHECK(user != nullptr); 1805 HUseListNode<HEnvironment*>* env_use = 1806 env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1807 user->RecordEnvUse(env_use); 1808 } 1809 1810 void RemoveAsUserOfInput(size_t input) { 1811 HUserRecord<HInstruction*> input_use = InputRecordAt(input); 1812 input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode()); 1813 } 1814 1815 const HUseList<HInstruction*>& GetUses() const { return uses_; } 1816 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; } 1817 1818 bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); } 1819 bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); } 1820 bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); } 1821 bool HasOnlyOneNonEnvironmentUse() const { 1822 return !HasEnvironmentUses() && GetUses().HasOnlyOneUse(); 1823 } 1824 1825 // Does this instruction strictly dominate `other_instruction`? 1826 // Returns false if this instruction and `other_instruction` are the same. 1827 // Aborts if this instruction and `other_instruction` are both phis. 1828 bool StrictlyDominates(HInstruction* other_instruction) const; 1829 1830 int GetId() const { return id_; } 1831 void SetId(int id) { id_ = id; } 1832 1833 int GetSsaIndex() const { return ssa_index_; } 1834 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; } 1835 bool HasSsaIndex() const { return ssa_index_ != -1; } 1836 1837 bool HasEnvironment() const { return environment_ != nullptr; } 1838 HEnvironment* GetEnvironment() const { return environment_; } 1839 // Set the `environment_` field. Raw because this method does not 1840 // update the uses lists. 1841 void SetRawEnvironment(HEnvironment* environment) { 1842 DCHECK(environment_ == nullptr); 1843 DCHECK_EQ(environment->GetHolder(), this); 1844 environment_ = environment; 1845 } 1846 1847 // Set the environment of this instruction, copying it from `environment`. While 1848 // copying, the uses lists are being updated. 1849 void CopyEnvironmentFrom(HEnvironment* environment) { 1850 DCHECK(environment_ == nullptr); 1851 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1852 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1853 environment_->CopyFrom(environment); 1854 if (environment->GetParent() != nullptr) { 1855 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1856 } 1857 } 1858 1859 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment, 1860 HBasicBlock* block) { 1861 DCHECK(environment_ == nullptr); 1862 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1863 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1864 environment_->CopyFromWithLoopPhiAdjustment(environment, block); 1865 if (environment->GetParent() != nullptr) { 1866 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1867 } 1868 } 1869 1870 // Returns the number of entries in the environment. Typically, that is the 1871 // number of dex registers in a method. It could be more in case of inlining. 1872 size_t EnvironmentSize() const; 1873 1874 LocationSummary* GetLocations() const { return locations_; } 1875 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1876 1877 void ReplaceWith(HInstruction* instruction); 1878 void ReplaceInput(HInstruction* replacement, size_t index); 1879 1880 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1881 // uses of this instruction by `other` are *not* updated. 1882 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1883 ReplaceWith(other); 1884 other->ReplaceInput(this, use_index); 1885 } 1886 1887 // Move `this` instruction before `cursor`. 1888 void MoveBefore(HInstruction* cursor); 1889 1890#define INSTRUCTION_TYPE_CHECK(type, super) \ 1891 bool Is##type() const { return (As##type() != nullptr); } \ 1892 virtual const H##type* As##type() const { return nullptr; } \ 1893 virtual H##type* As##type() { return nullptr; } 1894 1895 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1896#undef INSTRUCTION_TYPE_CHECK 1897 1898 // Returns whether the instruction can be moved within the graph. 1899 virtual bool CanBeMoved() const { return false; } 1900 1901 // Returns whether the two instructions are of the same kind. 1902 virtual bool InstructionTypeEquals(HInstruction* other) const { 1903 UNUSED(other); 1904 return false; 1905 } 1906 1907 // Returns whether any data encoded in the two instructions is equal. 1908 // This method does not look at the inputs. Both instructions must be 1909 // of the same type, otherwise the method has undefined behavior. 1910 virtual bool InstructionDataEquals(HInstruction* other) const { 1911 UNUSED(other); 1912 return false; 1913 } 1914 1915 // Returns whether two instructions are equal, that is: 1916 // 1) They have the same type and contain the same data (InstructionDataEquals). 1917 // 2) Their inputs are identical. 1918 bool Equals(HInstruction* other) const; 1919 1920 virtual InstructionKind GetKind() const = 0; 1921 1922 virtual size_t ComputeHashCode() const { 1923 size_t result = GetKind(); 1924 for (size_t i = 0, e = InputCount(); i < e; ++i) { 1925 result = (result * 31) + InputAt(i)->GetId(); 1926 } 1927 return result; 1928 } 1929 1930 SideEffects GetSideEffects() const { return side_effects_; } 1931 1932 size_t GetLifetimePosition() const { return lifetime_position_; } 1933 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 1934 LiveInterval* GetLiveInterval() const { return live_interval_; } 1935 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 1936 bool HasLiveInterval() const { return live_interval_ != nullptr; } 1937 1938 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 1939 1940 // Returns whether the code generation of the instruction will require to have access 1941 // to the current method. Such instructions are: 1942 // (1): Instructions that require an environment, as calling the runtime requires 1943 // to walk the stack and have the current method stored at a specific stack address. 1944 // (2): Object literals like classes and strings, that are loaded from the dex cache 1945 // fields of the current method. 1946 bool NeedsCurrentMethod() const { 1947 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 1948 } 1949 1950 virtual bool NeedsDexCache() const { return false; } 1951 1952 // Does this instruction have any use in an environment before 1953 // control flow hits 'other'? 1954 bool HasAnyEnvironmentUseBefore(HInstruction* other); 1955 1956 // Remove all references to environment uses of this instruction. 1957 // The caller must ensure that this is safe to do. 1958 void RemoveEnvironmentUsers(); 1959 1960 protected: 1961 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 1962 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 1963 1964 private: 1965 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 1966 1967 HInstruction* previous_; 1968 HInstruction* next_; 1969 HBasicBlock* block_; 1970 const uint32_t dex_pc_; 1971 1972 // An instruction gets an id when it is added to the graph. 1973 // It reflects creation order. A negative id means the instruction 1974 // has not been added to the graph. 1975 int id_; 1976 1977 // When doing liveness analysis, instructions that have uses get an SSA index. 1978 int ssa_index_; 1979 1980 // List of instructions that have this instruction as input. 1981 HUseList<HInstruction*> uses_; 1982 1983 // List of environments that contain this instruction. 1984 HUseList<HEnvironment*> env_uses_; 1985 1986 // The environment associated with this instruction. Not null if the instruction 1987 // might jump out of the method. 1988 HEnvironment* environment_; 1989 1990 // Set by the code generator. 1991 LocationSummary* locations_; 1992 1993 // Set by the liveness analysis. 1994 LiveInterval* live_interval_; 1995 1996 // Set by the liveness analysis, this is the position in a linear 1997 // order of blocks where this instruction's live interval start. 1998 size_t lifetime_position_; 1999 2000 const SideEffects side_effects_; 2001 2002 // TODO: for primitive types this should be marked as invalid. 2003 ReferenceTypeInfo reference_type_info_; 2004 2005 friend class GraphChecker; 2006 friend class HBasicBlock; 2007 friend class HEnvironment; 2008 friend class HGraph; 2009 friend class HInstructionList; 2010 2011 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2012}; 2013std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2014 2015class HInputIterator : public ValueObject { 2016 public: 2017 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2018 2019 bool Done() const { return index_ == instruction_->InputCount(); } 2020 HInstruction* Current() const { return instruction_->InputAt(index_); } 2021 void Advance() { index_++; } 2022 2023 private: 2024 HInstruction* instruction_; 2025 size_t index_; 2026 2027 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2028}; 2029 2030class HInstructionIterator : public ValueObject { 2031 public: 2032 explicit HInstructionIterator(const HInstructionList& instructions) 2033 : instruction_(instructions.first_instruction_) { 2034 next_ = Done() ? nullptr : instruction_->GetNext(); 2035 } 2036 2037 bool Done() const { return instruction_ == nullptr; } 2038 HInstruction* Current() const { return instruction_; } 2039 void Advance() { 2040 instruction_ = next_; 2041 next_ = Done() ? nullptr : instruction_->GetNext(); 2042 } 2043 2044 private: 2045 HInstruction* instruction_; 2046 HInstruction* next_; 2047 2048 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2049}; 2050 2051class HBackwardInstructionIterator : public ValueObject { 2052 public: 2053 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2054 : instruction_(instructions.last_instruction_) { 2055 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2056 } 2057 2058 bool Done() const { return instruction_ == nullptr; } 2059 HInstruction* Current() const { return instruction_; } 2060 void Advance() { 2061 instruction_ = next_; 2062 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2063 } 2064 2065 private: 2066 HInstruction* instruction_; 2067 HInstruction* next_; 2068 2069 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2070}; 2071 2072template<size_t N> 2073class HTemplateInstruction: public HInstruction { 2074 public: 2075 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc = kNoDexPc) 2076 : HInstruction(side_effects, dex_pc), inputs_() {} 2077 virtual ~HTemplateInstruction() {} 2078 2079 size_t InputCount() const OVERRIDE { return N; } 2080 2081 protected: 2082 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2083 DCHECK_LT(i, N); 2084 return inputs_[i]; 2085 } 2086 2087 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2088 DCHECK_LT(i, N); 2089 inputs_[i] = input; 2090 } 2091 2092 private: 2093 std::array<HUserRecord<HInstruction*>, N> inputs_; 2094 2095 friend class SsaBuilder; 2096}; 2097 2098// HTemplateInstruction specialization for N=0. 2099template<> 2100class HTemplateInstruction<0>: public HInstruction { 2101 public: 2102 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc = kNoDexPc) 2103 : HInstruction(side_effects, dex_pc) {} 2104 2105 virtual ~HTemplateInstruction() {} 2106 2107 size_t InputCount() const OVERRIDE { return 0; } 2108 2109 protected: 2110 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2111 LOG(FATAL) << "Unreachable"; 2112 UNREACHABLE(); 2113 } 2114 2115 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2116 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2117 LOG(FATAL) << "Unreachable"; 2118 UNREACHABLE(); 2119 } 2120 2121 private: 2122 friend class SsaBuilder; 2123}; 2124 2125template<intptr_t N> 2126class HExpression : public HTemplateInstruction<N> { 2127 public: 2128 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc = kNoDexPc) 2129 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 2130 virtual ~HExpression() {} 2131 2132 Primitive::Type GetType() const OVERRIDE { return type_; } 2133 2134 protected: 2135 Primitive::Type type_; 2136}; 2137 2138// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2139// instruction that branches to the exit block. 2140class HReturnVoid : public HTemplateInstruction<0> { 2141 public: 2142 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2143 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2144 2145 bool IsControlFlow() const OVERRIDE { return true; } 2146 2147 DECLARE_INSTRUCTION(ReturnVoid); 2148 2149 private: 2150 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2151}; 2152 2153// Represents dex's RETURN opcodes. A HReturn is a control flow 2154// instruction that branches to the exit block. 2155class HReturn : public HTemplateInstruction<1> { 2156 public: 2157 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2158 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2159 SetRawInputAt(0, value); 2160 } 2161 2162 bool IsControlFlow() const OVERRIDE { return true; } 2163 2164 DECLARE_INSTRUCTION(Return); 2165 2166 private: 2167 DISALLOW_COPY_AND_ASSIGN(HReturn); 2168}; 2169 2170// The exit instruction is the only instruction of the exit block. 2171// Instructions aborting the method (HThrow and HReturn) must branch to the 2172// exit block. 2173class HExit : public HTemplateInstruction<0> { 2174 public: 2175 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2176 2177 bool IsControlFlow() const OVERRIDE { return true; } 2178 2179 DECLARE_INSTRUCTION(Exit); 2180 2181 private: 2182 DISALLOW_COPY_AND_ASSIGN(HExit); 2183}; 2184 2185// Jumps from one block to another. 2186class HGoto : public HTemplateInstruction<0> { 2187 public: 2188 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2189 2190 bool IsControlFlow() const OVERRIDE { return true; } 2191 2192 HBasicBlock* GetSuccessor() const { 2193 return GetBlock()->GetSingleSuccessor(); 2194 } 2195 2196 DECLARE_INSTRUCTION(Goto); 2197 2198 private: 2199 DISALLOW_COPY_AND_ASSIGN(HGoto); 2200}; 2201 2202class HConstant : public HExpression<0> { 2203 public: 2204 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2205 : HExpression(type, SideEffects::None(), dex_pc) {} 2206 2207 bool CanBeMoved() const OVERRIDE { return true; } 2208 2209 virtual bool IsMinusOne() const { return false; } 2210 virtual bool IsZero() const { return false; } 2211 virtual bool IsOne() const { return false; } 2212 2213 virtual uint64_t GetValueAsUint64() const = 0; 2214 2215 DECLARE_INSTRUCTION(Constant); 2216 2217 private: 2218 DISALLOW_COPY_AND_ASSIGN(HConstant); 2219}; 2220 2221class HNullConstant : public HConstant { 2222 public: 2223 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2224 return true; 2225 } 2226 2227 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2228 2229 size_t ComputeHashCode() const OVERRIDE { return 0; } 2230 2231 DECLARE_INSTRUCTION(NullConstant); 2232 2233 private: 2234 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2235 2236 friend class HGraph; 2237 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2238}; 2239 2240// Constants of the type int. Those can be from Dex instructions, or 2241// synthesized (for example with the if-eqz instruction). 2242class HIntConstant : public HConstant { 2243 public: 2244 int32_t GetValue() const { return value_; } 2245 2246 uint64_t GetValueAsUint64() const OVERRIDE { 2247 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2248 } 2249 2250 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2251 DCHECK(other->IsIntConstant()); 2252 return other->AsIntConstant()->value_ == value_; 2253 } 2254 2255 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2256 2257 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2258 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2259 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2260 2261 DECLARE_INSTRUCTION(IntConstant); 2262 2263 private: 2264 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2265 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2266 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2267 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {} 2268 2269 const int32_t value_; 2270 2271 friend class HGraph; 2272 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2273 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2274 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2275}; 2276 2277class HLongConstant : public HConstant { 2278 public: 2279 int64_t GetValue() const { return value_; } 2280 2281 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2282 2283 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2284 DCHECK(other->IsLongConstant()); 2285 return other->AsLongConstant()->value_ == value_; 2286 } 2287 2288 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2289 2290 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2291 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2292 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2293 2294 DECLARE_INSTRUCTION(LongConstant); 2295 2296 private: 2297 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2298 : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {} 2299 2300 const int64_t value_; 2301 2302 friend class HGraph; 2303 DISALLOW_COPY_AND_ASSIGN(HLongConstant); 2304}; 2305 2306// Conditional branch. A block ending with an HIf instruction must have 2307// two successors. 2308class HIf : public HTemplateInstruction<1> { 2309 public: 2310 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc) 2311 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2312 SetRawInputAt(0, input); 2313 } 2314 2315 bool IsControlFlow() const OVERRIDE { return true; } 2316 2317 HBasicBlock* IfTrueSuccessor() const { 2318 return GetBlock()->GetSuccessor(0); 2319 } 2320 2321 HBasicBlock* IfFalseSuccessor() const { 2322 return GetBlock()->GetSuccessor(1); 2323 } 2324 2325 DECLARE_INSTRUCTION(If); 2326 2327 private: 2328 DISALLOW_COPY_AND_ASSIGN(HIf); 2329}; 2330 2331 2332// Abstract instruction which marks the beginning and/or end of a try block and 2333// links it to the respective exception handlers. Behaves the same as a Goto in 2334// non-exceptional control flow. 2335// Normal-flow successor is stored at index zero, exception handlers under 2336// higher indices in no particular order. 2337class HTryBoundary : public HTemplateInstruction<0> { 2338 public: 2339 enum BoundaryKind { 2340 kEntry, 2341 kExit, 2342 }; 2343 2344 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc) 2345 : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {} 2346 2347 bool IsControlFlow() const OVERRIDE { return true; } 2348 2349 // Returns the block's non-exceptional successor (index zero). 2350 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessor(0); } 2351 2352 // Returns whether `handler` is among its exception handlers (non-zero index 2353 // successors). 2354 bool HasExceptionHandler(const HBasicBlock& handler) const { 2355 DCHECK(handler.IsCatchBlock()); 2356 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2357 } 2358 2359 // If not present already, adds `handler` to its block's list of exception 2360 // handlers. 2361 void AddExceptionHandler(HBasicBlock* handler) { 2362 if (!HasExceptionHandler(*handler)) { 2363 GetBlock()->AddSuccessor(handler); 2364 } 2365 } 2366 2367 bool IsEntry() const { return kind_ == BoundaryKind::kEntry; } 2368 2369 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2370 2371 DECLARE_INSTRUCTION(TryBoundary); 2372 2373 private: 2374 const BoundaryKind kind_; 2375 2376 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2377}; 2378 2379// Iterator over exception handlers of a given HTryBoundary, i.e. over 2380// exceptional successors of its basic block. 2381class HExceptionHandlerIterator : public ValueObject { 2382 public: 2383 explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary) 2384 : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {} 2385 2386 bool Done() const { return index_ == block_.GetSuccessors().size(); } 2387 HBasicBlock* Current() const { return block_.GetSuccessor(index_); } 2388 size_t CurrentSuccessorIndex() const { return index_; } 2389 void Advance() { ++index_; } 2390 2391 private: 2392 const HBasicBlock& block_; 2393 size_t index_; 2394 2395 DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator); 2396}; 2397 2398// Deoptimize to interpreter, upon checking a condition. 2399class HDeoptimize : public HTemplateInstruction<1> { 2400 public: 2401 explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc) 2402 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2403 SetRawInputAt(0, cond); 2404 } 2405 2406 bool NeedsEnvironment() const OVERRIDE { return true; } 2407 bool CanThrow() const OVERRIDE { return true; } 2408 2409 DECLARE_INSTRUCTION(Deoptimize); 2410 2411 private: 2412 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 2413}; 2414 2415// Represents the ArtMethod that was passed as a first argument to 2416// the method. It is used by instructions that depend on it, like 2417// instructions that work with the dex cache. 2418class HCurrentMethod : public HExpression<0> { 2419 public: 2420 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2421 : HExpression(type, SideEffects::None(), dex_pc) {} 2422 2423 DECLARE_INSTRUCTION(CurrentMethod); 2424 2425 private: 2426 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 2427}; 2428 2429// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will 2430// have one successor for each entry in the switch table, and the final successor 2431// will be the block containing the next Dex opcode. 2432class HPackedSwitch : public HTemplateInstruction<1> { 2433 public: 2434 HPackedSwitch(int32_t start_value, 2435 uint32_t num_entries, 2436 HInstruction* input, 2437 uint32_t dex_pc = kNoDexPc) 2438 : HTemplateInstruction(SideEffects::None(), dex_pc), 2439 start_value_(start_value), 2440 num_entries_(num_entries) { 2441 SetRawInputAt(0, input); 2442 } 2443 2444 bool IsControlFlow() const OVERRIDE { return true; } 2445 2446 int32_t GetStartValue() const { return start_value_; } 2447 2448 uint32_t GetNumEntries() const { return num_entries_; } 2449 2450 HBasicBlock* GetDefaultBlock() const { 2451 // Last entry is the default block. 2452 return GetBlock()->GetSuccessor(num_entries_); 2453 } 2454 DECLARE_INSTRUCTION(PackedSwitch); 2455 2456 private: 2457 const int32_t start_value_; 2458 const uint32_t num_entries_; 2459 2460 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2461}; 2462 2463class HUnaryOperation : public HExpression<1> { 2464 public: 2465 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2466 : HExpression(result_type, SideEffects::None(), dex_pc) { 2467 SetRawInputAt(0, input); 2468 } 2469 2470 HInstruction* GetInput() const { return InputAt(0); } 2471 Primitive::Type GetResultType() const { return GetType(); } 2472 2473 bool CanBeMoved() const OVERRIDE { return true; } 2474 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2475 UNUSED(other); 2476 return true; 2477 } 2478 2479 // Try to statically evaluate `operation` and return a HConstant 2480 // containing the result of this evaluation. If `operation` cannot 2481 // be evaluated as a constant, return null. 2482 HConstant* TryStaticEvaluation() const; 2483 2484 // Apply this operation to `x`. 2485 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2486 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2487 2488 DECLARE_INSTRUCTION(UnaryOperation); 2489 2490 private: 2491 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2492}; 2493 2494class HBinaryOperation : public HExpression<2> { 2495 public: 2496 HBinaryOperation(Primitive::Type result_type, 2497 HInstruction* left, 2498 HInstruction* right, 2499 SideEffects side_effects = SideEffects::None(), 2500 uint32_t dex_pc = kNoDexPc) 2501 : HExpression(result_type, side_effects, dex_pc) { 2502 SetRawInputAt(0, left); 2503 SetRawInputAt(1, right); 2504 } 2505 2506 HInstruction* GetLeft() const { return InputAt(0); } 2507 HInstruction* GetRight() const { return InputAt(1); } 2508 Primitive::Type GetResultType() const { return GetType(); } 2509 2510 virtual bool IsCommutative() const { return false; } 2511 2512 // Put constant on the right. 2513 // Returns whether order is changed. 2514 bool OrderInputsWithConstantOnTheRight() { 2515 HInstruction* left = InputAt(0); 2516 HInstruction* right = InputAt(1); 2517 if (left->IsConstant() && !right->IsConstant()) { 2518 ReplaceInput(right, 0); 2519 ReplaceInput(left, 1); 2520 return true; 2521 } 2522 return false; 2523 } 2524 2525 // Order inputs by instruction id, but favor constant on the right side. 2526 // This helps GVN for commutative ops. 2527 void OrderInputs() { 2528 DCHECK(IsCommutative()); 2529 HInstruction* left = InputAt(0); 2530 HInstruction* right = InputAt(1); 2531 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2532 return; 2533 } 2534 if (OrderInputsWithConstantOnTheRight()) { 2535 return; 2536 } 2537 // Order according to instruction id. 2538 if (left->GetId() > right->GetId()) { 2539 ReplaceInput(right, 0); 2540 ReplaceInput(left, 1); 2541 } 2542 } 2543 2544 bool CanBeMoved() const OVERRIDE { return true; } 2545 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2546 UNUSED(other); 2547 return true; 2548 } 2549 2550 // Try to statically evaluate `operation` and return a HConstant 2551 // containing the result of this evaluation. If `operation` cannot 2552 // be evaluated as a constant, return null. 2553 HConstant* TryStaticEvaluation() const; 2554 2555 // Apply this operation to `x` and `y`. 2556 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2557 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2558 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2559 HLongConstant* y ATTRIBUTE_UNUSED) const { 2560 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2561 return nullptr; 2562 } 2563 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2564 HIntConstant* y ATTRIBUTE_UNUSED) const { 2565 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2566 return nullptr; 2567 } 2568 2569 // Returns an input that can legally be used as the right input and is 2570 // constant, or null. 2571 HConstant* GetConstantRight() const; 2572 2573 // If `GetConstantRight()` returns one of the input, this returns the other 2574 // one. Otherwise it returns null. 2575 HInstruction* GetLeastConstantLeft() const; 2576 2577 DECLARE_INSTRUCTION(BinaryOperation); 2578 2579 private: 2580 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2581}; 2582 2583// The comparison bias applies for floating point operations and indicates how NaN 2584// comparisons are treated: 2585enum class ComparisonBias { 2586 kNoBias, // bias is not applicable (i.e. for long operation) 2587 kGtBias, // return 1 for NaN comparisons 2588 kLtBias, // return -1 for NaN comparisons 2589}; 2590 2591class HCondition : public HBinaryOperation { 2592 public: 2593 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2594 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2595 needs_materialization_(true), 2596 bias_(ComparisonBias::kNoBias) {} 2597 2598 bool NeedsMaterialization() const { return needs_materialization_; } 2599 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2600 2601 // For code generation purposes, returns whether this instruction is just before 2602 // `instruction`, and disregard moves in between. 2603 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2604 2605 DECLARE_INSTRUCTION(Condition); 2606 2607 virtual IfCondition GetCondition() const = 0; 2608 2609 virtual IfCondition GetOppositeCondition() const = 0; 2610 2611 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2612 2613 void SetBias(ComparisonBias bias) { bias_ = bias; } 2614 2615 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2616 return bias_ == other->AsCondition()->bias_; 2617 } 2618 2619 bool IsFPConditionTrueIfNaN() const { 2620 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2621 IfCondition if_cond = GetCondition(); 2622 return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2623 } 2624 2625 bool IsFPConditionFalseIfNaN() const { 2626 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2627 IfCondition if_cond = GetCondition(); 2628 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2629 } 2630 2631 private: 2632 // For register allocation purposes, returns whether this instruction needs to be 2633 // materialized (that is, not just be in the processor flags). 2634 bool needs_materialization_; 2635 2636 // Needed if we merge a HCompare into a HCondition. 2637 ComparisonBias bias_; 2638 2639 DISALLOW_COPY_AND_ASSIGN(HCondition); 2640}; 2641 2642// Instruction to check if two inputs are equal to each other. 2643class HEqual : public HCondition { 2644 public: 2645 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2646 : HCondition(first, second, dex_pc) {} 2647 2648 bool IsCommutative() const OVERRIDE { return true; } 2649 2650 template <typename T> bool Compute(T x, T y) const { return x == y; } 2651 2652 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2653 return GetBlock()->GetGraph()->GetIntConstant( 2654 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2655 } 2656 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2657 return GetBlock()->GetGraph()->GetIntConstant( 2658 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2659 } 2660 2661 DECLARE_INSTRUCTION(Equal); 2662 2663 IfCondition GetCondition() const OVERRIDE { 2664 return kCondEQ; 2665 } 2666 2667 IfCondition GetOppositeCondition() const OVERRIDE { 2668 return kCondNE; 2669 } 2670 2671 private: 2672 DISALLOW_COPY_AND_ASSIGN(HEqual); 2673}; 2674 2675class HNotEqual : public HCondition { 2676 public: 2677 HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2678 : HCondition(first, second, dex_pc) {} 2679 2680 bool IsCommutative() const OVERRIDE { return true; } 2681 2682 template <typename T> bool Compute(T x, T y) const { return x != y; } 2683 2684 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2685 return GetBlock()->GetGraph()->GetIntConstant( 2686 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2687 } 2688 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2689 return GetBlock()->GetGraph()->GetIntConstant( 2690 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2691 } 2692 2693 DECLARE_INSTRUCTION(NotEqual); 2694 2695 IfCondition GetCondition() const OVERRIDE { 2696 return kCondNE; 2697 } 2698 2699 IfCondition GetOppositeCondition() const OVERRIDE { 2700 return kCondEQ; 2701 } 2702 2703 private: 2704 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2705}; 2706 2707class HLessThan : public HCondition { 2708 public: 2709 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2710 : HCondition(first, second, dex_pc) {} 2711 2712 template <typename T> bool Compute(T x, T y) const { return x < y; } 2713 2714 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2715 return GetBlock()->GetGraph()->GetIntConstant( 2716 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2717 } 2718 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2719 return GetBlock()->GetGraph()->GetIntConstant( 2720 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2721 } 2722 2723 DECLARE_INSTRUCTION(LessThan); 2724 2725 IfCondition GetCondition() const OVERRIDE { 2726 return kCondLT; 2727 } 2728 2729 IfCondition GetOppositeCondition() const OVERRIDE { 2730 return kCondGE; 2731 } 2732 2733 private: 2734 DISALLOW_COPY_AND_ASSIGN(HLessThan); 2735}; 2736 2737class HLessThanOrEqual : public HCondition { 2738 public: 2739 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2740 : HCondition(first, second, dex_pc) {} 2741 2742 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2743 2744 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2745 return GetBlock()->GetGraph()->GetIntConstant( 2746 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2747 } 2748 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2749 return GetBlock()->GetGraph()->GetIntConstant( 2750 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2751 } 2752 2753 DECLARE_INSTRUCTION(LessThanOrEqual); 2754 2755 IfCondition GetCondition() const OVERRIDE { 2756 return kCondLE; 2757 } 2758 2759 IfCondition GetOppositeCondition() const OVERRIDE { 2760 return kCondGT; 2761 } 2762 2763 private: 2764 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual); 2765}; 2766 2767class HGreaterThan : public HCondition { 2768 public: 2769 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2770 : HCondition(first, second, dex_pc) {} 2771 2772 template <typename T> bool Compute(T x, T y) const { return x > y; } 2773 2774 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2775 return GetBlock()->GetGraph()->GetIntConstant( 2776 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2777 } 2778 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2779 return GetBlock()->GetGraph()->GetIntConstant( 2780 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2781 } 2782 2783 DECLARE_INSTRUCTION(GreaterThan); 2784 2785 IfCondition GetCondition() const OVERRIDE { 2786 return kCondGT; 2787 } 2788 2789 IfCondition GetOppositeCondition() const OVERRIDE { 2790 return kCondLE; 2791 } 2792 2793 private: 2794 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 2795}; 2796 2797class HGreaterThanOrEqual : public HCondition { 2798 public: 2799 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2800 : HCondition(first, second, dex_pc) {} 2801 2802 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2803 2804 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2805 return GetBlock()->GetGraph()->GetIntConstant( 2806 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2807 } 2808 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2809 return GetBlock()->GetGraph()->GetIntConstant( 2810 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2811 } 2812 2813 DECLARE_INSTRUCTION(GreaterThanOrEqual); 2814 2815 IfCondition GetCondition() const OVERRIDE { 2816 return kCondGE; 2817 } 2818 2819 IfCondition GetOppositeCondition() const OVERRIDE { 2820 return kCondLT; 2821 } 2822 2823 private: 2824 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2825}; 2826 2827 2828// Instruction to check how two inputs compare to each other. 2829// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 2830class HCompare : public HBinaryOperation { 2831 public: 2832 HCompare(Primitive::Type type, 2833 HInstruction* first, 2834 HInstruction* second, 2835 ComparisonBias bias, 2836 uint32_t dex_pc) 2837 : HBinaryOperation(Primitive::kPrimInt, 2838 first, 2839 second, 2840 SideEffectsForArchRuntimeCalls(type), 2841 dex_pc), 2842 bias_(bias) { 2843 DCHECK_EQ(type, first->GetType()); 2844 DCHECK_EQ(type, second->GetType()); 2845 } 2846 2847 template <typename T> 2848 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 2849 2850 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2851 return GetBlock()->GetGraph()->GetIntConstant( 2852 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2853 } 2854 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2855 return GetBlock()->GetGraph()->GetIntConstant( 2856 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2857 } 2858 2859 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2860 return bias_ == other->AsCompare()->bias_; 2861 } 2862 2863 ComparisonBias GetBias() const { return bias_; } 2864 2865 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 2866 2867 2868 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 2869 // MIPS64 uses a runtime call for FP comparisons. 2870 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 2871 } 2872 2873 DECLARE_INSTRUCTION(Compare); 2874 2875 private: 2876 const ComparisonBias bias_; 2877 2878 DISALLOW_COPY_AND_ASSIGN(HCompare); 2879}; 2880 2881// A local in the graph. Corresponds to a Dex register. 2882class HLocal : public HTemplateInstruction<0> { 2883 public: 2884 explicit HLocal(uint16_t reg_number) 2885 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 2886 2887 DECLARE_INSTRUCTION(Local); 2888 2889 uint16_t GetRegNumber() const { return reg_number_; } 2890 2891 private: 2892 // The Dex register number. 2893 const uint16_t reg_number_; 2894 2895 DISALLOW_COPY_AND_ASSIGN(HLocal); 2896}; 2897 2898// Load a given local. The local is an input of this instruction. 2899class HLoadLocal : public HExpression<1> { 2900 public: 2901 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2902 : HExpression(type, SideEffects::None(), dex_pc) { 2903 SetRawInputAt(0, local); 2904 } 2905 2906 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 2907 2908 DECLARE_INSTRUCTION(LoadLocal); 2909 2910 private: 2911 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 2912}; 2913 2914// Store a value in a given local. This instruction has two inputs: the value 2915// and the local. 2916class HStoreLocal : public HTemplateInstruction<2> { 2917 public: 2918 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 2919 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2920 SetRawInputAt(0, local); 2921 SetRawInputAt(1, value); 2922 } 2923 2924 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 2925 2926 DECLARE_INSTRUCTION(StoreLocal); 2927 2928 private: 2929 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 2930}; 2931 2932class HFloatConstant : public HConstant { 2933 public: 2934 float GetValue() const { return value_; } 2935 2936 uint64_t GetValueAsUint64() const OVERRIDE { 2937 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 2938 } 2939 2940 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2941 DCHECK(other->IsFloatConstant()); 2942 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 2943 } 2944 2945 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2946 2947 bool IsMinusOne() const OVERRIDE { 2948 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 2949 } 2950 bool IsZero() const OVERRIDE { 2951 return value_ == 0.0f; 2952 } 2953 bool IsOne() const OVERRIDE { 2954 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 2955 } 2956 bool IsNaN() const { 2957 return std::isnan(value_); 2958 } 2959 2960 DECLARE_INSTRUCTION(FloatConstant); 2961 2962 private: 2963 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 2964 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 2965 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2966 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 2967 2968 const float value_; 2969 2970 // Only the SsaBuilder and HGraph can create floating-point constants. 2971 friend class SsaBuilder; 2972 friend class HGraph; 2973 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 2974}; 2975 2976class HDoubleConstant : public HConstant { 2977 public: 2978 double GetValue() const { return value_; } 2979 2980 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 2981 2982 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2983 DCHECK(other->IsDoubleConstant()); 2984 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 2985 } 2986 2987 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2988 2989 bool IsMinusOne() const OVERRIDE { 2990 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 2991 } 2992 bool IsZero() const OVERRIDE { 2993 return value_ == 0.0; 2994 } 2995 bool IsOne() const OVERRIDE { 2996 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 2997 } 2998 bool IsNaN() const { 2999 return std::isnan(value_); 3000 } 3001 3002 DECLARE_INSTRUCTION(DoubleConstant); 3003 3004 private: 3005 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3006 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3007 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3008 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3009 3010 const double value_; 3011 3012 // Only the SsaBuilder and HGraph can create floating-point constants. 3013 friend class SsaBuilder; 3014 friend class HGraph; 3015 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3016}; 3017 3018enum class Intrinsics { 3019#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache) k ## Name, 3020#include "intrinsics_list.h" 3021 kNone, 3022 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3023#undef INTRINSICS_LIST 3024#undef OPTIMIZING_INTRINSICS 3025}; 3026std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3027 3028enum IntrinsicNeedsEnvironmentOrCache { 3029 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3030 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3031}; 3032 3033class HInvoke : public HInstruction { 3034 public: 3035 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3036 3037 // Runtime needs to walk the stack, so Dex -> Dex calls need to 3038 // know their environment. 3039 bool NeedsEnvironment() const OVERRIDE { 3040 return needs_environment_or_cache_ == kNeedsEnvironmentOrCache; 3041 } 3042 3043 void SetArgumentAt(size_t index, HInstruction* argument) { 3044 SetRawInputAt(index, argument); 3045 } 3046 3047 // Return the number of arguments. This number can be lower than 3048 // the number of inputs returned by InputCount(), as some invoke 3049 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3050 // inputs at the end of their list of inputs. 3051 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3052 3053 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3054 3055 3056 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3057 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3058 3059 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3060 3061 Intrinsics GetIntrinsic() const { 3062 return intrinsic_; 3063 } 3064 3065 void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache) { 3066 intrinsic_ = intrinsic; 3067 needs_environment_or_cache_ = needs_env_or_cache; 3068 } 3069 3070 bool IsFromInlinedInvoke() const { 3071 return GetEnvironment()->GetParent() != nullptr; 3072 } 3073 3074 bool CanThrow() const OVERRIDE { return true; } 3075 3076 DECLARE_INSTRUCTION(Invoke); 3077 3078 protected: 3079 HInvoke(ArenaAllocator* arena, 3080 uint32_t number_of_arguments, 3081 uint32_t number_of_other_inputs, 3082 Primitive::Type return_type, 3083 uint32_t dex_pc, 3084 uint32_t dex_method_index, 3085 InvokeType original_invoke_type) 3086 : HInstruction( 3087 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 3088 number_of_arguments_(number_of_arguments), 3089 inputs_(number_of_arguments + number_of_other_inputs, 3090 arena->Adapter(kArenaAllocInvokeInputs)), 3091 return_type_(return_type), 3092 dex_method_index_(dex_method_index), 3093 original_invoke_type_(original_invoke_type), 3094 intrinsic_(Intrinsics::kNone), 3095 needs_environment_or_cache_(kNeedsEnvironmentOrCache) { 3096 } 3097 3098 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3099 DCHECK_LT(index, InputCount()); 3100 return inputs_[index]; 3101 } 3102 3103 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3104 DCHECK_LT(index, InputCount()); 3105 inputs_[index] = input; 3106 } 3107 3108 uint32_t number_of_arguments_; 3109 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3110 const Primitive::Type return_type_; 3111 const uint32_t dex_method_index_; 3112 const InvokeType original_invoke_type_; 3113 Intrinsics intrinsic_; 3114 IntrinsicNeedsEnvironmentOrCache needs_environment_or_cache_; 3115 3116 private: 3117 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3118}; 3119 3120class HInvokeUnresolved : public HInvoke { 3121 public: 3122 HInvokeUnresolved(ArenaAllocator* arena, 3123 uint32_t number_of_arguments, 3124 Primitive::Type return_type, 3125 uint32_t dex_pc, 3126 uint32_t dex_method_index, 3127 InvokeType invoke_type) 3128 : HInvoke(arena, 3129 number_of_arguments, 3130 0u /* number_of_other_inputs */, 3131 return_type, 3132 dex_pc, 3133 dex_method_index, 3134 invoke_type) { 3135 } 3136 3137 DECLARE_INSTRUCTION(InvokeUnresolved); 3138 3139 private: 3140 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3141}; 3142 3143class HInvokeStaticOrDirect : public HInvoke { 3144 public: 3145 // Requirements of this method call regarding the class 3146 // initialization (clinit) check of its declaring class. 3147 enum class ClinitCheckRequirement { 3148 kNone, // Class already initialized. 3149 kExplicit, // Static call having explicit clinit check as last input. 3150 kImplicit, // Static call implicitly requiring a clinit check. 3151 }; 3152 3153 // Determines how to load the target ArtMethod*. 3154 enum class MethodLoadKind { 3155 // Use a String init ArtMethod* loaded from Thread entrypoints. 3156 kStringInit, 3157 3158 // Use the method's own ArtMethod* loaded by the register allocator. 3159 kRecursive, 3160 3161 // Use ArtMethod* at a known address, embed the direct address in the code. 3162 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3163 kDirectAddress, 3164 3165 // Use ArtMethod* at an address that will be known at link time, embed the direct 3166 // address in the code. If the image is relocatable, emit .patch_oat entry. 3167 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3168 // the image relocatable or not. 3169 kDirectAddressWithFixup, 3170 3171 // Load from resoved methods array in the dex cache using a PC-relative load. 3172 // Used when we need to use the dex cache, for example for invoke-static that 3173 // may cause class initialization (the entry may point to a resolution method), 3174 // and we know that we can access the dex cache arrays using a PC-relative load. 3175 kDexCachePcRelative, 3176 3177 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3178 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3179 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3180 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3181 kDexCacheViaMethod, 3182 }; 3183 3184 // Determines the location of the code pointer. 3185 enum class CodePtrLocation { 3186 // Recursive call, use local PC-relative call instruction. 3187 kCallSelf, 3188 3189 // Use PC-relative call instruction patched at link time. 3190 // Used for calls within an oat file, boot->boot or app->app. 3191 kCallPCRelative, 3192 3193 // Call to a known target address, embed the direct address in code. 3194 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3195 kCallDirect, 3196 3197 // Call to a target address that will be known at link time, embed the direct 3198 // address in code. If the image is relocatable, emit .patch_oat entry. 3199 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3200 // the image relocatable or not. 3201 kCallDirectWithFixup, 3202 3203 // Use code pointer from the ArtMethod*. 3204 // Used when we don't know the target code. This is also the last-resort-kind used when 3205 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3206 kCallArtMethod, 3207 }; 3208 3209 struct DispatchInfo { 3210 const MethodLoadKind method_load_kind; 3211 const CodePtrLocation code_ptr_location; 3212 // The method load data holds 3213 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3214 // Note that there are multiple string init methods, each having its own offset. 3215 // - the method address for kDirectAddress 3216 // - the dex cache arrays offset for kDexCachePcRel. 3217 const uint64_t method_load_data; 3218 const uint64_t direct_code_ptr; 3219 }; 3220 3221 HInvokeStaticOrDirect(ArenaAllocator* arena, 3222 uint32_t number_of_arguments, 3223 Primitive::Type return_type, 3224 uint32_t dex_pc, 3225 uint32_t method_index, 3226 MethodReference target_method, 3227 DispatchInfo dispatch_info, 3228 InvokeType original_invoke_type, 3229 InvokeType invoke_type, 3230 ClinitCheckRequirement clinit_check_requirement) 3231 : HInvoke(arena, 3232 number_of_arguments, 3233 // There is one extra argument for the HCurrentMethod node, and 3234 // potentially one other if the clinit check is explicit, and one other 3235 // if the method is a string factory. 3236 1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) 3237 + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3238 return_type, 3239 dex_pc, 3240 method_index, 3241 original_invoke_type), 3242 invoke_type_(invoke_type), 3243 clinit_check_requirement_(clinit_check_requirement), 3244 target_method_(target_method), 3245 dispatch_info_(dispatch_info) {} 3246 3247 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3248 UNUSED(obj); 3249 // We access the method via the dex cache so we can't do an implicit null check. 3250 // TODO: for intrinsics we can generate implicit null checks. 3251 return false; 3252 } 3253 3254 bool CanBeNull() const OVERRIDE { 3255 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3256 } 3257 3258 InvokeType GetInvokeType() const { return invoke_type_; } 3259 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3260 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3261 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3262 bool NeedsDexCache() const OVERRIDE { 3263 if (intrinsic_ != Intrinsics::kNone) { return needs_environment_or_cache_; } 3264 return !IsRecursive() && !IsStringInit(); 3265 } 3266 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3267 uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); } 3268 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3269 bool HasPcRelDexCache() const { return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; } 3270 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3271 MethodReference GetTargetMethod() const { return target_method_; } 3272 3273 int32_t GetStringInitOffset() const { 3274 DCHECK(IsStringInit()); 3275 return dispatch_info_.method_load_data; 3276 } 3277 3278 uint64_t GetMethodAddress() const { 3279 DCHECK(HasMethodAddress()); 3280 return dispatch_info_.method_load_data; 3281 } 3282 3283 uint32_t GetDexCacheArrayOffset() const { 3284 DCHECK(HasPcRelDexCache()); 3285 return dispatch_info_.method_load_data; 3286 } 3287 3288 uint64_t GetDirectCodePtr() const { 3289 DCHECK(HasDirectCodePtr()); 3290 return dispatch_info_.direct_code_ptr; 3291 } 3292 3293 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3294 3295 // Is this instruction a call to a static method? 3296 bool IsStatic() const { 3297 return GetInvokeType() == kStatic; 3298 } 3299 3300 // Remove the art::HLoadClass instruction set as last input by 3301 // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of 3302 // the initial art::HClinitCheck instruction (only relevant for 3303 // static calls with explicit clinit check). 3304 void RemoveLoadClassAsLastInput() { 3305 DCHECK(IsStaticWithExplicitClinitCheck()); 3306 size_t last_input_index = InputCount() - 1; 3307 HInstruction* last_input = InputAt(last_input_index); 3308 DCHECK(last_input != nullptr); 3309 DCHECK(last_input->IsLoadClass()) << last_input->DebugName(); 3310 RemoveAsUserOfInput(last_input_index); 3311 inputs_.pop_back(); 3312 clinit_check_requirement_ = ClinitCheckRequirement::kImplicit; 3313 DCHECK(IsStaticWithImplicitClinitCheck()); 3314 } 3315 3316 bool IsStringFactoryFor(HFakeString* str) const { 3317 if (!IsStringInit()) return false; 3318 // +1 for the current method. 3319 if (InputCount() == (number_of_arguments_ + 1)) return false; 3320 return InputAt(InputCount() - 1)->AsFakeString() == str; 3321 } 3322 3323 void RemoveFakeStringArgumentAsLastInput() { 3324 DCHECK(IsStringInit()); 3325 size_t last_input_index = InputCount() - 1; 3326 HInstruction* last_input = InputAt(last_input_index); 3327 DCHECK(last_input != nullptr); 3328 DCHECK(last_input->IsFakeString()) << last_input->DebugName(); 3329 RemoveAsUserOfInput(last_input_index); 3330 inputs_.pop_back(); 3331 } 3332 3333 // Is this a call to a static method whose declaring class has an 3334 // explicit intialization check in the graph? 3335 bool IsStaticWithExplicitClinitCheck() const { 3336 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3337 } 3338 3339 // Is this a call to a static method whose declaring class has an 3340 // implicit intialization check requirement? 3341 bool IsStaticWithImplicitClinitCheck() const { 3342 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3343 } 3344 3345 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3346 3347 protected: 3348 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3349 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3350 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3351 HInstruction* input = input_record.GetInstruction(); 3352 // `input` is the last input of a static invoke marked as having 3353 // an explicit clinit check. It must either be: 3354 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3355 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3356 DCHECK(input != nullptr); 3357 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3358 } 3359 return input_record; 3360 } 3361 3362 private: 3363 const InvokeType invoke_type_; 3364 ClinitCheckRequirement clinit_check_requirement_; 3365 // The target method may refer to different dex file or method index than the original 3366 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3367 // in derived class to increase visibility. 3368 MethodReference target_method_; 3369 DispatchInfo dispatch_info_; 3370 3371 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3372}; 3373 3374class HInvokeVirtual : public HInvoke { 3375 public: 3376 HInvokeVirtual(ArenaAllocator* arena, 3377 uint32_t number_of_arguments, 3378 Primitive::Type return_type, 3379 uint32_t dex_pc, 3380 uint32_t dex_method_index, 3381 uint32_t vtable_index) 3382 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3383 vtable_index_(vtable_index) {} 3384 3385 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3386 // TODO: Add implicit null checks in intrinsics. 3387 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3388 } 3389 3390 uint32_t GetVTableIndex() const { return vtable_index_; } 3391 3392 DECLARE_INSTRUCTION(InvokeVirtual); 3393 3394 private: 3395 const uint32_t vtable_index_; 3396 3397 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3398}; 3399 3400class HInvokeInterface : public HInvoke { 3401 public: 3402 HInvokeInterface(ArenaAllocator* arena, 3403 uint32_t number_of_arguments, 3404 Primitive::Type return_type, 3405 uint32_t dex_pc, 3406 uint32_t dex_method_index, 3407 uint32_t imt_index) 3408 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3409 imt_index_(imt_index) {} 3410 3411 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3412 // TODO: Add implicit null checks in intrinsics. 3413 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3414 } 3415 3416 uint32_t GetImtIndex() const { return imt_index_; } 3417 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3418 3419 DECLARE_INSTRUCTION(InvokeInterface); 3420 3421 private: 3422 const uint32_t imt_index_; 3423 3424 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3425}; 3426 3427class HNewInstance : public HExpression<1> { 3428 public: 3429 HNewInstance(HCurrentMethod* current_method, 3430 uint32_t dex_pc, 3431 uint16_t type_index, 3432 const DexFile& dex_file, 3433 QuickEntrypointEnum entrypoint) 3434 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3435 type_index_(type_index), 3436 dex_file_(dex_file), 3437 entrypoint_(entrypoint) { 3438 SetRawInputAt(0, current_method); 3439 } 3440 3441 uint16_t GetTypeIndex() const { return type_index_; } 3442 const DexFile& GetDexFile() const { return dex_file_; } 3443 3444 // Calls runtime so needs an environment. 3445 bool NeedsEnvironment() const OVERRIDE { return true; } 3446 // It may throw when called on: 3447 // - interfaces 3448 // - abstract/innaccessible/unknown classes 3449 // TODO: optimize when possible. 3450 bool CanThrow() const OVERRIDE { return true; } 3451 3452 bool CanBeNull() const OVERRIDE { return false; } 3453 3454 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3455 3456 DECLARE_INSTRUCTION(NewInstance); 3457 3458 private: 3459 const uint16_t type_index_; 3460 const DexFile& dex_file_; 3461 const QuickEntrypointEnum entrypoint_; 3462 3463 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3464}; 3465 3466class HNeg : public HUnaryOperation { 3467 public: 3468 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3469 : HUnaryOperation(result_type, input, dex_pc) {} 3470 3471 template <typename T> T Compute(T x) const { return -x; } 3472 3473 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3474 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3475 } 3476 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3477 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3478 } 3479 3480 DECLARE_INSTRUCTION(Neg); 3481 3482 private: 3483 DISALLOW_COPY_AND_ASSIGN(HNeg); 3484}; 3485 3486class HNewArray : public HExpression<2> { 3487 public: 3488 HNewArray(HInstruction* length, 3489 HCurrentMethod* current_method, 3490 uint32_t dex_pc, 3491 uint16_t type_index, 3492 const DexFile& dex_file, 3493 QuickEntrypointEnum entrypoint) 3494 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3495 type_index_(type_index), 3496 dex_file_(dex_file), 3497 entrypoint_(entrypoint) { 3498 SetRawInputAt(0, length); 3499 SetRawInputAt(1, current_method); 3500 } 3501 3502 uint16_t GetTypeIndex() const { return type_index_; } 3503 const DexFile& GetDexFile() const { return dex_file_; } 3504 3505 // Calls runtime so needs an environment. 3506 bool NeedsEnvironment() const OVERRIDE { return true; } 3507 3508 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 3509 bool CanThrow() const OVERRIDE { return true; } 3510 3511 bool CanBeNull() const OVERRIDE { return false; } 3512 3513 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3514 3515 DECLARE_INSTRUCTION(NewArray); 3516 3517 private: 3518 const uint16_t type_index_; 3519 const DexFile& dex_file_; 3520 const QuickEntrypointEnum entrypoint_; 3521 3522 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3523}; 3524 3525class HAdd : public HBinaryOperation { 3526 public: 3527 HAdd(Primitive::Type result_type, 3528 HInstruction* left, 3529 HInstruction* right, 3530 uint32_t dex_pc = kNoDexPc) 3531 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3532 3533 bool IsCommutative() const OVERRIDE { return true; } 3534 3535 template <typename T> T Compute(T x, T y) const { return x + y; } 3536 3537 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3538 return GetBlock()->GetGraph()->GetIntConstant( 3539 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3540 } 3541 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3542 return GetBlock()->GetGraph()->GetLongConstant( 3543 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3544 } 3545 3546 DECLARE_INSTRUCTION(Add); 3547 3548 private: 3549 DISALLOW_COPY_AND_ASSIGN(HAdd); 3550}; 3551 3552class HSub : public HBinaryOperation { 3553 public: 3554 HSub(Primitive::Type result_type, 3555 HInstruction* left, 3556 HInstruction* right, 3557 uint32_t dex_pc = kNoDexPc) 3558 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3559 3560 template <typename T> T Compute(T x, T y) const { return x - y; } 3561 3562 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3563 return GetBlock()->GetGraph()->GetIntConstant( 3564 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3565 } 3566 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3567 return GetBlock()->GetGraph()->GetLongConstant( 3568 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3569 } 3570 3571 DECLARE_INSTRUCTION(Sub); 3572 3573 private: 3574 DISALLOW_COPY_AND_ASSIGN(HSub); 3575}; 3576 3577class HMul : public HBinaryOperation { 3578 public: 3579 HMul(Primitive::Type result_type, 3580 HInstruction* left, 3581 HInstruction* right, 3582 uint32_t dex_pc = kNoDexPc) 3583 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3584 3585 bool IsCommutative() const OVERRIDE { return true; } 3586 3587 template <typename T> T Compute(T x, T y) const { return x * y; } 3588 3589 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3590 return GetBlock()->GetGraph()->GetIntConstant( 3591 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3592 } 3593 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3594 return GetBlock()->GetGraph()->GetLongConstant( 3595 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3596 } 3597 3598 DECLARE_INSTRUCTION(Mul); 3599 3600 private: 3601 DISALLOW_COPY_AND_ASSIGN(HMul); 3602}; 3603 3604class HDiv : public HBinaryOperation { 3605 public: 3606 HDiv(Primitive::Type result_type, 3607 HInstruction* left, 3608 HInstruction* right, 3609 uint32_t dex_pc) 3610 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3611 3612 template <typename T> 3613 T Compute(T x, T y) const { 3614 // Our graph structure ensures we never have 0 for `y` during 3615 // constant folding. 3616 DCHECK_NE(y, 0); 3617 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3618 return (y == -1) ? -x : x / y; 3619 } 3620 3621 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3622 return GetBlock()->GetGraph()->GetIntConstant( 3623 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3624 } 3625 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3626 return GetBlock()->GetGraph()->GetLongConstant( 3627 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3628 } 3629 3630 static SideEffects SideEffectsForArchRuntimeCalls() { 3631 // The generated code can use a runtime call. 3632 return SideEffects::CanTriggerGC(); 3633 } 3634 3635 DECLARE_INSTRUCTION(Div); 3636 3637 private: 3638 DISALLOW_COPY_AND_ASSIGN(HDiv); 3639}; 3640 3641class HRem : public HBinaryOperation { 3642 public: 3643 HRem(Primitive::Type result_type, 3644 HInstruction* left, 3645 HInstruction* right, 3646 uint32_t dex_pc) 3647 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3648 3649 template <typename T> 3650 T Compute(T x, T y) const { 3651 // Our graph structure ensures we never have 0 for `y` during 3652 // constant folding. 3653 DCHECK_NE(y, 0); 3654 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3655 return (y == -1) ? 0 : x % y; 3656 } 3657 3658 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3659 return GetBlock()->GetGraph()->GetIntConstant( 3660 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3661 } 3662 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3663 return GetBlock()->GetGraph()->GetLongConstant( 3664 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3665 } 3666 3667 3668 static SideEffects SideEffectsForArchRuntimeCalls() { 3669 return SideEffects::CanTriggerGC(); 3670 } 3671 3672 DECLARE_INSTRUCTION(Rem); 3673 3674 private: 3675 DISALLOW_COPY_AND_ASSIGN(HRem); 3676}; 3677 3678class HDivZeroCheck : public HExpression<1> { 3679 public: 3680 HDivZeroCheck(HInstruction* value, uint32_t dex_pc) 3681 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 3682 SetRawInputAt(0, value); 3683 } 3684 3685 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); } 3686 3687 bool CanBeMoved() const OVERRIDE { return true; } 3688 3689 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3690 UNUSED(other); 3691 return true; 3692 } 3693 3694 bool NeedsEnvironment() const OVERRIDE { return true; } 3695 bool CanThrow() const OVERRIDE { return true; } 3696 3697 DECLARE_INSTRUCTION(DivZeroCheck); 3698 3699 private: 3700 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck); 3701}; 3702 3703class HShl : public HBinaryOperation { 3704 public: 3705 HShl(Primitive::Type result_type, 3706 HInstruction* left, 3707 HInstruction* right, 3708 uint32_t dex_pc = kNoDexPc) 3709 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3710 3711 template <typename T, typename U, typename V> 3712 T Compute(T x, U y, V max_shift_value) const { 3713 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3714 "V is not the unsigned integer type corresponding to T"); 3715 return x << (y & max_shift_value); 3716 } 3717 3718 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3719 return GetBlock()->GetGraph()->GetIntConstant( 3720 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3721 } 3722 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3723 // case is handled as `x << static_cast<int>(y)`. 3724 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3725 return GetBlock()->GetGraph()->GetLongConstant( 3726 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3727 } 3728 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3729 return GetBlock()->GetGraph()->GetLongConstant( 3730 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3731 } 3732 3733 DECLARE_INSTRUCTION(Shl); 3734 3735 private: 3736 DISALLOW_COPY_AND_ASSIGN(HShl); 3737}; 3738 3739class HShr : public HBinaryOperation { 3740 public: 3741 HShr(Primitive::Type result_type, 3742 HInstruction* left, 3743 HInstruction* right, 3744 uint32_t dex_pc = kNoDexPc) 3745 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3746 3747 template <typename T, typename U, typename V> 3748 T Compute(T x, U y, V max_shift_value) const { 3749 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3750 "V is not the unsigned integer type corresponding to T"); 3751 return x >> (y & max_shift_value); 3752 } 3753 3754 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3755 return GetBlock()->GetGraph()->GetIntConstant( 3756 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3757 } 3758 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3759 // case is handled as `x >> static_cast<int>(y)`. 3760 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3761 return GetBlock()->GetGraph()->GetLongConstant( 3762 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3763 } 3764 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3765 return GetBlock()->GetGraph()->GetLongConstant( 3766 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3767 } 3768 3769 DECLARE_INSTRUCTION(Shr); 3770 3771 private: 3772 DISALLOW_COPY_AND_ASSIGN(HShr); 3773}; 3774 3775class HUShr : public HBinaryOperation { 3776 public: 3777 HUShr(Primitive::Type result_type, 3778 HInstruction* left, 3779 HInstruction* right, 3780 uint32_t dex_pc = kNoDexPc) 3781 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3782 3783 template <typename T, typename U, typename V> 3784 T Compute(T x, U y, V max_shift_value) const { 3785 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 3786 "V is not the unsigned integer type corresponding to T"); 3787 V ux = static_cast<V>(x); 3788 return static_cast<T>(ux >> (y & max_shift_value)); 3789 } 3790 3791 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3792 return GetBlock()->GetGraph()->GetIntConstant( 3793 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 3794 } 3795 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 3796 // case is handled as `x >>> static_cast<int>(y)`. 3797 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3798 return GetBlock()->GetGraph()->GetLongConstant( 3799 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3800 } 3801 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3802 return GetBlock()->GetGraph()->GetLongConstant( 3803 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 3804 } 3805 3806 DECLARE_INSTRUCTION(UShr); 3807 3808 private: 3809 DISALLOW_COPY_AND_ASSIGN(HUShr); 3810}; 3811 3812class HAnd : public HBinaryOperation { 3813 public: 3814 HAnd(Primitive::Type result_type, 3815 HInstruction* left, 3816 HInstruction* right, 3817 uint32_t dex_pc = kNoDexPc) 3818 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3819 3820 bool IsCommutative() const OVERRIDE { return true; } 3821 3822 template <typename T, typename U> 3823 auto Compute(T x, U y) const -> decltype(x & y) { return x & y; } 3824 3825 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3826 return GetBlock()->GetGraph()->GetIntConstant( 3827 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3828 } 3829 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 3830 return GetBlock()->GetGraph()->GetLongConstant( 3831 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3832 } 3833 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3834 return GetBlock()->GetGraph()->GetLongConstant( 3835 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3836 } 3837 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3838 return GetBlock()->GetGraph()->GetLongConstant( 3839 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3840 } 3841 3842 DECLARE_INSTRUCTION(And); 3843 3844 private: 3845 DISALLOW_COPY_AND_ASSIGN(HAnd); 3846}; 3847 3848class HOr : public HBinaryOperation { 3849 public: 3850 HOr(Primitive::Type result_type, 3851 HInstruction* left, 3852 HInstruction* right, 3853 uint32_t dex_pc = kNoDexPc) 3854 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3855 3856 bool IsCommutative() const OVERRIDE { return true; } 3857 3858 template <typename T, typename U> 3859 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 3860 3861 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3862 return GetBlock()->GetGraph()->GetIntConstant( 3863 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3864 } 3865 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 3866 return GetBlock()->GetGraph()->GetLongConstant( 3867 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3868 } 3869 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3870 return GetBlock()->GetGraph()->GetLongConstant( 3871 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3872 } 3873 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3874 return GetBlock()->GetGraph()->GetLongConstant( 3875 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3876 } 3877 3878 DECLARE_INSTRUCTION(Or); 3879 3880 private: 3881 DISALLOW_COPY_AND_ASSIGN(HOr); 3882}; 3883 3884class HXor : public HBinaryOperation { 3885 public: 3886 HXor(Primitive::Type result_type, 3887 HInstruction* left, 3888 HInstruction* right, 3889 uint32_t dex_pc = kNoDexPc) 3890 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3891 3892 bool IsCommutative() const OVERRIDE { return true; } 3893 3894 template <typename T, typename U> 3895 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 3896 3897 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3898 return GetBlock()->GetGraph()->GetIntConstant( 3899 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3900 } 3901 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 3902 return GetBlock()->GetGraph()->GetLongConstant( 3903 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3904 } 3905 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 3906 return GetBlock()->GetGraph()->GetLongConstant( 3907 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3908 } 3909 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3910 return GetBlock()->GetGraph()->GetLongConstant( 3911 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3912 } 3913 3914 DECLARE_INSTRUCTION(Xor); 3915 3916 private: 3917 DISALLOW_COPY_AND_ASSIGN(HXor); 3918}; 3919 3920// The value of a parameter in this method. Its location depends on 3921// the calling convention. 3922class HParameterValue : public HExpression<0> { 3923 public: 3924 HParameterValue(uint8_t index, 3925 Primitive::Type parameter_type, 3926 bool is_this = false) 3927 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 3928 index_(index), 3929 is_this_(is_this), 3930 can_be_null_(!is_this) {} 3931 3932 uint8_t GetIndex() const { return index_; } 3933 3934 bool CanBeNull() const OVERRIDE { return can_be_null_; } 3935 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 3936 3937 bool IsThis() const { return is_this_; } 3938 3939 DECLARE_INSTRUCTION(ParameterValue); 3940 3941 private: 3942 // The index of this parameter in the parameters list. Must be less 3943 // than HGraph::number_of_in_vregs_. 3944 const uint8_t index_; 3945 3946 // Whether or not the parameter value corresponds to 'this' argument. 3947 const bool is_this_; 3948 3949 bool can_be_null_; 3950 3951 DISALLOW_COPY_AND_ASSIGN(HParameterValue); 3952}; 3953 3954class HNot : public HUnaryOperation { 3955 public: 3956 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3957 : HUnaryOperation(result_type, input, dex_pc) {} 3958 3959 bool CanBeMoved() const OVERRIDE { return true; } 3960 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3961 UNUSED(other); 3962 return true; 3963 } 3964 3965 template <typename T> T Compute(T x) const { return ~x; } 3966 3967 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3968 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3969 } 3970 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3971 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3972 } 3973 3974 DECLARE_INSTRUCTION(Not); 3975 3976 private: 3977 DISALLOW_COPY_AND_ASSIGN(HNot); 3978}; 3979 3980class HBooleanNot : public HUnaryOperation { 3981 public: 3982 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc) 3983 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {} 3984 3985 bool CanBeMoved() const OVERRIDE { return true; } 3986 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3987 UNUSED(other); 3988 return true; 3989 } 3990 3991 template <typename T> bool Compute(T x) const { 3992 DCHECK(IsUint<1>(x)); 3993 return !x; 3994 } 3995 3996 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3997 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3998 } 3999 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE { 4000 LOG(FATAL) << DebugName() << " is not defined for long values"; 4001 UNREACHABLE(); 4002 } 4003 4004 DECLARE_INSTRUCTION(BooleanNot); 4005 4006 private: 4007 DISALLOW_COPY_AND_ASSIGN(HBooleanNot); 4008}; 4009 4010class HTypeConversion : public HExpression<1> { 4011 public: 4012 // Instantiate a type conversion of `input` to `result_type`. 4013 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc) 4014 : HExpression(result_type, 4015 SideEffectsForArchRuntimeCalls(input->GetType(), result_type), 4016 dex_pc) { 4017 SetRawInputAt(0, input); 4018 DCHECK_NE(input->GetType(), result_type); 4019 } 4020 4021 HInstruction* GetInput() const { return InputAt(0); } 4022 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 4023 Primitive::Type GetResultType() const { return GetType(); } 4024 4025 // Required by the x86 and ARM code generators when producing calls 4026 // to the runtime. 4027 4028 bool CanBeMoved() const OVERRIDE { return true; } 4029 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; } 4030 4031 // Try to statically evaluate the conversion and return a HConstant 4032 // containing the result. If the input cannot be converted, return nullptr. 4033 HConstant* TryStaticEvaluation() const; 4034 4035 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type, 4036 Primitive::Type result_type) { 4037 // Some architectures may not require the 'GC' side effects, but at this point 4038 // in the compilation process we do not know what architecture we will 4039 // generate code for, so we must be conservative. 4040 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type)) 4041 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) { 4042 return SideEffects::CanTriggerGC(); 4043 } 4044 return SideEffects::None(); 4045 } 4046 4047 DECLARE_INSTRUCTION(TypeConversion); 4048 4049 private: 4050 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 4051}; 4052 4053static constexpr uint32_t kNoRegNumber = -1; 4054 4055class HPhi : public HInstruction { 4056 public: 4057 HPhi(ArenaAllocator* arena, 4058 uint32_t reg_number, 4059 size_t number_of_inputs, 4060 Primitive::Type type, 4061 uint32_t dex_pc = kNoDexPc) 4062 : HInstruction(SideEffects::None(), dex_pc), 4063 inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)), 4064 reg_number_(reg_number), 4065 type_(type), 4066 is_live_(false), 4067 can_be_null_(true) { 4068 } 4069 4070 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold. 4071 static Primitive::Type ToPhiType(Primitive::Type type) { 4072 switch (type) { 4073 case Primitive::kPrimBoolean: 4074 case Primitive::kPrimByte: 4075 case Primitive::kPrimShort: 4076 case Primitive::kPrimChar: 4077 return Primitive::kPrimInt; 4078 default: 4079 return type; 4080 } 4081 } 4082 4083 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); } 4084 4085 size_t InputCount() const OVERRIDE { return inputs_.size(); } 4086 4087 void AddInput(HInstruction* input); 4088 void RemoveInputAt(size_t index); 4089 4090 Primitive::Type GetType() const OVERRIDE { return type_; } 4091 void SetType(Primitive::Type type) { type_ = type; } 4092 4093 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4094 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4095 4096 uint32_t GetRegNumber() const { return reg_number_; } 4097 4098 void SetDead() { is_live_ = false; } 4099 void SetLive() { is_live_ = true; } 4100 bool IsDead() const { return !is_live_; } 4101 bool IsLive() const { return is_live_; } 4102 4103 bool IsVRegEquivalentOf(HInstruction* other) const { 4104 return other != nullptr 4105 && other->IsPhi() 4106 && other->AsPhi()->GetBlock() == GetBlock() 4107 && other->AsPhi()->GetRegNumber() == GetRegNumber(); 4108 } 4109 4110 // Returns the next equivalent phi (starting from the current one) or null if there is none. 4111 // An equivalent phi is a phi having the same dex register and type. 4112 // It assumes that phis with the same dex register are adjacent. 4113 HPhi* GetNextEquivalentPhiWithSameType() { 4114 HInstruction* next = GetNext(); 4115 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 4116 if (next->GetType() == GetType()) { 4117 return next->AsPhi(); 4118 } 4119 next = next->GetNext(); 4120 } 4121 return nullptr; 4122 } 4123 4124 DECLARE_INSTRUCTION(Phi); 4125 4126 protected: 4127 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 4128 DCHECK_LE(index, InputCount()); 4129 return inputs_[index]; 4130 } 4131 4132 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 4133 DCHECK_LE(index, InputCount()); 4134 inputs_[index] = input; 4135 } 4136 4137 private: 4138 ArenaVector<HUserRecord<HInstruction*> > inputs_; 4139 const uint32_t reg_number_; 4140 Primitive::Type type_; 4141 bool is_live_; 4142 bool can_be_null_; 4143 4144 DISALLOW_COPY_AND_ASSIGN(HPhi); 4145}; 4146 4147class HNullCheck : public HExpression<1> { 4148 public: 4149 HNullCheck(HInstruction* value, uint32_t dex_pc) 4150 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 4151 SetRawInputAt(0, value); 4152 } 4153 4154 bool CanBeMoved() const OVERRIDE { return true; } 4155 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4156 UNUSED(other); 4157 return true; 4158 } 4159 4160 bool NeedsEnvironment() const OVERRIDE { return true; } 4161 4162 bool CanThrow() const OVERRIDE { return true; } 4163 4164 bool CanBeNull() const OVERRIDE { return false; } 4165 4166 4167 DECLARE_INSTRUCTION(NullCheck); 4168 4169 private: 4170 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 4171}; 4172 4173class FieldInfo : public ValueObject { 4174 public: 4175 FieldInfo(MemberOffset field_offset, 4176 Primitive::Type field_type, 4177 bool is_volatile, 4178 uint32_t index, 4179 const DexFile& dex_file, 4180 Handle<mirror::DexCache> dex_cache) 4181 : field_offset_(field_offset), 4182 field_type_(field_type), 4183 is_volatile_(is_volatile), 4184 index_(index), 4185 dex_file_(dex_file), 4186 dex_cache_(dex_cache) {} 4187 4188 MemberOffset GetFieldOffset() const { return field_offset_; } 4189 Primitive::Type GetFieldType() const { return field_type_; } 4190 uint32_t GetFieldIndex() const { return index_; } 4191 const DexFile& GetDexFile() const { return dex_file_; } 4192 bool IsVolatile() const { return is_volatile_; } 4193 Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; } 4194 4195 private: 4196 const MemberOffset field_offset_; 4197 const Primitive::Type field_type_; 4198 const bool is_volatile_; 4199 const uint32_t index_; 4200 const DexFile& dex_file_; 4201 const Handle<mirror::DexCache> dex_cache_; 4202}; 4203 4204class HInstanceFieldGet : public HExpression<1> { 4205 public: 4206 HInstanceFieldGet(HInstruction* value, 4207 Primitive::Type field_type, 4208 MemberOffset field_offset, 4209 bool is_volatile, 4210 uint32_t field_idx, 4211 const DexFile& dex_file, 4212 Handle<mirror::DexCache> dex_cache, 4213 uint32_t dex_pc = kNoDexPc) 4214 : HExpression( 4215 field_type, 4216 SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc), 4217 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache) { 4218 SetRawInputAt(0, value); 4219 } 4220 4221 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4222 4223 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4224 HInstanceFieldGet* other_get = other->AsInstanceFieldGet(); 4225 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4226 } 4227 4228 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4229 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4230 } 4231 4232 size_t ComputeHashCode() const OVERRIDE { 4233 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4234 } 4235 4236 const FieldInfo& GetFieldInfo() const { return field_info_; } 4237 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4238 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4239 bool IsVolatile() const { return field_info_.IsVolatile(); } 4240 4241 DECLARE_INSTRUCTION(InstanceFieldGet); 4242 4243 private: 4244 const FieldInfo field_info_; 4245 4246 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet); 4247}; 4248 4249class HInstanceFieldSet : public HTemplateInstruction<2> { 4250 public: 4251 HInstanceFieldSet(HInstruction* object, 4252 HInstruction* value, 4253 Primitive::Type field_type, 4254 MemberOffset field_offset, 4255 bool is_volatile, 4256 uint32_t field_idx, 4257 const DexFile& dex_file, 4258 Handle<mirror::DexCache> dex_cache, 4259 uint32_t dex_pc = kNoDexPc) 4260 : HTemplateInstruction( 4261 SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc), 4262 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache), 4263 value_can_be_null_(true) { 4264 SetRawInputAt(0, object); 4265 SetRawInputAt(1, value); 4266 } 4267 4268 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4269 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4270 } 4271 4272 const FieldInfo& GetFieldInfo() const { return field_info_; } 4273 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4274 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4275 bool IsVolatile() const { return field_info_.IsVolatile(); } 4276 HInstruction* GetValue() const { return InputAt(1); } 4277 bool GetValueCanBeNull() const { return value_can_be_null_; } 4278 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4279 4280 DECLARE_INSTRUCTION(InstanceFieldSet); 4281 4282 private: 4283 const FieldInfo field_info_; 4284 bool value_can_be_null_; 4285 4286 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet); 4287}; 4288 4289class HArrayGet : public HExpression<2> { 4290 public: 4291 HArrayGet(HInstruction* array, 4292 HInstruction* index, 4293 Primitive::Type type, 4294 uint32_t dex_pc = kNoDexPc) 4295 : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) { 4296 SetRawInputAt(0, array); 4297 SetRawInputAt(1, index); 4298 } 4299 4300 bool CanBeMoved() const OVERRIDE { return true; } 4301 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4302 UNUSED(other); 4303 return true; 4304 } 4305 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4306 UNUSED(obj); 4307 // TODO: We can be smarter here. 4308 // Currently, the array access is always preceded by an ArrayLength or a NullCheck 4309 // which generates the implicit null check. There are cases when these can be removed 4310 // to produce better code. If we ever add optimizations to do so we should allow an 4311 // implicit check here (as long as the address falls in the first page). 4312 return false; 4313 } 4314 4315 void SetType(Primitive::Type type) { type_ = type; } 4316 4317 HInstruction* GetArray() const { return InputAt(0); } 4318 HInstruction* GetIndex() const { return InputAt(1); } 4319 4320 DECLARE_INSTRUCTION(ArrayGet); 4321 4322 private: 4323 DISALLOW_COPY_AND_ASSIGN(HArrayGet); 4324}; 4325 4326class HArraySet : public HTemplateInstruction<3> { 4327 public: 4328 HArraySet(HInstruction* array, 4329 HInstruction* index, 4330 HInstruction* value, 4331 Primitive::Type expected_component_type, 4332 uint32_t dex_pc) 4333 : HTemplateInstruction( 4334 SideEffects::ArrayWriteOfType(expected_component_type).Union( 4335 SideEffectsForArchRuntimeCalls(value->GetType())), dex_pc), 4336 expected_component_type_(expected_component_type), 4337 needs_type_check_(value->GetType() == Primitive::kPrimNot), 4338 value_can_be_null_(true), 4339 static_type_of_array_is_object_array_(false) { 4340 SetRawInputAt(0, array); 4341 SetRawInputAt(1, index); 4342 SetRawInputAt(2, value); 4343 } 4344 4345 bool NeedsEnvironment() const OVERRIDE { 4346 // We currently always call a runtime method to catch array store 4347 // exceptions. 4348 return needs_type_check_; 4349 } 4350 4351 // Can throw ArrayStoreException. 4352 bool CanThrow() const OVERRIDE { return needs_type_check_; } 4353 4354 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4355 UNUSED(obj); 4356 // TODO: Same as for ArrayGet. 4357 return false; 4358 } 4359 4360 void ClearNeedsTypeCheck() { 4361 needs_type_check_ = false; 4362 } 4363 4364 void ClearValueCanBeNull() { 4365 value_can_be_null_ = false; 4366 } 4367 4368 void SetStaticTypeOfArrayIsObjectArray() { 4369 static_type_of_array_is_object_array_ = true; 4370 } 4371 4372 bool GetValueCanBeNull() const { return value_can_be_null_; } 4373 bool NeedsTypeCheck() const { return needs_type_check_; } 4374 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4375 4376 HInstruction* GetArray() const { return InputAt(0); } 4377 HInstruction* GetIndex() const { return InputAt(1); } 4378 HInstruction* GetValue() const { return InputAt(2); } 4379 4380 Primitive::Type GetComponentType() const { 4381 // The Dex format does not type floating point index operations. Since the 4382 // `expected_component_type_` is set during building and can therefore not 4383 // be correct, we also check what is the value type. If it is a floating 4384 // point type, we must use that type. 4385 Primitive::Type value_type = GetValue()->GetType(); 4386 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4387 ? value_type 4388 : expected_component_type_; 4389 } 4390 4391 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4392 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None(); 4393 } 4394 4395 DECLARE_INSTRUCTION(ArraySet); 4396 4397 private: 4398 const Primitive::Type expected_component_type_; 4399 bool needs_type_check_; 4400 bool value_can_be_null_; 4401 // Cached information for the reference_type_info_ so that codegen 4402 // does not need to inspect the static type. 4403 bool static_type_of_array_is_object_array_; 4404 4405 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4406}; 4407 4408class HArrayLength : public HExpression<1> { 4409 public: 4410 explicit HArrayLength(HInstruction* array, uint32_t dex_pc = kNoDexPc) 4411 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4412 // Note that arrays do not change length, so the instruction does not 4413 // depend on any write. 4414 SetRawInputAt(0, array); 4415 } 4416 4417 bool CanBeMoved() const OVERRIDE { return true; } 4418 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4419 UNUSED(other); 4420 return true; 4421 } 4422 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4423 return obj == InputAt(0); 4424 } 4425 4426 DECLARE_INSTRUCTION(ArrayLength); 4427 4428 private: 4429 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4430}; 4431 4432class HBoundsCheck : public HExpression<2> { 4433 public: 4434 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4435 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4436 DCHECK(index->GetType() == Primitive::kPrimInt); 4437 SetRawInputAt(0, index); 4438 SetRawInputAt(1, length); 4439 } 4440 4441 bool CanBeMoved() const OVERRIDE { return true; } 4442 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4443 UNUSED(other); 4444 return true; 4445 } 4446 4447 bool NeedsEnvironment() const OVERRIDE { return true; } 4448 4449 bool CanThrow() const OVERRIDE { return true; } 4450 4451 4452 DECLARE_INSTRUCTION(BoundsCheck); 4453 4454 private: 4455 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4456}; 4457 4458/** 4459 * Some DEX instructions are folded into multiple HInstructions that need 4460 * to stay live until the last HInstruction. This class 4461 * is used as a marker for the baseline compiler to ensure its preceding 4462 * HInstruction stays live. `index` represents the stack location index of the 4463 * instruction (the actual offset is computed as index * vreg_size). 4464 */ 4465class HTemporary : public HTemplateInstruction<0> { 4466 public: 4467 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4468 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4469 4470 size_t GetIndex() const { return index_; } 4471 4472 Primitive::Type GetType() const OVERRIDE { 4473 // The previous instruction is the one that will be stored in the temporary location. 4474 DCHECK(GetPrevious() != nullptr); 4475 return GetPrevious()->GetType(); 4476 } 4477 4478 DECLARE_INSTRUCTION(Temporary); 4479 4480 private: 4481 const size_t index_; 4482 DISALLOW_COPY_AND_ASSIGN(HTemporary); 4483}; 4484 4485class HSuspendCheck : public HTemplateInstruction<0> { 4486 public: 4487 explicit HSuspendCheck(uint32_t dex_pc) 4488 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {} 4489 4490 bool NeedsEnvironment() const OVERRIDE { 4491 return true; 4492 } 4493 4494 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; } 4495 SlowPathCode* GetSlowPath() const { return slow_path_; } 4496 4497 DECLARE_INSTRUCTION(SuspendCheck); 4498 4499 private: 4500 // Only used for code generation, in order to share the same slow path between back edges 4501 // of a same loop. 4502 SlowPathCode* slow_path_; 4503 4504 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck); 4505}; 4506 4507/** 4508 * Instruction to load a Class object. 4509 */ 4510class HLoadClass : public HExpression<1> { 4511 public: 4512 HLoadClass(HCurrentMethod* current_method, 4513 uint16_t type_index, 4514 const DexFile& dex_file, 4515 bool is_referrers_class, 4516 uint32_t dex_pc, 4517 bool needs_access_check) 4518 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4519 type_index_(type_index), 4520 dex_file_(dex_file), 4521 is_referrers_class_(is_referrers_class), 4522 generate_clinit_check_(false), 4523 needs_access_check_(needs_access_check), 4524 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 4525 SetRawInputAt(0, current_method); 4526 } 4527 4528 bool CanBeMoved() const OVERRIDE { return true; } 4529 4530 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4531 return other->AsLoadClass()->type_index_ == type_index_; 4532 } 4533 4534 size_t ComputeHashCode() const OVERRIDE { return type_index_; } 4535 4536 uint16_t GetTypeIndex() const { return type_index_; } 4537 bool IsReferrersClass() const { return is_referrers_class_; } 4538 bool CanBeNull() const OVERRIDE { return false; } 4539 4540 bool NeedsEnvironment() const OVERRIDE { 4541 // Will call runtime and load the class if the class is not loaded yet. 4542 // TODO: finer grain decision. 4543 return !is_referrers_class_ || needs_access_check_; 4544 } 4545 4546 bool MustGenerateClinitCheck() const { 4547 return generate_clinit_check_; 4548 } 4549 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 4550 generate_clinit_check_ = generate_clinit_check; 4551 } 4552 4553 bool CanCallRuntime() const { 4554 return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_; 4555 } 4556 4557 bool NeedsAccessCheck() const { 4558 return needs_access_check_; 4559 } 4560 4561 bool CanThrow() const OVERRIDE { 4562 // May call runtime and and therefore can throw. 4563 // TODO: finer grain decision. 4564 return CanCallRuntime(); 4565 } 4566 4567 ReferenceTypeInfo GetLoadedClassRTI() { 4568 return loaded_class_rti_; 4569 } 4570 4571 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 4572 // Make sure we only set exact types (the loaded class should never be merged). 4573 DCHECK(rti.IsExact()); 4574 loaded_class_rti_ = rti; 4575 } 4576 4577 const DexFile& GetDexFile() { return dex_file_; } 4578 4579 bool NeedsDexCache() const OVERRIDE { return !is_referrers_class_; } 4580 4581 static SideEffects SideEffectsForArchRuntimeCalls() { 4582 return SideEffects::CanTriggerGC(); 4583 } 4584 4585 DECLARE_INSTRUCTION(LoadClass); 4586 4587 private: 4588 const uint16_t type_index_; 4589 const DexFile& dex_file_; 4590 const bool is_referrers_class_; 4591 // Whether this instruction must generate the initialization check. 4592 // Used for code generation. 4593 bool generate_clinit_check_; 4594 bool needs_access_check_; 4595 4596 ReferenceTypeInfo loaded_class_rti_; 4597 4598 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 4599}; 4600 4601class HLoadString : public HExpression<1> { 4602 public: 4603 HLoadString(HCurrentMethod* current_method, uint32_t string_index, uint32_t dex_pc) 4604 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4605 string_index_(string_index) { 4606 SetRawInputAt(0, current_method); 4607 } 4608 4609 bool CanBeMoved() const OVERRIDE { return true; } 4610 4611 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4612 return other->AsLoadString()->string_index_ == string_index_; 4613 } 4614 4615 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 4616 4617 uint32_t GetStringIndex() const { return string_index_; } 4618 4619 // TODO: Can we deopt or debug when we resolve a string? 4620 bool NeedsEnvironment() const OVERRIDE { return false; } 4621 bool NeedsDexCache() const OVERRIDE { return true; } 4622 bool CanBeNull() const OVERRIDE { return false; } 4623 4624 static SideEffects SideEffectsForArchRuntimeCalls() { 4625 return SideEffects::CanTriggerGC(); 4626 } 4627 4628 DECLARE_INSTRUCTION(LoadString); 4629 4630 private: 4631 const uint32_t string_index_; 4632 4633 DISALLOW_COPY_AND_ASSIGN(HLoadString); 4634}; 4635 4636/** 4637 * Performs an initialization check on its Class object input. 4638 */ 4639class HClinitCheck : public HExpression<1> { 4640 public: 4641 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 4642 : HExpression( 4643 Primitive::kPrimNot, 4644 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 4645 dex_pc) { 4646 SetRawInputAt(0, constant); 4647 } 4648 4649 bool CanBeMoved() const OVERRIDE { return true; } 4650 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4651 UNUSED(other); 4652 return true; 4653 } 4654 4655 bool NeedsEnvironment() const OVERRIDE { 4656 // May call runtime to initialize the class. 4657 return true; 4658 } 4659 4660 4661 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 4662 4663 DECLARE_INSTRUCTION(ClinitCheck); 4664 4665 private: 4666 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 4667}; 4668 4669class HStaticFieldGet : public HExpression<1> { 4670 public: 4671 HStaticFieldGet(HInstruction* cls, 4672 Primitive::Type field_type, 4673 MemberOffset field_offset, 4674 bool is_volatile, 4675 uint32_t field_idx, 4676 const DexFile& dex_file, 4677 Handle<mirror::DexCache> dex_cache, 4678 uint32_t dex_pc = kNoDexPc) 4679 : HExpression( 4680 field_type, 4681 SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc), 4682 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache) { 4683 SetRawInputAt(0, cls); 4684 } 4685 4686 4687 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4688 4689 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4690 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 4691 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4692 } 4693 4694 size_t ComputeHashCode() const OVERRIDE { 4695 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4696 } 4697 4698 const FieldInfo& GetFieldInfo() const { return field_info_; } 4699 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4700 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4701 bool IsVolatile() const { return field_info_.IsVolatile(); } 4702 4703 DECLARE_INSTRUCTION(StaticFieldGet); 4704 4705 private: 4706 const FieldInfo field_info_; 4707 4708 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 4709}; 4710 4711class HStaticFieldSet : public HTemplateInstruction<2> { 4712 public: 4713 HStaticFieldSet(HInstruction* cls, 4714 HInstruction* value, 4715 Primitive::Type field_type, 4716 MemberOffset field_offset, 4717 bool is_volatile, 4718 uint32_t field_idx, 4719 const DexFile& dex_file, 4720 Handle<mirror::DexCache> dex_cache, 4721 uint32_t dex_pc = kNoDexPc) 4722 : HTemplateInstruction( 4723 SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc), 4724 field_info_(field_offset, field_type, is_volatile, field_idx, dex_file, dex_cache), 4725 value_can_be_null_(true) { 4726 SetRawInputAt(0, cls); 4727 SetRawInputAt(1, value); 4728 } 4729 4730 const FieldInfo& GetFieldInfo() const { return field_info_; } 4731 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4732 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4733 bool IsVolatile() const { return field_info_.IsVolatile(); } 4734 4735 HInstruction* GetValue() const { return InputAt(1); } 4736 bool GetValueCanBeNull() const { return value_can_be_null_; } 4737 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4738 4739 DECLARE_INSTRUCTION(StaticFieldSet); 4740 4741 private: 4742 const FieldInfo field_info_; 4743 bool value_can_be_null_; 4744 4745 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 4746}; 4747 4748class HUnresolvedInstanceFieldGet : public HExpression<1> { 4749 public: 4750 HUnresolvedInstanceFieldGet(HInstruction* obj, 4751 Primitive::Type field_type, 4752 uint32_t field_index, 4753 uint32_t dex_pc) 4754 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 4755 field_index_(field_index) { 4756 SetRawInputAt(0, obj); 4757 } 4758 4759 bool NeedsEnvironment() const OVERRIDE { return true; } 4760 bool CanThrow() const OVERRIDE { return true; } 4761 4762 Primitive::Type GetFieldType() const { return GetType(); } 4763 uint32_t GetFieldIndex() const { return field_index_; } 4764 4765 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 4766 4767 private: 4768 const uint32_t field_index_; 4769 4770 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 4771}; 4772 4773class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 4774 public: 4775 HUnresolvedInstanceFieldSet(HInstruction* obj, 4776 HInstruction* value, 4777 Primitive::Type field_type, 4778 uint32_t field_index, 4779 uint32_t dex_pc) 4780 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 4781 field_type_(field_type), 4782 field_index_(field_index) { 4783 DCHECK_EQ(field_type, value->GetType()); 4784 SetRawInputAt(0, obj); 4785 SetRawInputAt(1, value); 4786 } 4787 4788 bool NeedsEnvironment() const OVERRIDE { return true; } 4789 bool CanThrow() const OVERRIDE { return true; } 4790 4791 Primitive::Type GetFieldType() const { return field_type_; } 4792 uint32_t GetFieldIndex() const { return field_index_; } 4793 4794 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 4795 4796 private: 4797 const Primitive::Type field_type_; 4798 const uint32_t field_index_; 4799 4800 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 4801}; 4802 4803class HUnresolvedStaticFieldGet : public HExpression<0> { 4804 public: 4805 HUnresolvedStaticFieldGet(Primitive::Type field_type, 4806 uint32_t field_index, 4807 uint32_t dex_pc) 4808 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 4809 field_index_(field_index) { 4810 } 4811 4812 bool NeedsEnvironment() const OVERRIDE { return true; } 4813 bool CanThrow() const OVERRIDE { return true; } 4814 4815 Primitive::Type GetFieldType() const { return GetType(); } 4816 uint32_t GetFieldIndex() const { return field_index_; } 4817 4818 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 4819 4820 private: 4821 const uint32_t field_index_; 4822 4823 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 4824}; 4825 4826class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 4827 public: 4828 HUnresolvedStaticFieldSet(HInstruction* value, 4829 Primitive::Type field_type, 4830 uint32_t field_index, 4831 uint32_t dex_pc) 4832 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 4833 field_type_(field_type), 4834 field_index_(field_index) { 4835 DCHECK_EQ(field_type, value->GetType()); 4836 SetRawInputAt(0, value); 4837 } 4838 4839 bool NeedsEnvironment() const OVERRIDE { return true; } 4840 bool CanThrow() const OVERRIDE { return true; } 4841 4842 Primitive::Type GetFieldType() const { return field_type_; } 4843 uint32_t GetFieldIndex() const { return field_index_; } 4844 4845 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 4846 4847 private: 4848 const Primitive::Type field_type_; 4849 const uint32_t field_index_; 4850 4851 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 4852}; 4853 4854// Implement the move-exception DEX instruction. 4855class HLoadException : public HExpression<0> { 4856 public: 4857 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 4858 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 4859 4860 bool CanBeNull() const OVERRIDE { return false; } 4861 4862 DECLARE_INSTRUCTION(LoadException); 4863 4864 private: 4865 DISALLOW_COPY_AND_ASSIGN(HLoadException); 4866}; 4867 4868// Implicit part of move-exception which clears thread-local exception storage. 4869// Must not be removed because the runtime expects the TLS to get cleared. 4870class HClearException : public HTemplateInstruction<0> { 4871 public: 4872 explicit HClearException(uint32_t dex_pc = kNoDexPc) 4873 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 4874 4875 DECLARE_INSTRUCTION(ClearException); 4876 4877 private: 4878 DISALLOW_COPY_AND_ASSIGN(HClearException); 4879}; 4880 4881class HThrow : public HTemplateInstruction<1> { 4882 public: 4883 HThrow(HInstruction* exception, uint32_t dex_pc) 4884 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 4885 SetRawInputAt(0, exception); 4886 } 4887 4888 bool IsControlFlow() const OVERRIDE { return true; } 4889 4890 bool NeedsEnvironment() const OVERRIDE { return true; } 4891 4892 bool CanThrow() const OVERRIDE { return true; } 4893 4894 4895 DECLARE_INSTRUCTION(Throw); 4896 4897 private: 4898 DISALLOW_COPY_AND_ASSIGN(HThrow); 4899}; 4900 4901/** 4902 * Implementation strategies for the code generator of a HInstanceOf 4903 * or `HCheckCast`. 4904 */ 4905enum class TypeCheckKind { 4906 kUnresolvedCheck, // Check against an unresolved type. 4907 kExactCheck, // Can do a single class compare. 4908 kClassHierarchyCheck, // Can just walk the super class chain. 4909 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 4910 kInterfaceCheck, // No optimization yet when checking against an interface. 4911 kArrayObjectCheck, // Can just check if the array is not primitive. 4912 kArrayCheck // No optimization yet when checking against a generic array. 4913}; 4914 4915class HInstanceOf : public HExpression<2> { 4916 public: 4917 HInstanceOf(HInstruction* object, 4918 HLoadClass* constant, 4919 TypeCheckKind check_kind, 4920 uint32_t dex_pc) 4921 : HExpression(Primitive::kPrimBoolean, 4922 SideEffectsForArchRuntimeCalls(check_kind), 4923 dex_pc), 4924 check_kind_(check_kind), 4925 must_do_null_check_(true) { 4926 SetRawInputAt(0, object); 4927 SetRawInputAt(1, constant); 4928 } 4929 4930 bool CanBeMoved() const OVERRIDE { return true; } 4931 4932 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4933 return true; 4934 } 4935 4936 bool NeedsEnvironment() const OVERRIDE { 4937 return false; 4938 } 4939 4940 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 4941 4942 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 4943 4944 // Used only in code generation. 4945 bool MustDoNullCheck() const { return must_do_null_check_; } 4946 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 4947 4948 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 4949 return (check_kind == TypeCheckKind::kExactCheck) 4950 ? SideEffects::None() 4951 // Mips currently does runtime calls for any other checks. 4952 : SideEffects::CanTriggerGC(); 4953 } 4954 4955 DECLARE_INSTRUCTION(InstanceOf); 4956 4957 private: 4958 const TypeCheckKind check_kind_; 4959 bool must_do_null_check_; 4960 4961 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 4962}; 4963 4964class HBoundType : public HExpression<1> { 4965 public: 4966 // Constructs an HBoundType with the given upper_bound. 4967 // Ensures that the upper_bound is valid. 4968 HBoundType(HInstruction* input, 4969 ReferenceTypeInfo upper_bound, 4970 bool upper_can_be_null, 4971 uint32_t dex_pc = kNoDexPc) 4972 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 4973 upper_bound_(upper_bound), 4974 upper_can_be_null_(upper_can_be_null), 4975 can_be_null_(upper_can_be_null) { 4976 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 4977 SetRawInputAt(0, input); 4978 SetReferenceTypeInfo(upper_bound_); 4979 } 4980 4981 // GetUpper* should only be used in reference type propagation. 4982 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 4983 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 4984 4985 void SetCanBeNull(bool can_be_null) { 4986 DCHECK(upper_can_be_null_ || !can_be_null); 4987 can_be_null_ = can_be_null; 4988 } 4989 4990 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4991 4992 DECLARE_INSTRUCTION(BoundType); 4993 4994 private: 4995 // Encodes the most upper class that this instruction can have. In other words 4996 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 4997 // It is used to bound the type in cases like: 4998 // if (x instanceof ClassX) { 4999 // // uper_bound_ will be ClassX 5000 // } 5001 const ReferenceTypeInfo upper_bound_; 5002 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5003 // is false then can_be_null_ cannot be true). 5004 const bool upper_can_be_null_; 5005 bool can_be_null_; 5006 5007 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5008}; 5009 5010class HCheckCast : public HTemplateInstruction<2> { 5011 public: 5012 HCheckCast(HInstruction* object, 5013 HLoadClass* constant, 5014 TypeCheckKind check_kind, 5015 uint32_t dex_pc) 5016 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5017 check_kind_(check_kind), 5018 must_do_null_check_(true) { 5019 SetRawInputAt(0, object); 5020 SetRawInputAt(1, constant); 5021 } 5022 5023 bool CanBeMoved() const OVERRIDE { return true; } 5024 5025 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5026 return true; 5027 } 5028 5029 bool NeedsEnvironment() const OVERRIDE { 5030 // Instruction may throw a CheckCastError. 5031 return true; 5032 } 5033 5034 bool CanThrow() const OVERRIDE { return true; } 5035 5036 bool MustDoNullCheck() const { return must_do_null_check_; } 5037 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5038 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5039 5040 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5041 5042 DECLARE_INSTRUCTION(CheckCast); 5043 5044 private: 5045 const TypeCheckKind check_kind_; 5046 bool must_do_null_check_; 5047 5048 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5049}; 5050 5051class HMemoryBarrier : public HTemplateInstruction<0> { 5052 public: 5053 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5054 : HTemplateInstruction( 5055 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 5056 barrier_kind_(barrier_kind) {} 5057 5058 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5059 5060 DECLARE_INSTRUCTION(MemoryBarrier); 5061 5062 private: 5063 const MemBarrierKind barrier_kind_; 5064 5065 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5066}; 5067 5068class HMonitorOperation : public HTemplateInstruction<1> { 5069 public: 5070 enum OperationKind { 5071 kEnter, 5072 kExit, 5073 }; 5074 5075 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5076 : HTemplateInstruction( 5077 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5078 kind_(kind) { 5079 SetRawInputAt(0, object); 5080 } 5081 5082 // Instruction may throw a Java exception, so we need an environment. 5083 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5084 5085 bool CanThrow() const OVERRIDE { 5086 // Verifier guarantees that monitor-exit cannot throw. 5087 // This is important because it allows the HGraphBuilder to remove 5088 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5089 return IsEnter(); 5090 } 5091 5092 5093 bool IsEnter() const { return kind_ == kEnter; } 5094 5095 DECLARE_INSTRUCTION(MonitorOperation); 5096 5097 private: 5098 const OperationKind kind_; 5099 5100 private: 5101 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5102}; 5103 5104/** 5105 * A HInstruction used as a marker for the replacement of new + <init> 5106 * of a String to a call to a StringFactory. Only baseline will see 5107 * the node at code generation, where it will be be treated as null. 5108 * When compiling non-baseline, `HFakeString` instructions are being removed 5109 * in the instruction simplifier. 5110 */ 5111class HFakeString : public HTemplateInstruction<0> { 5112 public: 5113 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5114 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5115 5116 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5117 5118 DECLARE_INSTRUCTION(FakeString); 5119 5120 private: 5121 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5122}; 5123 5124class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5125 public: 5126 MoveOperands(Location source, 5127 Location destination, 5128 Primitive::Type type, 5129 HInstruction* instruction) 5130 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5131 5132 Location GetSource() const { return source_; } 5133 Location GetDestination() const { return destination_; } 5134 5135 void SetSource(Location value) { source_ = value; } 5136 void SetDestination(Location value) { destination_ = value; } 5137 5138 // The parallel move resolver marks moves as "in-progress" by clearing the 5139 // destination (but not the source). 5140 Location MarkPending() { 5141 DCHECK(!IsPending()); 5142 Location dest = destination_; 5143 destination_ = Location::NoLocation(); 5144 return dest; 5145 } 5146 5147 void ClearPending(Location dest) { 5148 DCHECK(IsPending()); 5149 destination_ = dest; 5150 } 5151 5152 bool IsPending() const { 5153 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5154 return destination_.IsInvalid() && !source_.IsInvalid(); 5155 } 5156 5157 // True if this blocks a move from the given location. 5158 bool Blocks(Location loc) const { 5159 return !IsEliminated() && source_.OverlapsWith(loc); 5160 } 5161 5162 // A move is redundant if it's been eliminated, if its source and 5163 // destination are the same, or if its destination is unneeded. 5164 bool IsRedundant() const { 5165 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5166 } 5167 5168 // We clear both operands to indicate move that's been eliminated. 5169 void Eliminate() { 5170 source_ = destination_ = Location::NoLocation(); 5171 } 5172 5173 bool IsEliminated() const { 5174 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5175 return source_.IsInvalid(); 5176 } 5177 5178 Primitive::Type GetType() const { return type_; } 5179 5180 bool Is64BitMove() const { 5181 return Primitive::Is64BitType(type_); 5182 } 5183 5184 HInstruction* GetInstruction() const { return instruction_; } 5185 5186 private: 5187 Location source_; 5188 Location destination_; 5189 // The type this move is for. 5190 Primitive::Type type_; 5191 // The instruction this move is assocatied with. Null when this move is 5192 // for moving an input in the expected locations of user (including a phi user). 5193 // This is only used in debug mode, to ensure we do not connect interval siblings 5194 // in the same parallel move. 5195 HInstruction* instruction_; 5196}; 5197 5198static constexpr size_t kDefaultNumberOfMoves = 4; 5199 5200class HParallelMove : public HTemplateInstruction<0> { 5201 public: 5202 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5203 : HTemplateInstruction(SideEffects::None(), dex_pc), 5204 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5205 moves_.reserve(kDefaultNumberOfMoves); 5206 } 5207 5208 void AddMove(Location source, 5209 Location destination, 5210 Primitive::Type type, 5211 HInstruction* instruction) { 5212 DCHECK(source.IsValid()); 5213 DCHECK(destination.IsValid()); 5214 if (kIsDebugBuild) { 5215 if (instruction != nullptr) { 5216 for (const MoveOperands& move : moves_) { 5217 if (move.GetInstruction() == instruction) { 5218 // Special case the situation where the move is for the spill slot 5219 // of the instruction. 5220 if ((GetPrevious() == instruction) 5221 || ((GetPrevious() == nullptr) 5222 && instruction->IsPhi() 5223 && instruction->GetBlock() == GetBlock())) { 5224 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5225 << "Doing parallel moves for the same instruction."; 5226 } else { 5227 DCHECK(false) << "Doing parallel moves for the same instruction."; 5228 } 5229 } 5230 } 5231 } 5232 for (const MoveOperands& move : moves_) { 5233 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5234 << "Overlapped destination for two moves in a parallel move: " 5235 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5236 << source << " ==> " << destination; 5237 } 5238 } 5239 moves_.emplace_back(source, destination, type, instruction); 5240 } 5241 5242 MoveOperands* MoveOperandsAt(size_t index) { 5243 DCHECK_LT(index, moves_.size()); 5244 return &moves_[index]; 5245 } 5246 5247 size_t NumMoves() const { return moves_.size(); } 5248 5249 DECLARE_INSTRUCTION(ParallelMove); 5250 5251 private: 5252 ArenaVector<MoveOperands> moves_; 5253 5254 DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5255}; 5256 5257} // namespace art 5258 5259#ifdef ART_ENABLE_CODEGEN_x86 5260#include "nodes_x86.h" 5261#endif 5262 5263namespace art { 5264 5265class HGraphVisitor : public ValueObject { 5266 public: 5267 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5268 virtual ~HGraphVisitor() {} 5269 5270 virtual void VisitInstruction(HInstruction* instruction) { UNUSED(instruction); } 5271 virtual void VisitBasicBlock(HBasicBlock* block); 5272 5273 // Visit the graph following basic block insertion order. 5274 void VisitInsertionOrder(); 5275 5276 // Visit the graph following dominator tree reverse post-order. 5277 void VisitReversePostOrder(); 5278 5279 HGraph* GetGraph() const { return graph_; } 5280 5281 // Visit functions for instruction classes. 5282#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5283 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5284 5285 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5286 5287#undef DECLARE_VISIT_INSTRUCTION 5288 5289 private: 5290 HGraph* const graph_; 5291 5292 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5293}; 5294 5295class HGraphDelegateVisitor : public HGraphVisitor { 5296 public: 5297 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5298 virtual ~HGraphDelegateVisitor() {} 5299 5300 // Visit functions that delegate to to super class. 5301#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5302 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5303 5304 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5305 5306#undef DECLARE_VISIT_INSTRUCTION 5307 5308 private: 5309 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5310}; 5311 5312class HInsertionOrderIterator : public ValueObject { 5313 public: 5314 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5315 5316 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5317 HBasicBlock* Current() const { return graph_.GetBlock(index_); } 5318 void Advance() { ++index_; } 5319 5320 private: 5321 const HGraph& graph_; 5322 size_t index_; 5323 5324 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5325}; 5326 5327class HReversePostOrderIterator : public ValueObject { 5328 public: 5329 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5330 // Check that reverse post order of the graph has been built. 5331 DCHECK(!graph.GetReversePostOrder().empty()); 5332 } 5333 5334 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5335 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5336 void Advance() { ++index_; } 5337 5338 private: 5339 const HGraph& graph_; 5340 size_t index_; 5341 5342 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5343}; 5344 5345class HPostOrderIterator : public ValueObject { 5346 public: 5347 explicit HPostOrderIterator(const HGraph& graph) 5348 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5349 // Check that reverse post order of the graph has been built. 5350 DCHECK(!graph.GetReversePostOrder().empty()); 5351 } 5352 5353 bool Done() const { return index_ == 0; } 5354 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5355 void Advance() { --index_; } 5356 5357 private: 5358 const HGraph& graph_; 5359 size_t index_; 5360 5361 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5362}; 5363 5364class HLinearPostOrderIterator : public ValueObject { 5365 public: 5366 explicit HLinearPostOrderIterator(const HGraph& graph) 5367 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5368 5369 bool Done() const { return index_ == 0; } 5370 5371 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5372 5373 void Advance() { 5374 --index_; 5375 DCHECK_GE(index_, 0U); 5376 } 5377 5378 private: 5379 const ArenaVector<HBasicBlock*>& order_; 5380 size_t index_; 5381 5382 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5383}; 5384 5385class HLinearOrderIterator : public ValueObject { 5386 public: 5387 explicit HLinearOrderIterator(const HGraph& graph) 5388 : order_(graph.GetLinearOrder()), index_(0) {} 5389 5390 bool Done() const { return index_ == order_.size(); } 5391 HBasicBlock* Current() const { return order_[index_]; } 5392 void Advance() { ++index_; } 5393 5394 private: 5395 const ArenaVector<HBasicBlock*>& order_; 5396 size_t index_; 5397 5398 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5399}; 5400 5401// Iterator over the blocks that art part of the loop. Includes blocks part 5402// of an inner loop. The order in which the blocks are iterated is on their 5403// block id. 5404class HBlocksInLoopIterator : public ValueObject { 5405 public: 5406 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5407 : blocks_in_loop_(info.GetBlocks()), 5408 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5409 index_(0) { 5410 if (!blocks_in_loop_.IsBitSet(index_)) { 5411 Advance(); 5412 } 5413 } 5414 5415 bool Done() const { return index_ == blocks_.size(); } 5416 HBasicBlock* Current() const { return blocks_[index_]; } 5417 void Advance() { 5418 ++index_; 5419 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5420 if (blocks_in_loop_.IsBitSet(index_)) { 5421 break; 5422 } 5423 } 5424 } 5425 5426 private: 5427 const BitVector& blocks_in_loop_; 5428 const ArenaVector<HBasicBlock*>& blocks_; 5429 size_t index_; 5430 5431 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5432}; 5433 5434// Iterator over the blocks that art part of the loop. Includes blocks part 5435// of an inner loop. The order in which the blocks are iterated is reverse 5436// post order. 5437class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5438 public: 5439 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5440 : blocks_in_loop_(info.GetBlocks()), 5441 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5442 index_(0) { 5443 DCHECK(!blocks_.empty()); 5444 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5445 Advance(); 5446 } 5447 } 5448 5449 bool Done() const { return index_ == blocks_.size(); } 5450 HBasicBlock* Current() const { return blocks_[index_]; } 5451 void Advance() { 5452 ++index_; 5453 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5454 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5455 break; 5456 } 5457 } 5458 } 5459 5460 private: 5461 const BitVector& blocks_in_loop_; 5462 const ArenaVector<HBasicBlock*>& blocks_; 5463 size_t index_; 5464 5465 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5466}; 5467 5468inline int64_t Int64FromConstant(HConstant* constant) { 5469 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5470 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5471 : constant->AsLongConstant()->GetValue(); 5472} 5473 5474inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5475 // For the purposes of the compiler, the dex files must actually be the same object 5476 // if we want to safely treat them as the same. This is especially important for JIT 5477 // as custom class loaders can open the same underlying file (or memory) multiple 5478 // times and provide different class resolution but no two class loaders should ever 5479 // use the same DexFile object - doing so is an unsupported hack that can lead to 5480 // all sorts of weird failures. 5481 return &lhs == &rhs; 5482} 5483 5484} // namespace art 5485 5486#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5487