nodes.h revision 9cf132ba612dcb6d53f3105d32ed007c698968a0
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_bit_vector.h" 25#include "base/arena_containers.h" 26#include "base/arena_object.h" 27#include "base/stl_util.h" 28#include "dex/compiler_enums.h" 29#include "entrypoints/quick/quick_entrypoints_enum.h" 30#include "handle.h" 31#include "handle_scope.h" 32#include "invoke_type.h" 33#include "locations.h" 34#include "method_reference.h" 35#include "mirror/class.h" 36#include "offsets.h" 37#include "primitive.h" 38#include "utils/array_ref.h" 39 40namespace art { 41 42class GraphChecker; 43class HBasicBlock; 44class HCurrentMethod; 45class HDoubleConstant; 46class HEnvironment; 47class HFakeString; 48class HFloatConstant; 49class HGraphBuilder; 50class HGraphVisitor; 51class HInstruction; 52class HIntConstant; 53class HInvoke; 54class HLongConstant; 55class HNullConstant; 56class HPhi; 57class HSuspendCheck; 58class HTryBoundary; 59class LiveInterval; 60class LocationSummary; 61class SlowPathCode; 62class SsaBuilder; 63 64namespace mirror { 65class DexCache; 66} // namespace mirror 67 68static const int kDefaultNumberOfBlocks = 8; 69static const int kDefaultNumberOfSuccessors = 2; 70static const int kDefaultNumberOfPredecessors = 2; 71static const int 
kDefaultNumberOfExceptionalPredecessors = 0; 72static const int kDefaultNumberOfDominatedBlocks = 1; 73static const int kDefaultNumberOfBackEdges = 1; 74 75static constexpr uint32_t kMaxIntShiftValue = 0x1f; 76static constexpr uint64_t kMaxLongShiftValue = 0x3f; 77 78static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 79static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1); 80 81static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 82 83static constexpr uint32_t kNoDexPc = -1; 84 85enum IfCondition { 86 // All types. 87 kCondEQ, // == 88 kCondNE, // != 89 // Signed integers and floating-point numbers. 90 kCondLT, // < 91 kCondLE, // <= 92 kCondGT, // > 93 kCondGE, // >= 94 // Unsigned integers. 95 kCondB, // < 96 kCondBE, // <= 97 kCondA, // > 98 kCondAE, // >= 99}; 100 101enum BuildSsaResult { 102 kBuildSsaFailNonNaturalLoop, 103 kBuildSsaFailThrowCatchLoop, 104 kBuildSsaFailAmbiguousArrayOp, 105 kBuildSsaSuccess, 106}; 107 108class HInstructionList : public ValueObject { 109 public: 110 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 111 112 void AddInstruction(HInstruction* instruction); 113 void RemoveInstruction(HInstruction* instruction); 114 115 // Insert `instruction` before/after an existing instruction `cursor`. 116 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 117 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 118 119 // Return true if this list contains `instruction`. 120 bool Contains(HInstruction* instruction) const; 121 122 // Return true if `instruction1` is found before `instruction2` in 123 // this instruction list and false otherwise. Abort if none 124 // of these instructions is found. 
125 bool FoundBefore(const HInstruction* instruction1, 126 const HInstruction* instruction2) const; 127 128 bool IsEmpty() const { return first_instruction_ == nullptr; } 129 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 130 131 // Update the block of all instructions to be `block`. 132 void SetBlockOfInstructions(HBasicBlock* block) const; 133 134 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 135 void Add(const HInstructionList& instruction_list); 136 137 // Return the number of instructions in the list. This is an expensive operation. 138 size_t CountSize() const; 139 140 private: 141 HInstruction* first_instruction_; 142 HInstruction* last_instruction_; 143 144 friend class HBasicBlock; 145 friend class HGraph; 146 friend class HInstruction; 147 friend class HInstructionIterator; 148 friend class HBackwardInstructionIterator; 149 150 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 151}; 152 153class ReferenceTypeInfo : ValueObject { 154 public: 155 typedef Handle<mirror::Class> TypeHandle; 156 157 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) { 158 // The constructor will check that the type_handle is valid. 
159 return ReferenceTypeInfo(type_handle, is_exact); 160 } 161 162 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); } 163 164 static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) { 165 return handle.GetReference() != nullptr; 166 } 167 168 bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) { 169 return IsValidHandle(type_handle_); 170 } 171 172 bool IsExact() const { return is_exact_; } 173 174 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 175 DCHECK(IsValid()); 176 return GetTypeHandle()->IsObjectClass(); 177 } 178 179 bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 180 DCHECK(IsValid()); 181 return GetTypeHandle()->IsStringClass(); 182 } 183 184 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) { 185 DCHECK(IsValid()); 186 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass(); 187 } 188 189 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) { 190 DCHECK(IsValid()); 191 return GetTypeHandle()->IsInterface(); 192 } 193 194 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 195 DCHECK(IsValid()); 196 return GetTypeHandle()->IsArrayClass(); 197 } 198 199 bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 200 DCHECK(IsValid()); 201 return GetTypeHandle()->IsPrimitiveArray(); 202 } 203 204 bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 205 DCHECK(IsValid()); 206 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray(); 207 } 208 209 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 210 DCHECK(IsValid()); 211 if (!IsExact()) return false; 212 if (!IsArrayClass()) return false; 213 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 214 } 215 216 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 217 
DCHECK(IsValid()); 218 if (!IsExact()) return false; 219 if (!IsArrayClass()) return false; 220 if (!rti.IsArrayClass()) return false; 221 return GetTypeHandle()->GetComponentType()->IsAssignableFrom( 222 rti.GetTypeHandle()->GetComponentType()); 223 } 224 225 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 226 227 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 228 DCHECK(IsValid()); 229 DCHECK(rti.IsValid()); 230 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 231 } 232 233 bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 234 DCHECK(IsValid()); 235 DCHECK(rti.IsValid()); 236 return GetTypeHandle().Get() != rti.GetTypeHandle().Get() && 237 GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 238 } 239 240 // Returns true if the type information provide the same amount of details. 241 // Note that it does not mean that the instructions have the same actual type 242 // (because the type can be the result of a merge). 243 bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 244 if (!IsValid() && !rti.IsValid()) { 245 // Invalid types are equal. 246 return true; 247 } 248 if (!IsValid() || !rti.IsValid()) { 249 // One is valid, the other not. 250 return false; 251 } 252 return IsExact() == rti.IsExact() 253 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 254 } 255 256 private: 257 ReferenceTypeInfo(); 258 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 259 260 // The class of the object. 261 TypeHandle type_handle_; 262 // Whether or not the type is exact or a superclass of the actual type. 263 // Whether or not we have any information about this type. 264 bool is_exact_; 265}; 266 267std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 268 269// Control-flow graph of a method. Contains a list of basic blocks. 
270class HGraph : public ArenaObject<kArenaAllocGraph> { 271 public: 272 HGraph(ArenaAllocator* arena, 273 const DexFile& dex_file, 274 uint32_t method_idx, 275 bool should_generate_constructor_barrier, 276 InstructionSet instruction_set, 277 InvokeType invoke_type = kInvalidInvokeType, 278 bool debuggable = false, 279 int start_instruction_id = 0) 280 : arena_(arena), 281 blocks_(arena->Adapter(kArenaAllocBlockList)), 282 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 283 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 284 entry_block_(nullptr), 285 exit_block_(nullptr), 286 maximum_number_of_out_vregs_(0), 287 number_of_vregs_(0), 288 number_of_in_vregs_(0), 289 temporaries_vreg_slots_(0), 290 has_bounds_checks_(false), 291 has_try_catch_(false), 292 debuggable_(debuggable), 293 current_instruction_id_(start_instruction_id), 294 dex_file_(dex_file), 295 method_idx_(method_idx), 296 invoke_type_(invoke_type), 297 in_ssa_form_(false), 298 should_generate_constructor_barrier_(should_generate_constructor_barrier), 299 instruction_set_(instruction_set), 300 cached_null_constant_(nullptr), 301 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 302 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 303 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 304 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 305 cached_current_method_(nullptr), 306 inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()) { 307 blocks_.reserve(kDefaultNumberOfBlocks); 308 } 309 310 ArenaAllocator* GetArena() const { return arena_; } 311 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 312 313 bool IsInSsaForm() const { return in_ssa_form_; } 314 315 HBasicBlock* GetEntryBlock() const { return entry_block_; } 316 HBasicBlock* GetExitBlock() const { return exit_block_; } 317 bool HasExitBlock() const { 
return exit_block_ != nullptr; } 318 319 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 320 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 321 322 void AddBlock(HBasicBlock* block); 323 324 // Try building the SSA form of this graph, with dominance computation and 325 // loop recognition. Returns a code specifying that it was successful or the 326 // reason for failure. 327 BuildSsaResult TryBuildingSsa(StackHandleScopeCollection* handles); 328 329 void ComputeDominanceInformation(); 330 void ClearDominanceInformation(); 331 332 void BuildDominatorTree(); 333 void SimplifyCFG(); 334 void SimplifyCatchBlocks(); 335 336 // Analyze all natural loops in this graph. Returns a code specifying that it 337 // was successful or the reason for failure. The method will fail if a loop 338 // is not natural, that is the header does not dominate a back edge, or if it 339 // is a throw-catch loop, i.e. the header is a catch block. 340 BuildSsaResult AnalyzeNaturalLoops() const; 341 342 // Iterate over blocks to compute try block membership. Needs reverse post 343 // order and loop information. 344 void ComputeTryBlockInformation(); 345 346 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 347 // Returns the instruction used to replace the invoke expression or null if the 348 // invoke is for a void method. 349 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 350 351 // Need to add a couple of blocks to test if the loop body is entered and 352 // put deoptimization instructions, etc. 353 void TransformLoopHeaderForBCE(HBasicBlock* header); 354 355 // Removes `block` from the graph. Assumes `block` has been disconnected from 356 // other blocks and has no instructions or phis. 357 void DeleteDeadEmptyBlock(HBasicBlock* block); 358 359 // Splits the edge between `block` and `successor` while preserving the 360 // indices in the predecessor/successor lists. 
If there are multiple edges 361 // between the blocks, the lowest indices are used. 362 // Returns the new block which is empty and has the same dex pc as `successor`. 363 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 364 365 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 366 void SimplifyLoop(HBasicBlock* header); 367 368 int32_t GetNextInstructionId() { 369 DCHECK_NE(current_instruction_id_, INT32_MAX); 370 return current_instruction_id_++; 371 } 372 373 int32_t GetCurrentInstructionId() const { 374 return current_instruction_id_; 375 } 376 377 void SetCurrentInstructionId(int32_t id) { 378 current_instruction_id_ = id; 379 } 380 381 uint16_t GetMaximumNumberOfOutVRegs() const { 382 return maximum_number_of_out_vregs_; 383 } 384 385 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 386 maximum_number_of_out_vregs_ = new_value; 387 } 388 389 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 390 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 391 } 392 393 void UpdateTemporariesVRegSlots(size_t slots) { 394 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 395 } 396 397 size_t GetTemporariesVRegSlots() const { 398 DCHECK(!in_ssa_form_); 399 return temporaries_vreg_slots_; 400 } 401 402 void SetNumberOfVRegs(uint16_t number_of_vregs) { 403 number_of_vregs_ = number_of_vregs; 404 } 405 406 uint16_t GetNumberOfVRegs() const { 407 return number_of_vregs_; 408 } 409 410 void SetNumberOfInVRegs(uint16_t value) { 411 number_of_in_vregs_ = value; 412 } 413 414 uint16_t GetNumberOfLocalVRegs() const { 415 DCHECK(!in_ssa_form_); 416 return number_of_vregs_ - number_of_in_vregs_; 417 } 418 419 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 420 return reverse_post_order_; 421 } 422 423 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 424 return linear_order_; 425 } 426 427 bool HasBoundsChecks() const { 428 return has_bounds_checks_; 429 } 430 
431 void SetHasBoundsChecks(bool value) { 432 has_bounds_checks_ = value; 433 } 434 435 bool ShouldGenerateConstructorBarrier() const { 436 return should_generate_constructor_barrier_; 437 } 438 439 bool IsDebuggable() const { return debuggable_; } 440 441 // Returns a constant of the given type and value. If it does not exist 442 // already, it is created and inserted into the graph. This method is only for 443 // integral types. 444 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 445 446 // TODO: This is problematic for the consistency of reference type propagation 447 // because it can be created anytime after the pass and thus it will be left 448 // with an invalid type. 449 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 450 451 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 452 return CreateConstant(value, &cached_int_constants_, dex_pc); 453 } 454 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 455 return CreateConstant(value, &cached_long_constants_, dex_pc); 456 } 457 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 458 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 459 } 460 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 461 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 462 } 463 464 HCurrentMethod* GetCurrentMethod(); 465 466 const DexFile& GetDexFile() const { 467 return dex_file_; 468 } 469 470 uint32_t GetMethodIdx() const { 471 return method_idx_; 472 } 473 474 InvokeType GetInvokeType() const { 475 return invoke_type_; 476 } 477 478 InstructionSet GetInstructionSet() const { 479 return instruction_set_; 480 } 481 482 bool HasTryCatch() const { return has_try_catch_; } 483 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 484 485 ArtMethod* GetArtMethod() const { return art_method_; } 486 
void SetArtMethod(ArtMethod* method) { art_method_ = method; } 487 488 // Returns an instruction with the opposite boolean value from 'cond'. 489 // The instruction has been inserted into the graph, either as a constant, or 490 // before cursor. 491 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor); 492 493 private: 494 void FindBackEdges(ArenaBitVector* visited); 495 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 496 void RemoveDeadBlocks(const ArenaBitVector& visited); 497 498 template <class InstructionType, typename ValueType> 499 InstructionType* CreateConstant(ValueType value, 500 ArenaSafeMap<ValueType, InstructionType*>* cache, 501 uint32_t dex_pc = kNoDexPc) { 502 // Try to find an existing constant of the given value. 503 InstructionType* constant = nullptr; 504 auto cached_constant = cache->find(value); 505 if (cached_constant != cache->end()) { 506 constant = cached_constant->second; 507 } 508 509 // If not found or previously deleted, create and cache a new instruction. 510 // Don't bother reviving a previously deleted instruction, for simplicity. 511 if (constant == nullptr || constant->GetBlock() == nullptr) { 512 constant = new (arena_) InstructionType(value, dex_pc); 513 cache->Overwrite(value, constant); 514 InsertConstant(constant); 515 } 516 return constant; 517 } 518 519 void InsertConstant(HConstant* instruction); 520 521 // Cache a float constant into the graph. This method should only be 522 // called by the SsaBuilder when creating "equivalent" instructions. 523 void CacheFloatConstant(HFloatConstant* constant); 524 525 // See CacheFloatConstant comment. 526 void CacheDoubleConstant(HDoubleConstant* constant); 527 528 ArenaAllocator* const arena_; 529 530 // List of blocks in insertion order. 531 ArenaVector<HBasicBlock*> blocks_; 532 533 // List of blocks to perform a reverse post order tree traversal. 
534 ArenaVector<HBasicBlock*> reverse_post_order_; 535 536 // List of blocks to perform a linear order tree traversal. 537 ArenaVector<HBasicBlock*> linear_order_; 538 539 HBasicBlock* entry_block_; 540 HBasicBlock* exit_block_; 541 542 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 543 uint16_t maximum_number_of_out_vregs_; 544 545 // The number of virtual registers in this method. Contains the parameters. 546 uint16_t number_of_vregs_; 547 548 // The number of virtual registers used by parameters of this method. 549 uint16_t number_of_in_vregs_; 550 551 // Number of vreg size slots that the temporaries use (used in baseline compiler). 552 size_t temporaries_vreg_slots_; 553 554 // Has bounds checks. We can totally skip BCE if it's false. 555 bool has_bounds_checks_; 556 557 // Flag whether there are any try/catch blocks in the graph. We will skip 558 // try/catch-related passes if false. 559 bool has_try_catch_; 560 561 // Indicates whether the graph should be compiled in a way that 562 // ensures full debuggability. If false, we can apply more 563 // aggressive optimizations that may limit the level of debugging. 564 const bool debuggable_; 565 566 // The current id to assign to a newly added instruction. See HInstruction.id_. 567 int32_t current_instruction_id_; 568 569 // The dex file from which the method is from. 570 const DexFile& dex_file_; 571 572 // The method index in the dex file. 573 const uint32_t method_idx_; 574 575 // If inlined, this encodes how the callee is being invoked. 576 const InvokeType invoke_type_; 577 578 // Whether the graph has been transformed to SSA form. Only used 579 // in debug mode to ensure we are not using properties only valid 580 // for non-SSA form (like the number of temporaries). 581 bool in_ssa_form_; 582 583 const bool should_generate_constructor_barrier_; 584 585 const InstructionSet instruction_set_; 586 587 // Cached constants. 
588 HNullConstant* cached_null_constant_; 589 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 590 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 591 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 592 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 593 594 HCurrentMethod* cached_current_method_; 595 596 // The ArtMethod this graph is for. Note that for AOT, it may be null, 597 // for example for methods whose declaring class could not be resolved 598 // (such as when the superclass could not be found). 599 ArtMethod* art_method_; 600 601 // Keep the RTI of inexact Object to avoid having to pass stack handle 602 // collection pointer to passes which may create NullConstant. 603 ReferenceTypeInfo inexact_object_rti_; 604 605 friend class SsaBuilder; // For caching constants. 606 friend class SsaLivenessAnalysis; // For the linear order. 607 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 608 DISALLOW_COPY_AND_ASSIGN(HGraph); 609}; 610 611class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 612 public: 613 HLoopInformation(HBasicBlock* header, HGraph* graph) 614 : header_(header), 615 suspend_check_(nullptr), 616 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 617 // Make bit vector growable, as the number of blocks may change. 
618 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 619 back_edges_.reserve(kDefaultNumberOfBackEdges); 620 } 621 622 HBasicBlock* GetHeader() const { 623 return header_; 624 } 625 626 void SetHeader(HBasicBlock* block) { 627 header_ = block; 628 } 629 630 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 631 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 632 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 633 634 void AddBackEdge(HBasicBlock* back_edge) { 635 back_edges_.push_back(back_edge); 636 } 637 638 void RemoveBackEdge(HBasicBlock* back_edge) { 639 RemoveElement(back_edges_, back_edge); 640 } 641 642 bool IsBackEdge(const HBasicBlock& block) const { 643 return ContainsElement(back_edges_, &block); 644 } 645 646 size_t NumberOfBackEdges() const { 647 return back_edges_.size(); 648 } 649 650 HBasicBlock* GetPreHeader() const; 651 652 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 653 return back_edges_; 654 } 655 656 // Returns the lifetime position of the back edge that has the 657 // greatest lifetime position. 658 size_t GetLifetimeEnd() const; 659 660 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 661 ReplaceElement(back_edges_, existing, new_back_edge); 662 } 663 664 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 665 // that is the header dominates the back edge. 666 bool Populate(); 667 668 // Reanalyzes the loop by removing loop info from its blocks and re-running 669 // Populate(). If there are no back edges left, the loop info is completely 670 // removed as well as its SuspendCheck instruction. It must be run on nested 671 // inner loops first. 672 void Update(); 673 674 // Returns whether this loop information contains `block`. 675 // Note that this loop information *must* be populated before entering this function. 
676 bool Contains(const HBasicBlock& block) const; 677 678 // Returns whether this loop information is an inner loop of `other`. 679 // Note that `other` *must* be populated before entering this function. 680 bool IsIn(const HLoopInformation& other) const; 681 682 // Returns true if instruction is not defined within this loop. 683 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const; 684 685 const ArenaBitVector& GetBlocks() const { return blocks_; } 686 687 void Add(HBasicBlock* block); 688 void Remove(HBasicBlock* block); 689 690 private: 691 // Internal recursive implementation of `Populate`. 692 void PopulateRecursive(HBasicBlock* block); 693 694 HBasicBlock* header_; 695 HSuspendCheck* suspend_check_; 696 ArenaVector<HBasicBlock*> back_edges_; 697 ArenaBitVector blocks_; 698 699 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 700}; 701 702// Stores try/catch information for basic blocks. 703// Note that HGraph is constructed so that catch blocks cannot simultaneously 704// be try blocks. 705class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 706 public: 707 // Try block information constructor. 708 explicit TryCatchInformation(const HTryBoundary& try_entry) 709 : try_entry_(&try_entry), 710 catch_dex_file_(nullptr), 711 catch_type_index_(DexFile::kDexNoIndex16) { 712 DCHECK(try_entry_ != nullptr); 713 } 714 715 // Catch block information constructor. 
716 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 717 : try_entry_(nullptr), 718 catch_dex_file_(&dex_file), 719 catch_type_index_(catch_type_index) {} 720 721 bool IsTryBlock() const { return try_entry_ != nullptr; } 722 723 const HTryBoundary& GetTryEntry() const { 724 DCHECK(IsTryBlock()); 725 return *try_entry_; 726 } 727 728 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 729 730 bool IsCatchAllTypeIndex() const { 731 DCHECK(IsCatchBlock()); 732 return catch_type_index_ == DexFile::kDexNoIndex16; 733 } 734 735 uint16_t GetCatchTypeIndex() const { 736 DCHECK(IsCatchBlock()); 737 return catch_type_index_; 738 } 739 740 const DexFile& GetCatchDexFile() const { 741 DCHECK(IsCatchBlock()); 742 return *catch_dex_file_; 743 } 744 745 private: 746 // One of possibly several TryBoundary instructions entering the block's try. 747 // Only set for try blocks. 748 const HTryBoundary* try_entry_; 749 750 // Exception type information. Only set for catch blocks. 751 const DexFile* catch_dex_file_; 752 const uint16_t catch_type_index_; 753}; 754 755static constexpr size_t kNoLifetime = -1; 756static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 757 758// A block in a method. Contains the list of instructions represented 759// as a double linked list. Each block knows its predecessors and 760// successors. 
761 762class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 763 public: 764 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 765 : graph_(graph), 766 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 767 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 768 loop_information_(nullptr), 769 dominator_(nullptr), 770 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 771 block_id_(kInvalidBlockId), 772 dex_pc_(dex_pc), 773 lifetime_start_(kNoLifetime), 774 lifetime_end_(kNoLifetime), 775 try_catch_information_(nullptr) { 776 predecessors_.reserve(kDefaultNumberOfPredecessors); 777 successors_.reserve(kDefaultNumberOfSuccessors); 778 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 779 } 780 781 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 782 return predecessors_; 783 } 784 785 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 786 return successors_; 787 } 788 789 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const; 790 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const; 791 792 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 793 return ContainsElement(successors_, block, start_from); 794 } 795 796 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 797 return dominated_blocks_; 798 } 799 800 bool IsEntryBlock() const { 801 return graph_->GetEntryBlock() == this; 802 } 803 804 bool IsExitBlock() const { 805 return graph_->GetExitBlock() == this; 806 } 807 808 bool IsSingleGoto() const; 809 bool IsSingleTryBoundary() const; 810 811 // Returns true if this block emits nothing but a jump. 812 bool IsSingleJump() const { 813 HLoopInformation* loop_info = GetLoopInformation(); 814 return (IsSingleGoto() || IsSingleTryBoundary()) 815 // Back edges generate a suspend check. 
816 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 817 } 818 819 void AddBackEdge(HBasicBlock* back_edge) { 820 if (loop_information_ == nullptr) { 821 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 822 } 823 DCHECK_EQ(loop_information_->GetHeader(), this); 824 loop_information_->AddBackEdge(back_edge); 825 } 826 827 HGraph* GetGraph() const { return graph_; } 828 void SetGraph(HGraph* graph) { graph_ = graph; } 829 830 uint32_t GetBlockId() const { return block_id_; } 831 void SetBlockId(int id) { block_id_ = id; } 832 uint32_t GetDexPc() const { return dex_pc_; } 833 834 HBasicBlock* GetDominator() const { return dominator_; } 835 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 836 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 837 838 void RemoveDominatedBlock(HBasicBlock* block) { 839 RemoveElement(dominated_blocks_, block); 840 } 841 842 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 843 ReplaceElement(dominated_blocks_, existing, new_block); 844 } 845 846 void ClearDominanceInformation(); 847 848 int NumberOfBackEdges() const { 849 return IsLoopHeader() ? 
        loop_information_->NumberOfBackEdges() : 0;
  }

  // Accessors for the block's two intrusive instruction lists: phis first,
  // then regular instructions.
  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  // Appends `block` as a successor and records `this` as a predecessor of
  // `block`, keeping the two edge lists symmetric.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Rewires the first edge to `existing` so that it points to `new_block`
  // instead, updating both sides of the edge.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Mirror of ReplaceSuccessor for the predecessor side.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes the first edge from `block` to `this`. Only this side of the edge
  // is updated; callers are responsible for the other side.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes the first edge from `this` to `block` (this side only).
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Symmetric counterpart of AddSuccessor.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors; only valid on blocks with exactly two.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors; only valid on blocks with exactly two.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  // Index of the first occurrence of `predecessor` in the predecessor list.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Index of the first occurrence of `successor` in the successor list.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Split catch block into two blocks after the original move-exception bytecode
  // instruction, or at the beginning if not present. Returns the newly created,
  // latter block, or nullptr if such block could not be created (must be dead
  // in that case). Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitCatchBlockAfterMoveException();

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // A block is a loop header if its loop information names it as the header.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  // A block is a try block if it has try/catch information marking it as such.
  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Liveness positions used by the register allocator.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop containing this block, or null if not in a loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Null for blocks that are in neither a try nor a catch.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  // True once the outermost loop has been passed.
  bool Done() const { return current_ == nullptr; }

  // Steps to the next outer loop, i.e. the loop information of the current
  // loop's pre-header.
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  // Innermost-to-outermost cursor; null when iteration is done.
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// Concrete instructions common to all architectures. Each entry expands the
// callback as M(instruction-name, base-class).
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FakeString, Instruction)                                            \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadLocal, Instruction)                                             \
  M(LoadString, Instruction)                                            \
  M(Local, Instruction)                                                 \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(StoreLocal, Instruction)                                            \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Temporary, Instruction)                                             \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

// Architecture-specific instruction lists; empty unless the corresponding
// code generator is enabled in the build.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64DataProcWithShifterOp, Instruction)                            \
  M(Arm64IntermediateAddress, Instruction)                              \
  M(Arm64MultiplyAccumulate, Instruction)
#endif

// No MIPS/MIPS64-specific instructions at this point.
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declares class H<type> for every instruction above.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overridden by each concrete instruction: kind, debug name,
// type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }  \
  const char* DebugName() const OVERRIDE { return #type; }              \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate for abstract instructions: Is/As casts resolved statically.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }

template <typename T> class HUseList;

// A node of the doubly-linked use list kept by every instruction. `T` is the
// user type (e.g. HInstruction* or HEnvironment*).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Nodes are only created by HUseList<T>::AddUse (see friend below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Doubly-linked list of uses of an instruction; nodes are arena-allocated.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  // Drops all entries. Nodes are arena-allocated, so nothing is freed here.
  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list; `node` must be an element of this list.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear membership test.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n) count; the list does not cache its size.
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over an HUseList.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Attaches `use_node` to a record that did not yet have one.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
1464 */ 1465class SideEffects : public ValueObject { 1466 public: 1467 SideEffects() : flags_(0) {} 1468 1469 static SideEffects None() { 1470 return SideEffects(0); 1471 } 1472 1473 static SideEffects All() { 1474 return SideEffects(kAllChangeBits | kAllDependOnBits); 1475 } 1476 1477 static SideEffects AllChanges() { 1478 return SideEffects(kAllChangeBits); 1479 } 1480 1481 static SideEffects AllDependencies() { 1482 return SideEffects(kAllDependOnBits); 1483 } 1484 1485 static SideEffects AllExceptGCDependency() { 1486 return AllWritesAndReads().Union(SideEffects::CanTriggerGC()); 1487 } 1488 1489 static SideEffects AllWritesAndReads() { 1490 return SideEffects(kAllWrites | kAllReads); 1491 } 1492 1493 static SideEffects AllWrites() { 1494 return SideEffects(kAllWrites); 1495 } 1496 1497 static SideEffects AllReads() { 1498 return SideEffects(kAllReads); 1499 } 1500 1501 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) { 1502 return is_volatile 1503 ? AllWritesAndReads() 1504 : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset)); 1505 } 1506 1507 static SideEffects ArrayWriteOfType(Primitive::Type type) { 1508 return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset)); 1509 } 1510 1511 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) { 1512 return is_volatile 1513 ? AllWritesAndReads() 1514 : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset)); 1515 } 1516 1517 static SideEffects ArrayReadOfType(Primitive::Type type) { 1518 return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset)); 1519 } 1520 1521 static SideEffects CanTriggerGC() { 1522 return SideEffects(1ULL << kCanTriggerGCBit); 1523 } 1524 1525 static SideEffects DependsOnGC() { 1526 return SideEffects(1ULL << kDependsOnGCBit); 1527 } 1528 1529 // Combines the side-effects of this and the other. 
1530 SideEffects Union(SideEffects other) const { 1531 return SideEffects(flags_ | other.flags_); 1532 } 1533 1534 SideEffects Exclusion(SideEffects other) const { 1535 return SideEffects(flags_ & ~other.flags_); 1536 } 1537 1538 void Add(SideEffects other) { 1539 flags_ |= other.flags_; 1540 } 1541 1542 bool Includes(SideEffects other) const { 1543 return (other.flags_ & flags_) == other.flags_; 1544 } 1545 1546 bool HasSideEffects() const { 1547 return (flags_ & kAllChangeBits); 1548 } 1549 1550 bool HasDependencies() const { 1551 return (flags_ & kAllDependOnBits); 1552 } 1553 1554 // Returns true if there are no side effects or dependencies. 1555 bool DoesNothing() const { 1556 return flags_ == 0; 1557 } 1558 1559 // Returns true if something is written. 1560 bool DoesAnyWrite() const { 1561 return (flags_ & kAllWrites); 1562 } 1563 1564 // Returns true if something is read. 1565 bool DoesAnyRead() const { 1566 return (flags_ & kAllReads); 1567 } 1568 1569 // Returns true if potentially everything is written and read 1570 // (every type and every kind of access). 1571 bool DoesAllReadWrite() const { 1572 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads); 1573 } 1574 1575 bool DoesAll() const { 1576 return flags_ == (kAllChangeBits | kAllDependOnBits); 1577 } 1578 1579 // Returns true if `this` may read something written by `other`. 1580 bool MayDependOn(SideEffects other) const { 1581 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits; 1582 return (other.flags_ & depends_on_flags); 1583 } 1584 1585 // Returns string representation of flags (for debugging only). 1586 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL| 1587 std::string ToString() const { 1588 std::string flags = "|"; 1589 for (int s = kLastBit; s >= 0; s--) { 1590 bool current_bit_is_set = ((flags_ >> s) & 1) != 0; 1591 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) { 1592 // This is a bit for the GC side effect. 
1593 if (current_bit_is_set) { 1594 flags += "GC"; 1595 } 1596 flags += "|"; 1597 } else { 1598 // This is a bit for the array/field analysis. 1599 // The underscore character stands for the 'can trigger GC' bit. 1600 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD"; 1601 if (current_bit_is_set) { 1602 flags += kDebug[s]; 1603 } 1604 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) || 1605 (s == kFieldReadOffset) || (s == kArrayReadOffset)) { 1606 flags += "|"; 1607 } 1608 } 1609 } 1610 return flags; 1611 } 1612 1613 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; } 1614 1615 private: 1616 static constexpr int kFieldArrayAnalysisBits = 9; 1617 1618 static constexpr int kFieldWriteOffset = 0; 1619 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits; 1620 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1; 1621 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1; 1622 1623 static constexpr int kChangeBits = kCanTriggerGCBit + 1; 1624 1625 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1; 1626 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits; 1627 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1; 1628 static constexpr int kDependsOnGCBit = kLastBitForReads + 1; 1629 1630 static constexpr int kLastBit = kDependsOnGCBit; 1631 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits; 1632 1633 // Aliases. 
1634 1635 static_assert(kChangeBits == kDependOnBits, 1636 "the 'change' bits should match the 'depend on' bits."); 1637 1638 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1); 1639 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits; 1640 static constexpr uint64_t kAllWrites = 1641 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset; 1642 static constexpr uint64_t kAllReads = 1643 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset; 1644 1645 // Work around the fact that HIR aliases I/F and J/D. 1646 // TODO: remove this interceptor once HIR types are clean 1647 static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) { 1648 switch (type) { 1649 case Primitive::kPrimInt: 1650 case Primitive::kPrimFloat: 1651 return TypeFlag(Primitive::kPrimInt, offset) | 1652 TypeFlag(Primitive::kPrimFloat, offset); 1653 case Primitive::kPrimLong: 1654 case Primitive::kPrimDouble: 1655 return TypeFlag(Primitive::kPrimLong, offset) | 1656 TypeFlag(Primitive::kPrimDouble, offset); 1657 default: 1658 return TypeFlag(type, offset); 1659 } 1660 } 1661 1662 // Translates type to bit flag. 1663 static uint64_t TypeFlag(Primitive::Type type, int offset) { 1664 CHECK_NE(type, Primitive::kPrimVoid); 1665 const uint64_t one = 1; 1666 const int shift = type; // 0-based consecutive enum 1667 DCHECK_LE(kFieldWriteOffset, shift); 1668 DCHECK_LT(shift, kArrayWriteOffset); 1669 return one << (type + offset); 1670 } 1671 1672 // Private constructor on direct flags value. 1673 explicit SideEffects(uint64_t flags) : flags_(flags) {} 1674 1675 uint64_t flags_; 1676}; 1677 1678// A HEnvironment object contains the values of virtual registers at a given location. 
1679class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1680 public: 1681 HEnvironment(ArenaAllocator* arena, 1682 size_t number_of_vregs, 1683 const DexFile& dex_file, 1684 uint32_t method_idx, 1685 uint32_t dex_pc, 1686 InvokeType invoke_type, 1687 HInstruction* holder) 1688 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1689 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1690 parent_(nullptr), 1691 dex_file_(dex_file), 1692 method_idx_(method_idx), 1693 dex_pc_(dex_pc), 1694 invoke_type_(invoke_type), 1695 holder_(holder) { 1696 } 1697 1698 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1699 : HEnvironment(arena, 1700 to_copy.Size(), 1701 to_copy.GetDexFile(), 1702 to_copy.GetMethodIdx(), 1703 to_copy.GetDexPc(), 1704 to_copy.GetInvokeType(), 1705 holder) {} 1706 1707 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1708 if (parent_ != nullptr) { 1709 parent_->SetAndCopyParentChain(allocator, parent); 1710 } else { 1711 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1712 parent_->CopyFrom(parent); 1713 if (parent->GetParent() != nullptr) { 1714 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1715 } 1716 } 1717 } 1718 1719 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1720 void CopyFrom(HEnvironment* environment); 1721 1722 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1723 // input to the loop phi instead. This is for inserting instructions that 1724 // require an environment (like HDeoptimization) in the loop pre-header. 
1725 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1726 1727 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1728 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1729 } 1730 1731 HInstruction* GetInstructionAt(size_t index) const { 1732 return vregs_[index].GetInstruction(); 1733 } 1734 1735 void RemoveAsUserOfInput(size_t index) const; 1736 1737 size_t Size() const { return vregs_.size(); } 1738 1739 HEnvironment* GetParent() const { return parent_; } 1740 1741 void SetLocationAt(size_t index, Location location) { 1742 locations_[index] = location; 1743 } 1744 1745 Location GetLocationAt(size_t index) const { 1746 return locations_[index]; 1747 } 1748 1749 uint32_t GetDexPc() const { 1750 return dex_pc_; 1751 } 1752 1753 uint32_t GetMethodIdx() const { 1754 return method_idx_; 1755 } 1756 1757 InvokeType GetInvokeType() const { 1758 return invoke_type_; 1759 } 1760 1761 const DexFile& GetDexFile() const { 1762 return dex_file_; 1763 } 1764 1765 HInstruction* GetHolder() const { 1766 return holder_; 1767 } 1768 1769 1770 bool IsFromInlinedInvoke() const { 1771 return GetParent() != nullptr; 1772 } 1773 1774 private: 1775 // Record instructions' use entries of this environment for constant-time removal. 1776 // It should only be called by HInstruction when a new environment use is added. 1777 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1778 DCHECK(env_use->GetUser() == this); 1779 size_t index = env_use->GetIndex(); 1780 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1781 } 1782 1783 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1784 ArenaVector<Location> locations_; 1785 HEnvironment* parent_; 1786 const DexFile& dex_file_; 1787 const uint32_t method_idx_; 1788 const uint32_t dex_pc_; 1789 const InvokeType invoke_type_; 1790 1791 // The instruction that holds this environment. 
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Abstract base class of every node in the HIR graph. An instruction knows
// its position inside its basic block (previous_/next_), the instructions and
// environments that use it (uses_/env_uses_), and per-pass metadata filled in
// later: locations_ (code generator), live_interval_/lifetime_position_
// (liveness analysis) and reference_type_info_ (reference type propagation).
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// One enumerator (kXyz) per instruction listed in FOR_EACH_INSTRUCTION.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  // Sets the input without updating the input's use list; see AddUseAt for
  // the use-list-updating counterpart.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Registers `user` (whose input at `index` is this instruction) in this
  // instruction's use list, and stores the resulting use node back into the
  // user's input record so the use can later be removed in constant time.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  // Same as AddUseAt, but for a use coming from an environment.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  // Removes this instruction from the use list of its input at `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom, but loop phis of `block` are substituted per
  // HEnvironment::CopyFromWithLoopPhiAdjustment.
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declare Is##type()/As##type() checks and casts for every concrete
// instruction; the definitions live out of line (after the concrete classes).
#define INSTRUCTION_TYPE_CHECK(type, super) \
  bool Is##type() const; \
  const H##type* As##type() const; \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// Abstract instruction kinds get default implementations here: the casts
// return null unless a subclass overrides them.
#define INSTRUCTION_TYPE_CHECK(type, super) \
  bool Is##type() const { return (As##type() != nullptr); } \
  virtual const H##type* As##type() const { return nullptr; } \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash over the kind and the ids of the inputs; used by value-numbering style
  // deduplication together with Equals().
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (size_t i = 0, e = InputCount(); i < e; ++i) {
      result = (result * 31) + InputAt(i)->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

 protected:
  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;
  void SetSideEffects(SideEffects other) { side_effects_ = other; }

 private:
  void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo reference_type_info_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);

// Iterates over the inputs of a single instruction.
class HInputIterator : public ValueObject {
 public:
  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}

  bool Done() const { return index_ == instruction_->InputCount(); }
  HInstruction* Current() const { return instruction_->InputAt(index_); }
  void Advance() { index_++; }

 private:
  HInstruction* instruction_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
};

// Forward iterator over an HInstructionList. The next pointer is cached
// before Current() is handed out, so the current instruction can be removed
// or replaced while iterating.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
};

// Same as HInstructionIterator, but walks the list from last to first.
class HBackwardInstructionIterator : public ValueObject {
 public:
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// An instruction with a fixed number N of inputs, stored inline in a
// std::array. Most concrete instructions derive from this template.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0.
// Zero-input instructions carry no inputs_ array (std::array of size zero is
// avoided); the input accessors must never be called and abort if they are.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};

// An HExpression is an instruction that produces a value of a specific
// primitive type (overriding HInstruction's default kPrimVoid).
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE { return type_; }

 protected:
  Primitive::Type type_;
};

// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};

// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    // Input 0 is the value being returned.
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};

// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};

// Jumps from one block to another.
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};

// Abstract base class for constant values. Constants have no side effects
// and can always be moved within the graph.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  virtual bool IsMinusOne() const { return false; }
  virtual bool IsZero() const { return false; }
  virtual bool IsOne() const { return false; }

  // Raw 64-bit image of the constant's value, used for type-agnostic
  // comparisons.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

// The reference constant `null`. The constructor is private and HGraph is a
// friend, presumably so that a single shared instance is created per graph —
// TODO(review): confirm against HGraph's constant caching.
class HNullConstant : public HConstant {
 public:
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }

  size_t ComputeHashCode() const OVERRIDE { return 0; }

  DECLARE_INSTRUCTION(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
};

// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // Zero-extend (not sign-extend): go through uint32_t first.
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  // Booleans are encoded as int constants: true -> 1, false -> 0.
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
2416class HIf : public HTemplateInstruction<1> { 2417 public: 2418 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc) 2419 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2420 SetRawInputAt(0, input); 2421 } 2422 2423 bool IsControlFlow() const OVERRIDE { return true; } 2424 2425 HBasicBlock* IfTrueSuccessor() const { 2426 return GetBlock()->GetSuccessors()[0]; 2427 } 2428 2429 HBasicBlock* IfFalseSuccessor() const { 2430 return GetBlock()->GetSuccessors()[1]; 2431 } 2432 2433 DECLARE_INSTRUCTION(If); 2434 2435 private: 2436 DISALLOW_COPY_AND_ASSIGN(HIf); 2437}; 2438 2439 2440// Abstract instruction which marks the beginning and/or end of a try block and 2441// links it to the respective exception handlers. Behaves the same as a Goto in 2442// non-exceptional control flow. 2443// Normal-flow successor is stored at index zero, exception handlers under 2444// higher indices in no particular order. 2445class HTryBoundary : public HTemplateInstruction<0> { 2446 public: 2447 enum BoundaryKind { 2448 kEntry, 2449 kExit, 2450 }; 2451 2452 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc) 2453 : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {} 2454 2455 bool IsControlFlow() const OVERRIDE { return true; } 2456 2457 // Returns the block's non-exceptional successor (index zero). 2458 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; } 2459 2460 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const { 2461 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u); 2462 } 2463 2464 // Returns whether `handler` is among its exception handlers (non-zero index 2465 // successors). 2466 bool HasExceptionHandler(const HBasicBlock& handler) const { 2467 DCHECK(handler.IsCatchBlock()); 2468 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2469 } 2470 2471 // If not present already, adds `handler` to its block's list of exception 2472 // handlers. 
2473 void AddExceptionHandler(HBasicBlock* handler) { 2474 if (!HasExceptionHandler(*handler)) { 2475 GetBlock()->AddSuccessor(handler); 2476 } 2477 } 2478 2479 bool IsEntry() const { return kind_ == BoundaryKind::kEntry; } 2480 2481 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2482 2483 DECLARE_INSTRUCTION(TryBoundary); 2484 2485 private: 2486 const BoundaryKind kind_; 2487 2488 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2489}; 2490 2491// Deoptimize to interpreter, upon checking a condition. 2492class HDeoptimize : public HTemplateInstruction<1> { 2493 public: 2494 // We set CanTriggerGC to prevent any intermediate address to be live 2495 // at the point of the `HDeoptimize`. 2496 HDeoptimize(HInstruction* cond, uint32_t dex_pc) 2497 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 2498 SetRawInputAt(0, cond); 2499 } 2500 2501 bool CanBeMoved() const OVERRIDE { return true; } 2502 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2503 return true; 2504 } 2505 bool NeedsEnvironment() const OVERRIDE { return true; } 2506 bool CanThrow() const OVERRIDE { return true; } 2507 2508 DECLARE_INSTRUCTION(Deoptimize); 2509 2510 private: 2511 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 2512}; 2513 2514// Represents the ArtMethod that was passed as a first argument to 2515// the method. It is used by instructions that depend on it, like 2516// instructions that work with the dex cache. 2517class HCurrentMethod : public HExpression<0> { 2518 public: 2519 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2520 : HExpression(type, SideEffects::None(), dex_pc) {} 2521 2522 DECLARE_INSTRUCTION(CurrentMethod); 2523 2524 private: 2525 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 2526}; 2527 2528// PackedSwitch (jump table). 
A block ending with a PackedSwitch instruction will 2529// have one successor for each entry in the switch table, and the final successor 2530// will be the block containing the next Dex opcode. 2531class HPackedSwitch : public HTemplateInstruction<1> { 2532 public: 2533 HPackedSwitch(int32_t start_value, 2534 uint32_t num_entries, 2535 HInstruction* input, 2536 uint32_t dex_pc = kNoDexPc) 2537 : HTemplateInstruction(SideEffects::None(), dex_pc), 2538 start_value_(start_value), 2539 num_entries_(num_entries) { 2540 SetRawInputAt(0, input); 2541 } 2542 2543 bool IsControlFlow() const OVERRIDE { return true; } 2544 2545 int32_t GetStartValue() const { return start_value_; } 2546 2547 uint32_t GetNumEntries() const { return num_entries_; } 2548 2549 HBasicBlock* GetDefaultBlock() const { 2550 // Last entry is the default block. 2551 return GetBlock()->GetSuccessors()[num_entries_]; 2552 } 2553 DECLARE_INSTRUCTION(PackedSwitch); 2554 2555 private: 2556 const int32_t start_value_; 2557 const uint32_t num_entries_; 2558 2559 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2560}; 2561 2562class HUnaryOperation : public HExpression<1> { 2563 public: 2564 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2565 : HExpression(result_type, SideEffects::None(), dex_pc) { 2566 SetRawInputAt(0, input); 2567 } 2568 2569 HInstruction* GetInput() const { return InputAt(0); } 2570 Primitive::Type GetResultType() const { return GetType(); } 2571 2572 bool CanBeMoved() const OVERRIDE { return true; } 2573 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2574 return true; 2575 } 2576 2577 // Try to statically evaluate `operation` and return a HConstant 2578 // containing the result of this evaluation. If `operation` cannot 2579 // be evaluated as a constant, return null. 2580 HConstant* TryStaticEvaluation() const; 2581 2582 // Apply this operation to `x`. 
2583 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2584 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2585 2586 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation); 2587 2588 private: 2589 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2590}; 2591 2592class HBinaryOperation : public HExpression<2> { 2593 public: 2594 HBinaryOperation(Primitive::Type result_type, 2595 HInstruction* left, 2596 HInstruction* right, 2597 SideEffects side_effects = SideEffects::None(), 2598 uint32_t dex_pc = kNoDexPc) 2599 : HExpression(result_type, side_effects, dex_pc) { 2600 SetRawInputAt(0, left); 2601 SetRawInputAt(1, right); 2602 } 2603 2604 HInstruction* GetLeft() const { return InputAt(0); } 2605 HInstruction* GetRight() const { return InputAt(1); } 2606 Primitive::Type GetResultType() const { return GetType(); } 2607 2608 virtual bool IsCommutative() const { return false; } 2609 2610 // Put constant on the right. 2611 // Returns whether order is changed. 2612 bool OrderInputsWithConstantOnTheRight() { 2613 HInstruction* left = InputAt(0); 2614 HInstruction* right = InputAt(1); 2615 if (left->IsConstant() && !right->IsConstant()) { 2616 ReplaceInput(right, 0); 2617 ReplaceInput(left, 1); 2618 return true; 2619 } 2620 return false; 2621 } 2622 2623 // Order inputs by instruction id, but favor constant on the right side. 2624 // This helps GVN for commutative ops. 2625 void OrderInputs() { 2626 DCHECK(IsCommutative()); 2627 HInstruction* left = InputAt(0); 2628 HInstruction* right = InputAt(1); 2629 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2630 return; 2631 } 2632 if (OrderInputsWithConstantOnTheRight()) { 2633 return; 2634 } 2635 // Order according to instruction id. 
2636 if (left->GetId() > right->GetId()) { 2637 ReplaceInput(right, 0); 2638 ReplaceInput(left, 1); 2639 } 2640 } 2641 2642 bool CanBeMoved() const OVERRIDE { return true; } 2643 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2644 return true; 2645 } 2646 2647 // Try to statically evaluate `operation` and return a HConstant 2648 // containing the result of this evaluation. If `operation` cannot 2649 // be evaluated as a constant, return null. 2650 HConstant* TryStaticEvaluation() const; 2651 2652 // Apply this operation to `x` and `y`. 2653 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2654 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2655 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2656 HLongConstant* y ATTRIBUTE_UNUSED) const { 2657 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2658 return nullptr; 2659 } 2660 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2661 HIntConstant* y ATTRIBUTE_UNUSED) const { 2662 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2663 return nullptr; 2664 } 2665 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2666 HNullConstant* y ATTRIBUTE_UNUSED) const { 2667 VLOG(compiler) << DebugName() << " is not defined for the (null, null) case."; 2668 return nullptr; 2669 } 2670 2671 // Returns an input that can legally be used as the right input and is 2672 // constant, or null. 2673 HConstant* GetConstantRight() const; 2674 2675 // If `GetConstantRight()` returns one of the input, this returns the other 2676 // one. Otherwise it returns null. 
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
};

// Abstract base class of all boolean-valued comparisons. The result type is
// always kPrimBoolean; concrete subclasses define the IfCondition they map to.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
        needs_materialization_(true),
        bias_(ComparisonBias::kNoBias) {}

  bool NeedsMaterialization() const { return needs_materialization_; }
  void ClearNeedsMaterialization() { needs_materialization_ = false; }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The IfCondition (e.g. kCondEQ) this comparison implements.
  virtual IfCondition GetCondition() const = 0;

  // The IfCondition testing the negated result.
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }

  void SetBias(ComparisonBias bias) { bias_ = bias; }

  // Two conditions of the same kind are equal only if their NaN bias matches.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return bias_ == other->AsCondition()->bias_;
  }

  // Whether a NaN input makes this floating-point comparison evaluate to true,
  // given the recorded bias. Only valid for floating-point inputs (DCHECKed).
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE);
  }

  // Whether a NaN input makes this floating-point comparison evaluate to false.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ);
  }

 private:
  // For register allocation purposes, returns whether this instruction needs to be
  // materialized (that is, not just be in the processor flags).
  bool needs_materialization_;

  // Needed if we merge a HCompare into a HCondition.
  ComparisonBias bias_;

  DISALLOW_COPY_AND_ASSIGN(HCondition);
};

// Instruction to check if two inputs are equal to each other.
class HEqual : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // null == null always folds to true (1).
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(1);
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};

// Instruction to check if two inputs are not equal to each other.
class HNotEqual : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // null != null always folds to false (0).
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(0);
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

// Instruction to check if the first input is less than the second (signed).
class HLessThan : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

// Instruction to check if the first input is less than or equal to the second (signed).
class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

// Instruction to check if the first input is greater than the second (signed).
class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

// Instruction to check if the first input is greater than or equal to the second (signed).
class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};

// Instruction to check if the first input is below the second (unsigned <);
// inputs are reinterpreted as unsigned before comparing.
class HBelow : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint32_t>(x->GetValue()),
                static_cast<uint32_t>(y->GetValue())), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint64_t>(x->GetValue()),
                static_cast<uint64_t>(y->GetValue())), GetDexPc());
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const OVERRIDE {
    return kCondB;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondAE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HBelow);
};
2968 2969class HBelowOrEqual : public HCondition { 2970 public: 2971 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2972 : HCondition(first, second, dex_pc) {} 2973 2974 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2975 return GetBlock()->GetGraph()->GetIntConstant( 2976 Compute(static_cast<uint32_t>(x->GetValue()), 2977 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2978 } 2979 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2980 return GetBlock()->GetGraph()->GetIntConstant( 2981 Compute(static_cast<uint64_t>(x->GetValue()), 2982 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2983 } 2984 2985 DECLARE_INSTRUCTION(BelowOrEqual); 2986 2987 IfCondition GetCondition() const OVERRIDE { 2988 return kCondBE; 2989 } 2990 2991 IfCondition GetOppositeCondition() const OVERRIDE { 2992 return kCondA; 2993 } 2994 2995 private: 2996 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2997 2998 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 2999}; 3000 3001class HAbove : public HCondition { 3002 public: 3003 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3004 : HCondition(first, second, dex_pc) {} 3005 3006 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3007 return GetBlock()->GetGraph()->GetIntConstant( 3008 Compute(static_cast<uint32_t>(x->GetValue()), 3009 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3010 } 3011 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3012 return GetBlock()->GetGraph()->GetIntConstant( 3013 Compute(static_cast<uint64_t>(x->GetValue()), 3014 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3015 } 3016 3017 DECLARE_INSTRUCTION(Above); 3018 3019 IfCondition GetCondition() const OVERRIDE { 3020 return kCondA; 3021 } 3022 3023 IfCondition GetOppositeCondition() const OVERRIDE { 3024 return kCondBE; 3025 } 3026 3027 private: 3028 template <typename T> bool 
Compute(T x, T y) const { return x > y; } 3029 3030 DISALLOW_COPY_AND_ASSIGN(HAbove); 3031}; 3032 3033class HAboveOrEqual : public HCondition { 3034 public: 3035 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3036 : HCondition(first, second, dex_pc) {} 3037 3038 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3039 return GetBlock()->GetGraph()->GetIntConstant( 3040 Compute(static_cast<uint32_t>(x->GetValue()), 3041 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3042 } 3043 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3044 return GetBlock()->GetGraph()->GetIntConstant( 3045 Compute(static_cast<uint64_t>(x->GetValue()), 3046 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3047 } 3048 3049 DECLARE_INSTRUCTION(AboveOrEqual); 3050 3051 IfCondition GetCondition() const OVERRIDE { 3052 return kCondAE; 3053 } 3054 3055 IfCondition GetOppositeCondition() const OVERRIDE { 3056 return kCondB; 3057 } 3058 3059 private: 3060 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3061 3062 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3063}; 3064 3065// Instruction to check how two inputs compare to each other. 3066// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3067class HCompare : public HBinaryOperation { 3068 public: 3069 HCompare(Primitive::Type type, 3070 HInstruction* first, 3071 HInstruction* second, 3072 ComparisonBias bias, 3073 uint32_t dex_pc) 3074 : HBinaryOperation(Primitive::kPrimInt, 3075 first, 3076 second, 3077 SideEffectsForArchRuntimeCalls(type), 3078 dex_pc), 3079 bias_(bias) { 3080 DCHECK_EQ(type, first->GetType()); 3081 DCHECK_EQ(type, second->GetType()); 3082 } 3083 3084 template <typename T> 3085 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 
1 : -1; } 3086 3087 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3088 return GetBlock()->GetGraph()->GetIntConstant( 3089 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3090 } 3091 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3092 return GetBlock()->GetGraph()->GetIntConstant( 3093 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3094 } 3095 3096 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3097 return bias_ == other->AsCompare()->bias_; 3098 } 3099 3100 ComparisonBias GetBias() const { return bias_; } 3101 3102 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3103 3104 3105 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3106 // MIPS64 uses a runtime call for FP comparisons. 3107 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3108 } 3109 3110 DECLARE_INSTRUCTION(Compare); 3111 3112 private: 3113 const ComparisonBias bias_; 3114 3115 DISALLOW_COPY_AND_ASSIGN(HCompare); 3116}; 3117 3118// A local in the graph. Corresponds to a Dex register. 3119class HLocal : public HTemplateInstruction<0> { 3120 public: 3121 explicit HLocal(uint16_t reg_number) 3122 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3123 3124 DECLARE_INSTRUCTION(Local); 3125 3126 uint16_t GetRegNumber() const { return reg_number_; } 3127 3128 private: 3129 // The Dex register number. 3130 const uint16_t reg_number_; 3131 3132 DISALLOW_COPY_AND_ASSIGN(HLocal); 3133}; 3134 3135// Load a given local. The local is an input of this instruction. 
class HLoadLocal : public HExpression<1> {
 public:
  HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
  }

  // Input 0 is always the HLocal passed to the constructor.
  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(LoadLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadLocal);
};

// Store a value in a given local. This instruction has two inputs: the value
// and the local.
class HStoreLocal : public HTemplateInstruction<2> {
 public:
  HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, local);
    SetRawInputAt(1, value);
  }

  // Input 0 is the destination HLocal; input 1 is the stored value.
  HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); }

  DECLARE_INSTRUCTION(StoreLocal);

 private:
  DISALLOW_COPY_AND_ASSIGN(HStoreLocal);
};

// Constant of type float. Equality (InstructionDataEquals) is defined on the
// raw bit pattern, so distinct NaN encodings differ and +0.0f != -0.0f.
class HFloatConstant : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant());
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Bit-exact check: NaN never matches.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Note: uses FP equality, so both +0.0f and -0.0f are considered zero.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0f;
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs from the raw IEEE-754 bit pattern.
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};

// Constant of type double. Equality (InstructionDataEquals) is defined on the
// raw bit pattern, like HFloatConstant.
class HDoubleConstant : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant());
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Bit-exact check: NaN never matches.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Note: uses FP equality, so both +0.0 and -0.0 are considered zero.
  bool IsZero() const OVERRIDE {
    return value_ == 0.0;
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs from the raw IEEE-754 bit pattern.
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};

enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);

enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,    // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache  // Intrinsic requires an environment or requires a dex cache.
};

enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};

enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};

// Abstract base class of all invoke instructions. Holds the arguments (plus
// possible extra non-argument inputs) and optional intrinsic information.
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return return_type_; }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return can_throw_; }

  // Only recognized intrinsics are movable (and GVN-able).
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes compare equal only when both are the same (non-kNone) intrinsic.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      return_type_(return_type),
      dex_method_index_(dex_method_index),
      original_invoke_type_(original_invoke_type),
      can_throw_(true),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  void SetCanThrow(bool can_throw) { can_throw_ = can_throw; }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const Primitive::Type return_type_;
  const uint32_t dex_method_index_;
  const InvokeType original_invoke_type_;
  bool can_throw_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};

// Invoke whose target method could not be resolved at compile time; dispatch
// is deferred to the runtime.
class HInvokeUnresolved : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};

class HInvokeStaticOrDirect : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Use ArtMethod* at an address that will be known at link time, embed the direct
    // address in the code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kDirectAddressWithFixup,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use PC-relative call instruction patched at link time.
    // Used for calls within an oat file, boot->boot or app->app.
    kCallPCRelative,

    // Call to a known target address, embed the direct address in code.
    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
    kCallDirect,

    // Call to a target address that will be known at link time, embed the direct
    // address in code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image is relocatable or not.
    kCallDirectWithFixup,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
    uint64_t direct_code_ptr;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType optimized_invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) +
                    (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        optimized_invoke_type_(optimized_invoke_type),
        clinit_check_requirement_(clinit_check_requirement),
        target_method_(target_method),
        dispatch_info_(dispatch_info) { }

  // Replaces the dispatch info, dropping the current-method input when the new
  // method load kind no longer needs it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return return_type_ == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke IsStringInit(), it initially has a HFakeString special argument
  // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(),
  // the "special input" is the current method pointer; otherwise there may be one
  // platform-specific special input, such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }

  InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; }
  void SetOptimizedInvokeType(InvokeType invoke_type) {
    optimized_invoke_type_ = invoke_type;
  }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }

  // NOTE(review): method_load_data is uint64_t but the thread entrypoint offset
  // is returned as int32_t; relies on the offset fitting in 32 bits.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    clinit_check_requirement_ = new_requirement;
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Whether this is a String.<init> factory call whose trailing input is `str`.
  bool IsStringFactoryFor(HFakeString* str) const {
    if (!IsStringInit()) return false;
    DCHECK(!HasCurrentMethodInput());
    if (InputCount() == (number_of_arguments_)) return false;
    return InputAt(InputCount() - 1)->AsFakeString() == str;
  }

  void RemoveFakeStringArgumentAsLastInput() {
    DCHECK(IsStringInit());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsFakeString()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }

  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 private:
  InvokeType optimized_invoke_type_;
  ClinitCheckRequirement clinit_check_requirement_;
  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
MethodReference target_method_;
DispatchInfo dispatch_info_;

DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);

// Invoke dispatched through the receiver's vtable at `vtable_index`.
class HInvokeVirtual : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 uint32_t vtable_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
        vtable_index_(vtable_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // The receiver (input 0) is implicitly null-checked by the vtable load,
    // except for intrinsified invokes which bypass that load.
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Index of the resolved method in the receiver class' vtable.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};

// Invoke of an interface method, dispatched via the IMT at `imt_index`.
class HInvokeInterface : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   uint32_t imt_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};

// Allocates a new, non-array object. Inputs: the class to instantiate and the
// current method (needed by the runtime allocation entrypoint).
class HNewInstance : public HExpression<2> {
 public:
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool can_throw,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        can_throw_(can_throw),
        finalizable_(finalizable),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // It may throw when called on type that's not instantiable/accessible.
  // It can throw OOME.
  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
3775 bool CanThrow() const OVERRIDE { return can_throw_ || true; } 3776 3777 bool IsFinalizable() const { return finalizable_; } 3778 3779 bool CanBeNull() const OVERRIDE { return false; } 3780 3781 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3782 3783 void SetEntrypoint(QuickEntrypointEnum entrypoint) { 3784 entrypoint_ = entrypoint; 3785 } 3786 3787 DECLARE_INSTRUCTION(NewInstance); 3788 3789 private: 3790 const uint16_t type_index_; 3791 const DexFile& dex_file_; 3792 const bool can_throw_; 3793 const bool finalizable_; 3794 QuickEntrypointEnum entrypoint_; 3795 3796 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3797}; 3798 3799class HNeg : public HUnaryOperation { 3800 public: 3801 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3802 : HUnaryOperation(result_type, input, dex_pc) {} 3803 3804 template <typename T> T Compute(T x) const { return -x; } 3805 3806 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3807 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3808 } 3809 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3810 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3811 } 3812 3813 DECLARE_INSTRUCTION(Neg); 3814 3815 private: 3816 DISALLOW_COPY_AND_ASSIGN(HNeg); 3817}; 3818 3819class HNewArray : public HExpression<2> { 3820 public: 3821 HNewArray(HInstruction* length, 3822 HCurrentMethod* current_method, 3823 uint32_t dex_pc, 3824 uint16_t type_index, 3825 const DexFile& dex_file, 3826 QuickEntrypointEnum entrypoint) 3827 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3828 type_index_(type_index), 3829 dex_file_(dex_file), 3830 entrypoint_(entrypoint) { 3831 SetRawInputAt(0, length); 3832 SetRawInputAt(1, current_method); 3833 } 3834 3835 uint16_t GetTypeIndex() const { return type_index_; } 3836 const DexFile& GetDexFile() const { return dex_file_; } 3837 3838 // Calls runtime so needs an 
environment. 3839 bool NeedsEnvironment() const OVERRIDE { return true; } 3840 3841 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 3842 bool CanThrow() const OVERRIDE { return true; } 3843 3844 bool CanBeNull() const OVERRIDE { return false; } 3845 3846 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3847 3848 DECLARE_INSTRUCTION(NewArray); 3849 3850 private: 3851 const uint16_t type_index_; 3852 const DexFile& dex_file_; 3853 const QuickEntrypointEnum entrypoint_; 3854 3855 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3856}; 3857 3858class HAdd : public HBinaryOperation { 3859 public: 3860 HAdd(Primitive::Type result_type, 3861 HInstruction* left, 3862 HInstruction* right, 3863 uint32_t dex_pc = kNoDexPc) 3864 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3865 3866 bool IsCommutative() const OVERRIDE { return true; } 3867 3868 template <typename T> T Compute(T x, T y) const { return x + y; } 3869 3870 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3871 return GetBlock()->GetGraph()->GetIntConstant( 3872 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3873 } 3874 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3875 return GetBlock()->GetGraph()->GetLongConstant( 3876 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3877 } 3878 3879 DECLARE_INSTRUCTION(Add); 3880 3881 private: 3882 DISALLOW_COPY_AND_ASSIGN(HAdd); 3883}; 3884 3885class HSub : public HBinaryOperation { 3886 public: 3887 HSub(Primitive::Type result_type, 3888 HInstruction* left, 3889 HInstruction* right, 3890 uint32_t dex_pc = kNoDexPc) 3891 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3892 3893 template <typename T> T Compute(T x, T y) const { return x - y; } 3894 3895 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3896 return GetBlock()->GetGraph()->GetIntConstant( 3897 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3898 } 3899 
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Binary multiplication; constant-foldable via Evaluate().
class HMul : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> T Compute(T x, T y) const { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Binary division. Note: no default dex_pc — division always records one
// because the generated code may throw / call into the runtime.
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    // (INT_MIN / -1 overflows in hardware; INT_MIN * -1 == INT_MIN is the
    // defined Java result, hence the negation.)
    return (y == -1) ? -x : x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Binary remainder (Java semantics: result has the sign of the dividend).
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T Compute(T x, T y) const {
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ?
0 : x % y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Throws ArithmeticException if its input is zero; otherwise forwards the
// value (its type mirrors the input's type).
class HDivZeroCheck : public HExpression<1> {
 public:
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment to deopt/unwind from.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Left shift; the shift distance is masked per Java semantics (& 31 / & 63).
class HShl : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x << (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x << static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Arithmetic (sign-extending) right shift; distance masked per Java semantics.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    return x >> (y & max_shift_value);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Logical (zero-extending) right shift; computed on the unsigned
// representation to avoid sign extension.
class HUShr : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T, typename U, typename V>
  T Compute(T x, U y, V max_shift_value) const {
    static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value,
                  "V is not the unsigned integer type corresponding to T");
    V ux = static_cast<V>(x);
    return static_cast<T>(ux >> (y & max_shift_value));
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc());
  }
  // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this
  // case is handled as `x >>> static_cast<int>(y)`.
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc());
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND; mixed int/long operands fold to a long constant.
class HAnd : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x & y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR; mixed int/long operands fold to a long constant.
class HOr : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x | y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};

// Bitwise XOR; mixed int/long operands fold to a long constant.
class HXor : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T, typename U>
  auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()),
GetDexPc()); 4236 } 4237 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4238 return GetBlock()->GetGraph()->GetLongConstant( 4239 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4240 } 4241 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4242 return GetBlock()->GetGraph()->GetLongConstant( 4243 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4244 } 4245 4246 DECLARE_INSTRUCTION(Xor); 4247 4248 private: 4249 DISALLOW_COPY_AND_ASSIGN(HXor); 4250}; 4251 4252class HRor : public HBinaryOperation { 4253 public: 4254 HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance) 4255 : HBinaryOperation(result_type, value, distance) {} 4256 4257 template <typename T, typename U, typename V> 4258 T Compute(T x, U y, V max_shift_value) const { 4259 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4260 "V is not the unsigned integer type corresponding to T"); 4261 V ux = static_cast<V>(x); 4262 if ((y & max_shift_value) == 0) { 4263 return static_cast<T>(ux); 4264 } else { 4265 const V reg_bits = sizeof(T) * 8; 4266 return static_cast<T>(ux >> (y & max_shift_value)) | 4267 (x << (reg_bits - (y & max_shift_value))); 4268 } 4269 } 4270 4271 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4272 return GetBlock()->GetGraph()->GetIntConstant( 4273 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4274 } 4275 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4276 return GetBlock()->GetGraph()->GetLongConstant( 4277 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4278 } 4279 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4280 return GetBlock()->GetGraph()->GetLongConstant( 4281 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4282 } 4283 4284 DECLARE_INSTRUCTION(Ror); 4285 4286 private: 4287 DISALLOW_COPY_AND_ASSIGN(HRor); 4288}; 4289 4290// The 
value of a parameter in this method. Its location depends on 4291// the calling convention. 4292class HParameterValue : public HExpression<0> { 4293 public: 4294 HParameterValue(const DexFile& dex_file, 4295 uint16_t type_index, 4296 uint8_t index, 4297 Primitive::Type parameter_type, 4298 bool is_this = false) 4299 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 4300 dex_file_(dex_file), 4301 type_index_(type_index), 4302 index_(index), 4303 is_this_(is_this), 4304 can_be_null_(!is_this) {} 4305 4306 const DexFile& GetDexFile() const { return dex_file_; } 4307 uint16_t GetTypeIndex() const { return type_index_; } 4308 uint8_t GetIndex() const { return index_; } 4309 bool IsThis() const { return is_this_; } 4310 4311 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4312 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4313 4314 DECLARE_INSTRUCTION(ParameterValue); 4315 4316 private: 4317 const DexFile& dex_file_; 4318 const uint16_t type_index_; 4319 // The index of this parameter in the parameters list. Must be less 4320 // than HGraph::number_of_in_vregs_. 4321 const uint8_t index_; 4322 4323 // Whether or not the parameter value corresponds to 'this' argument. 
  const bool is_this_;

  // Refined by reference-type propagation / SetCanBeNull().
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement (~x) for int/long.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean (0/1) input.
class HBooleanNot : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> bool Compute(T x) const {
    // Inputs must already be canonical booleans (0 or 1).
    DCHECK(IsUint<1>(x));
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // A conversion to the same type would be a no-op and must not be built.
    DCHECK_NE(input->GetType(), result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls
  // to the runtime.

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    // fp -> integral and long -> fp conversions may go through a runtime call.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};

// Sentinel "no vreg" marker (all-ones when stored in a uint32_t).
static constexpr uint32_t kNoRegNumber = -1;

// SSA phi: merges one input per predecessor of its block.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number),
        type_(ToPhiType(type)),
        // Phis are constructed live and marked dead if conflicting or unused.
        // Individual steps of SsaBuilder should assume that if a phi has been
        // marked dead, it can be ignored and will be removed by SsaPhiElimination.
        is_live_(true),
        can_be_null_(true) {
    DCHECK_NE(type_, Primitive::kPrimVoid);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    switch (type) {
      // Sub-int types are widened to int; phis do not track them separately.
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimShort:
      case Primitive::kPrimChar:
        return Primitive::kPrimInt;
      default:
        return type;
    }
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return type_; }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    // (1) int -> float/ref (primitive type propagation),
    // (2) long -> double (primitive type propagation).
    DCHECK(type_ == new_type ||
           (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (type_ == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    type_ = new_type;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }
  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }

  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { is_live_ = false; }
  void SetLive() { is_live_ = true; }
  bool IsDead() const { return !is_live_; }
  bool IsLive() const { return is_live_; }

  // Two phis are vreg-equivalent when they merge the same dex register in the
  // same block (they may differ in type).
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    // Relies on the adjacency invariant above: walks successors only while
    // they are phis of the same dex register (AsPhi() is unchecked here).
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  Primitive::Type type_;
  bool is_live_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};

// Throws NullPointerException if its input is null; otherwise forwards the
// (known non-null) value.
class HNullCheck : public HExpression<1> {
 public:
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment to deopt/unwind from.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // The check's result is its input after a successful null test.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};

// Immutable description of a resolved field, shared by the field get/set
// instructions.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};

// Reads an instance field from the object in input 0.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Only offsets within the first page fault reliably on a null base.
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t
ComputeHashCode() const OVERRIDE { 4629 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4630 } 4631 4632 const FieldInfo& GetFieldInfo() const { return field_info_; } 4633 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4634 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4635 bool IsVolatile() const { return field_info_.IsVolatile(); } 4636 4637 DECLARE_INSTRUCTION(InstanceFieldGet); 4638 4639 private: 4640 const FieldInfo field_info_; 4641 4642 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet); 4643}; 4644 4645class HInstanceFieldSet : public HTemplateInstruction<2> { 4646 public: 4647 HInstanceFieldSet(HInstruction* object, 4648 HInstruction* value, 4649 Primitive::Type field_type, 4650 MemberOffset field_offset, 4651 bool is_volatile, 4652 uint32_t field_idx, 4653 uint16_t declaring_class_def_index, 4654 const DexFile& dex_file, 4655 Handle<mirror::DexCache> dex_cache, 4656 uint32_t dex_pc) 4657 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 4658 dex_pc), 4659 field_info_(field_offset, 4660 field_type, 4661 is_volatile, 4662 field_idx, 4663 declaring_class_def_index, 4664 dex_file, 4665 dex_cache), 4666 value_can_be_null_(true) { 4667 SetRawInputAt(0, object); 4668 SetRawInputAt(1, value); 4669 } 4670 4671 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4672 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4673 } 4674 4675 const FieldInfo& GetFieldInfo() const { return field_info_; } 4676 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4677 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4678 bool IsVolatile() const { return field_info_.IsVolatile(); } 4679 HInstruction* GetValue() const { return InputAt(1); } 4680 bool GetValueCanBeNull() const { return value_can_be_null_; } 4681 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4682 4683 
DECLARE_INSTRUCTION(InstanceFieldSet); 4684 4685 private: 4686 const FieldInfo field_info_; 4687 bool value_can_be_null_; 4688 4689 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet); 4690}; 4691 4692class HArrayGet : public HExpression<2> { 4693 public: 4694 HArrayGet(HInstruction* array, 4695 HInstruction* index, 4696 Primitive::Type type, 4697 uint32_t dex_pc, 4698 SideEffects additional_side_effects = SideEffects::None()) 4699 : HExpression(type, 4700 SideEffects::ArrayReadOfType(type).Union(additional_side_effects), 4701 dex_pc) { 4702 SetRawInputAt(0, array); 4703 SetRawInputAt(1, index); 4704 } 4705 4706 bool CanBeMoved() const OVERRIDE { return true; } 4707 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4708 return true; 4709 } 4710 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4711 // TODO: We can be smarter here. 4712 // Currently, the array access is always preceded by an ArrayLength or a NullCheck 4713 // which generates the implicit null check. There are cases when these can be removed 4714 // to produce better code. If we ever add optimizations to do so we should allow an 4715 // implicit check here (as long as the address falls in the first page). 
4716 return false; 4717 } 4718 4719 bool IsEquivalentOf(HArrayGet* other) const { 4720 bool result = (GetDexPc() == other->GetDexPc()); 4721 if (kIsDebugBuild && result) { 4722 DCHECK_EQ(GetBlock(), other->GetBlock()); 4723 DCHECK_EQ(GetArray(), other->GetArray()); 4724 DCHECK_EQ(GetIndex(), other->GetIndex()); 4725 if (Primitive::IsIntOrLongType(GetType())) { 4726 DCHECK(Primitive::IsFloatingPointType(other->GetType())); 4727 } else { 4728 DCHECK(Primitive::IsFloatingPointType(GetType())); 4729 DCHECK(Primitive::IsIntOrLongType(other->GetType())); 4730 } 4731 } 4732 return result; 4733 } 4734 4735 HInstruction* GetArray() const { return InputAt(0); } 4736 HInstruction* GetIndex() const { return InputAt(1); } 4737 4738 DECLARE_INSTRUCTION(ArrayGet); 4739 4740 private: 4741 DISALLOW_COPY_AND_ASSIGN(HArrayGet); 4742}; 4743 4744class HArraySet : public HTemplateInstruction<3> { 4745 public: 4746 HArraySet(HInstruction* array, 4747 HInstruction* index, 4748 HInstruction* value, 4749 Primitive::Type expected_component_type, 4750 uint32_t dex_pc, 4751 SideEffects additional_side_effects = SideEffects::None()) 4752 : HTemplateInstruction( 4753 SideEffects::ArrayWriteOfType(expected_component_type).Union( 4754 SideEffectsForArchRuntimeCalls(value->GetType())).Union( 4755 additional_side_effects), 4756 dex_pc), 4757 expected_component_type_(expected_component_type), 4758 needs_type_check_(value->GetType() == Primitive::kPrimNot), 4759 value_can_be_null_(true), 4760 static_type_of_array_is_object_array_(false) { 4761 SetRawInputAt(0, array); 4762 SetRawInputAt(1, index); 4763 SetRawInputAt(2, value); 4764 } 4765 4766 bool NeedsEnvironment() const OVERRIDE { 4767 // We currently always call a runtime method to catch array store 4768 // exceptions. 4769 return needs_type_check_; 4770 } 4771 4772 // Can throw ArrayStoreException. 
4773 bool CanThrow() const OVERRIDE { return needs_type_check_; } 4774 4775 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4776 // TODO: Same as for ArrayGet. 4777 return false; 4778 } 4779 4780 void ClearNeedsTypeCheck() { 4781 needs_type_check_ = false; 4782 } 4783 4784 void ClearValueCanBeNull() { 4785 value_can_be_null_ = false; 4786 } 4787 4788 void SetStaticTypeOfArrayIsObjectArray() { 4789 static_type_of_array_is_object_array_ = true; 4790 } 4791 4792 bool GetValueCanBeNull() const { return value_can_be_null_; } 4793 bool NeedsTypeCheck() const { return needs_type_check_; } 4794 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4795 4796 HInstruction* GetArray() const { return InputAt(0); } 4797 HInstruction* GetIndex() const { return InputAt(1); } 4798 HInstruction* GetValue() const { return InputAt(2); } 4799 4800 Primitive::Type GetComponentType() const { 4801 // The Dex format does not type floating point index operations. Since the 4802 // `expected_component_type_` is set during building and can therefore not 4803 // be correct, we also check what is the value type. If it is a floating 4804 // point type, we must use that type. 4805 Primitive::Type value_type = GetValue()->GetType(); 4806 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4807 ? value_type 4808 : expected_component_type_; 4809 } 4810 4811 Primitive::Type GetRawExpectedComponentType() const { 4812 return expected_component_type_; 4813 } 4814 4815 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4816 return (value_type == Primitive::kPrimNot) ? 
SideEffects::CanTriggerGC() : SideEffects::None(); 4817 } 4818 4819 DECLARE_INSTRUCTION(ArraySet); 4820 4821 private: 4822 const Primitive::Type expected_component_type_; 4823 bool needs_type_check_; 4824 bool value_can_be_null_; 4825 // Cached information for the reference_type_info_ so that codegen 4826 // does not need to inspect the static type. 4827 bool static_type_of_array_is_object_array_; 4828 4829 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4830}; 4831 4832class HArrayLength : public HExpression<1> { 4833 public: 4834 HArrayLength(HInstruction* array, uint32_t dex_pc) 4835 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4836 // Note that arrays do not change length, so the instruction does not 4837 // depend on any write. 4838 SetRawInputAt(0, array); 4839 } 4840 4841 bool CanBeMoved() const OVERRIDE { return true; } 4842 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4843 return true; 4844 } 4845 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4846 return obj == InputAt(0); 4847 } 4848 4849 DECLARE_INSTRUCTION(ArrayLength); 4850 4851 private: 4852 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4853}; 4854 4855class HBoundsCheck : public HExpression<2> { 4856 public: 4857 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4858 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4859 DCHECK(index->GetType() == Primitive::kPrimInt); 4860 SetRawInputAt(0, index); 4861 SetRawInputAt(1, length); 4862 } 4863 4864 bool CanBeMoved() const OVERRIDE { return true; } 4865 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4866 return true; 4867 } 4868 4869 bool NeedsEnvironment() const OVERRIDE { return true; } 4870 4871 bool CanThrow() const OVERRIDE { return true; } 4872 4873 HInstruction* GetIndex() const { return InputAt(0); } 4874 4875 DECLARE_INSTRUCTION(BoundsCheck); 4876 4877 private: 4878 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4879}; 
4880 4881/** 4882 * Some DEX instructions are folded into multiple HInstructions that need 4883 * to stay live until the last HInstruction. This class 4884 * is used as a marker for the baseline compiler to ensure its preceding 4885 * HInstruction stays live. `index` represents the stack location index of the 4886 * instruction (the actual offset is computed as index * vreg_size). 4887 */ 4888class HTemporary : public HTemplateInstruction<0> { 4889 public: 4890 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4891 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4892 4893 size_t GetIndex() const { return index_; } 4894 4895 Primitive::Type GetType() const OVERRIDE { 4896 // The previous instruction is the one that will be stored in the temporary location. 4897 DCHECK(GetPrevious() != nullptr); 4898 return GetPrevious()->GetType(); 4899 } 4900 4901 DECLARE_INSTRUCTION(Temporary); 4902 4903 private: 4904 const size_t index_; 4905 DISALLOW_COPY_AND_ASSIGN(HTemporary); 4906}; 4907 4908class HSuspendCheck : public HTemplateInstruction<0> { 4909 public: 4910 explicit HSuspendCheck(uint32_t dex_pc) 4911 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {} 4912 4913 bool NeedsEnvironment() const OVERRIDE { 4914 return true; 4915 } 4916 4917 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; } 4918 SlowPathCode* GetSlowPath() const { return slow_path_; } 4919 4920 DECLARE_INSTRUCTION(SuspendCheck); 4921 4922 private: 4923 // Only used for code generation, in order to share the same slow path between back edges 4924 // of a same loop. 4925 SlowPathCode* slow_path_; 4926 4927 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck); 4928}; 4929 4930// Pseudo-instruction which provides the native debugger with mapping information. 4931// It ensures that we can generate line number and local variables at this point. 
4932class HNativeDebugInfo : public HTemplateInstruction<0> { 4933 public: 4934 explicit HNativeDebugInfo(uint32_t dex_pc) 4935 : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {} 4936 4937 bool NeedsEnvironment() const OVERRIDE { 4938 return true; 4939 } 4940 4941 DECLARE_INSTRUCTION(NativeDebugInfo); 4942 4943 private: 4944 DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo); 4945}; 4946 4947/** 4948 * Instruction to load a Class object. 4949 */ 4950class HLoadClass : public HExpression<1> { 4951 public: 4952 HLoadClass(HCurrentMethod* current_method, 4953 uint16_t type_index, 4954 const DexFile& dex_file, 4955 bool is_referrers_class, 4956 uint32_t dex_pc, 4957 bool needs_access_check, 4958 bool is_in_dex_cache) 4959 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4960 type_index_(type_index), 4961 dex_file_(dex_file), 4962 is_referrers_class_(is_referrers_class), 4963 generate_clinit_check_(false), 4964 needs_access_check_(needs_access_check), 4965 is_in_dex_cache_(is_in_dex_cache), 4966 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 4967 // Referrers class should not need access check. We never inline unverified 4968 // methods so we can't possibly end up in this situation. 4969 DCHECK(!is_referrers_class_ || !needs_access_check_); 4970 SetRawInputAt(0, current_method); 4971 } 4972 4973 bool CanBeMoved() const OVERRIDE { return true; } 4974 4975 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4976 // Note that we don't need to test for generate_clinit_check_. 4977 // Whether or not we need to generate the clinit check is processed in 4978 // prepare_for_register_allocator based on existing HInvokes and HClinitChecks. 
4979 return other->AsLoadClass()->type_index_ == type_index_ && 4980 other->AsLoadClass()->needs_access_check_ == needs_access_check_; 4981 } 4982 4983 size_t ComputeHashCode() const OVERRIDE { return type_index_; } 4984 4985 uint16_t GetTypeIndex() const { return type_index_; } 4986 bool IsReferrersClass() const { return is_referrers_class_; } 4987 bool CanBeNull() const OVERRIDE { return false; } 4988 4989 bool NeedsEnvironment() const OVERRIDE { 4990 return CanCallRuntime(); 4991 } 4992 4993 bool MustGenerateClinitCheck() const { 4994 return generate_clinit_check_; 4995 } 4996 4997 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 4998 // The entrypoint the code generator is going to call does not do 4999 // clinit of the class. 5000 DCHECK(!NeedsAccessCheck()); 5001 generate_clinit_check_ = generate_clinit_check; 5002 } 5003 5004 bool CanCallRuntime() const { 5005 return MustGenerateClinitCheck() || 5006 (!is_referrers_class_ && !is_in_dex_cache_) || 5007 needs_access_check_; 5008 } 5009 5010 bool NeedsAccessCheck() const { 5011 return needs_access_check_; 5012 } 5013 5014 bool CanThrow() const OVERRIDE { 5015 return CanCallRuntime(); 5016 } 5017 5018 ReferenceTypeInfo GetLoadedClassRTI() { 5019 return loaded_class_rti_; 5020 } 5021 5022 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 5023 // Make sure we only set exact types (the loaded class should never be merged). 
5024 DCHECK(rti.IsExact()); 5025 loaded_class_rti_ = rti; 5026 } 5027 5028 const DexFile& GetDexFile() { return dex_file_; } 5029 5030 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; } 5031 5032 static SideEffects SideEffectsForArchRuntimeCalls() { 5033 return SideEffects::CanTriggerGC(); 5034 } 5035 5036 bool IsInDexCache() const { return is_in_dex_cache_; } 5037 5038 DECLARE_INSTRUCTION(LoadClass); 5039 5040 private: 5041 const uint16_t type_index_; 5042 const DexFile& dex_file_; 5043 const bool is_referrers_class_; 5044 // Whether this instruction must generate the initialization check. 5045 // Used for code generation. 5046 bool generate_clinit_check_; 5047 const bool needs_access_check_; 5048 const bool is_in_dex_cache_; 5049 5050 ReferenceTypeInfo loaded_class_rti_; 5051 5052 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 5053}; 5054 5055class HLoadString : public HExpression<1> { 5056 public: 5057 HLoadString(HCurrentMethod* current_method, 5058 uint32_t string_index, 5059 uint32_t dex_pc, 5060 bool is_in_dex_cache) 5061 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 5062 string_index_(string_index), 5063 is_in_dex_cache_(is_in_dex_cache) { 5064 SetRawInputAt(0, current_method); 5065 } 5066 5067 bool CanBeMoved() const OVERRIDE { return true; } 5068 5069 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 5070 return other->AsLoadString()->string_index_ == string_index_; 5071 } 5072 5073 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 5074 5075 uint32_t GetStringIndex() const { return string_index_; } 5076 5077 // TODO: Can we deopt or debug when we resolve a string? 
5078 bool NeedsEnvironment() const OVERRIDE { return false; } 5079 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; } 5080 bool CanBeNull() const OVERRIDE { return false; } 5081 bool IsInDexCache() const { return is_in_dex_cache_; } 5082 5083 static SideEffects SideEffectsForArchRuntimeCalls() { 5084 return SideEffects::CanTriggerGC(); 5085 } 5086 5087 DECLARE_INSTRUCTION(LoadString); 5088 5089 private: 5090 const uint32_t string_index_; 5091 const bool is_in_dex_cache_; 5092 5093 DISALLOW_COPY_AND_ASSIGN(HLoadString); 5094}; 5095 5096/** 5097 * Performs an initialization check on its Class object input. 5098 */ 5099class HClinitCheck : public HExpression<1> { 5100 public: 5101 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 5102 : HExpression( 5103 Primitive::kPrimNot, 5104 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 5105 dex_pc) { 5106 SetRawInputAt(0, constant); 5107 } 5108 5109 bool CanBeMoved() const OVERRIDE { return true; } 5110 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5111 return true; 5112 } 5113 5114 bool NeedsEnvironment() const OVERRIDE { 5115 // May call runtime to initialize the class. 
5116 return true; 5117 } 5118 5119 bool CanThrow() const OVERRIDE { return true; } 5120 5121 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 5122 5123 DECLARE_INSTRUCTION(ClinitCheck); 5124 5125 private: 5126 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 5127}; 5128 5129class HStaticFieldGet : public HExpression<1> { 5130 public: 5131 HStaticFieldGet(HInstruction* cls, 5132 Primitive::Type field_type, 5133 MemberOffset field_offset, 5134 bool is_volatile, 5135 uint32_t field_idx, 5136 uint16_t declaring_class_def_index, 5137 const DexFile& dex_file, 5138 Handle<mirror::DexCache> dex_cache, 5139 uint32_t dex_pc) 5140 : HExpression(field_type, 5141 SideEffects::FieldReadOfType(field_type, is_volatile), 5142 dex_pc), 5143 field_info_(field_offset, 5144 field_type, 5145 is_volatile, 5146 field_idx, 5147 declaring_class_def_index, 5148 dex_file, 5149 dex_cache) { 5150 SetRawInputAt(0, cls); 5151 } 5152 5153 5154 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 5155 5156 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 5157 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 5158 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 5159 } 5160 5161 size_t ComputeHashCode() const OVERRIDE { 5162 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 5163 } 5164 5165 const FieldInfo& GetFieldInfo() const { return field_info_; } 5166 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 5167 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 5168 bool IsVolatile() const { return field_info_.IsVolatile(); } 5169 5170 DECLARE_INSTRUCTION(StaticFieldGet); 5171 5172 private: 5173 const FieldInfo field_info_; 5174 5175 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 5176}; 5177 5178class HStaticFieldSet : public HTemplateInstruction<2> { 5179 public: 5180 HStaticFieldSet(HInstruction* cls, 5181 HInstruction* value, 5182 Primitive::Type 
field_type, 5183 MemberOffset field_offset, 5184 bool is_volatile, 5185 uint32_t field_idx, 5186 uint16_t declaring_class_def_index, 5187 const DexFile& dex_file, 5188 Handle<mirror::DexCache> dex_cache, 5189 uint32_t dex_pc) 5190 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 5191 dex_pc), 5192 field_info_(field_offset, 5193 field_type, 5194 is_volatile, 5195 field_idx, 5196 declaring_class_def_index, 5197 dex_file, 5198 dex_cache), 5199 value_can_be_null_(true) { 5200 SetRawInputAt(0, cls); 5201 SetRawInputAt(1, value); 5202 } 5203 5204 const FieldInfo& GetFieldInfo() const { return field_info_; } 5205 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 5206 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 5207 bool IsVolatile() const { return field_info_.IsVolatile(); } 5208 5209 HInstruction* GetValue() const { return InputAt(1); } 5210 bool GetValueCanBeNull() const { return value_can_be_null_; } 5211 void ClearValueCanBeNull() { value_can_be_null_ = false; } 5212 5213 DECLARE_INSTRUCTION(StaticFieldSet); 5214 5215 private: 5216 const FieldInfo field_info_; 5217 bool value_can_be_null_; 5218 5219 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 5220}; 5221 5222class HUnresolvedInstanceFieldGet : public HExpression<1> { 5223 public: 5224 HUnresolvedInstanceFieldGet(HInstruction* obj, 5225 Primitive::Type field_type, 5226 uint32_t field_index, 5227 uint32_t dex_pc) 5228 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5229 field_index_(field_index) { 5230 SetRawInputAt(0, obj); 5231 } 5232 5233 bool NeedsEnvironment() const OVERRIDE { return true; } 5234 bool CanThrow() const OVERRIDE { return true; } 5235 5236 Primitive::Type GetFieldType() const { return GetType(); } 5237 uint32_t GetFieldIndex() const { return field_index_; } 5238 5239 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 5240 5241 private: 5242 const uint32_t field_index_; 5243 5244 
DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 5245}; 5246 5247class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 5248 public: 5249 HUnresolvedInstanceFieldSet(HInstruction* obj, 5250 HInstruction* value, 5251 Primitive::Type field_type, 5252 uint32_t field_index, 5253 uint32_t dex_pc) 5254 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5255 field_type_(field_type), 5256 field_index_(field_index) { 5257 DCHECK_EQ(field_type, value->GetType()); 5258 SetRawInputAt(0, obj); 5259 SetRawInputAt(1, value); 5260 } 5261 5262 bool NeedsEnvironment() const OVERRIDE { return true; } 5263 bool CanThrow() const OVERRIDE { return true; } 5264 5265 Primitive::Type GetFieldType() const { return field_type_; } 5266 uint32_t GetFieldIndex() const { return field_index_; } 5267 5268 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 5269 5270 private: 5271 const Primitive::Type field_type_; 5272 const uint32_t field_index_; 5273 5274 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 5275}; 5276 5277class HUnresolvedStaticFieldGet : public HExpression<0> { 5278 public: 5279 HUnresolvedStaticFieldGet(Primitive::Type field_type, 5280 uint32_t field_index, 5281 uint32_t dex_pc) 5282 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5283 field_index_(field_index) { 5284 } 5285 5286 bool NeedsEnvironment() const OVERRIDE { return true; } 5287 bool CanThrow() const OVERRIDE { return true; } 5288 5289 Primitive::Type GetFieldType() const { return GetType(); } 5290 uint32_t GetFieldIndex() const { return field_index_; } 5291 5292 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 5293 5294 private: 5295 const uint32_t field_index_; 5296 5297 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 5298}; 5299 5300class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 5301 public: 5302 HUnresolvedStaticFieldSet(HInstruction* value, 5303 Primitive::Type field_type, 5304 uint32_t field_index, 5305 uint32_t dex_pc) 5306 
: HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5307 field_type_(field_type), 5308 field_index_(field_index) { 5309 DCHECK_EQ(field_type, value->GetType()); 5310 SetRawInputAt(0, value); 5311 } 5312 5313 bool NeedsEnvironment() const OVERRIDE { return true; } 5314 bool CanThrow() const OVERRIDE { return true; } 5315 5316 Primitive::Type GetFieldType() const { return field_type_; } 5317 uint32_t GetFieldIndex() const { return field_index_; } 5318 5319 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 5320 5321 private: 5322 const Primitive::Type field_type_; 5323 const uint32_t field_index_; 5324 5325 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 5326}; 5327 5328// Implement the move-exception DEX instruction. 5329class HLoadException : public HExpression<0> { 5330 public: 5331 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 5332 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 5333 5334 bool CanBeNull() const OVERRIDE { return false; } 5335 5336 DECLARE_INSTRUCTION(LoadException); 5337 5338 private: 5339 DISALLOW_COPY_AND_ASSIGN(HLoadException); 5340}; 5341 5342// Implicit part of move-exception which clears thread-local exception storage. 5343// Must not be removed because the runtime expects the TLS to get cleared. 
5344class HClearException : public HTemplateInstruction<0> { 5345 public: 5346 explicit HClearException(uint32_t dex_pc = kNoDexPc) 5347 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 5348 5349 DECLARE_INSTRUCTION(ClearException); 5350 5351 private: 5352 DISALLOW_COPY_AND_ASSIGN(HClearException); 5353}; 5354 5355class HThrow : public HTemplateInstruction<1> { 5356 public: 5357 HThrow(HInstruction* exception, uint32_t dex_pc) 5358 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 5359 SetRawInputAt(0, exception); 5360 } 5361 5362 bool IsControlFlow() const OVERRIDE { return true; } 5363 5364 bool NeedsEnvironment() const OVERRIDE { return true; } 5365 5366 bool CanThrow() const OVERRIDE { return true; } 5367 5368 5369 DECLARE_INSTRUCTION(Throw); 5370 5371 private: 5372 DISALLOW_COPY_AND_ASSIGN(HThrow); 5373}; 5374 5375/** 5376 * Implementation strategies for the code generator of a HInstanceOf 5377 * or `HCheckCast`. 5378 */ 5379enum class TypeCheckKind { 5380 kUnresolvedCheck, // Check against an unresolved type. 5381 kExactCheck, // Can do a single class compare. 5382 kClassHierarchyCheck, // Can just walk the super class chain. 5383 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 5384 kInterfaceCheck, // No optimization yet when checking against an interface. 5385 kArrayObjectCheck, // Can just check if the array is not primitive. 5386 kArrayCheck // No optimization yet when checking against a generic array. 
5387}; 5388 5389class HInstanceOf : public HExpression<2> { 5390 public: 5391 HInstanceOf(HInstruction* object, 5392 HLoadClass* constant, 5393 TypeCheckKind check_kind, 5394 uint32_t dex_pc) 5395 : HExpression(Primitive::kPrimBoolean, 5396 SideEffectsForArchRuntimeCalls(check_kind), 5397 dex_pc), 5398 check_kind_(check_kind), 5399 must_do_null_check_(true) { 5400 SetRawInputAt(0, object); 5401 SetRawInputAt(1, constant); 5402 } 5403 5404 bool CanBeMoved() const OVERRIDE { return true; } 5405 5406 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5407 return true; 5408 } 5409 5410 bool NeedsEnvironment() const OVERRIDE { 5411 return false; 5412 } 5413 5414 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5415 5416 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5417 5418 // Used only in code generation. 5419 bool MustDoNullCheck() const { return must_do_null_check_; } 5420 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5421 5422 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 5423 return (check_kind == TypeCheckKind::kExactCheck) 5424 ? SideEffects::None() 5425 // Mips currently does runtime calls for any other checks. 5426 : SideEffects::CanTriggerGC(); 5427 } 5428 5429 DECLARE_INSTRUCTION(InstanceOf); 5430 5431 private: 5432 const TypeCheckKind check_kind_; 5433 bool must_do_null_check_; 5434 5435 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 5436}; 5437 5438class HBoundType : public HExpression<1> { 5439 public: 5440 HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc) 5441 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 5442 upper_bound_(ReferenceTypeInfo::CreateInvalid()), 5443 upper_can_be_null_(true), 5444 can_be_null_(true) { 5445 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 5446 SetRawInputAt(0, input); 5447 } 5448 5449 // {Get,Set}Upper* should only be used in reference type propagation. 
5450 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 5451 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 5452 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null); 5453 5454 void SetCanBeNull(bool can_be_null) { 5455 DCHECK(upper_can_be_null_ || !can_be_null); 5456 can_be_null_ = can_be_null; 5457 } 5458 5459 bool CanBeNull() const OVERRIDE { return can_be_null_; } 5460 5461 DECLARE_INSTRUCTION(BoundType); 5462 5463 private: 5464 // Encodes the most upper class that this instruction can have. In other words 5465 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 5466 // It is used to bound the type in cases like: 5467 // if (x instanceof ClassX) { 5468 // // uper_bound_ will be ClassX 5469 // } 5470 ReferenceTypeInfo upper_bound_; 5471 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5472 // is false then can_be_null_ cannot be true). 5473 bool upper_can_be_null_; 5474 bool can_be_null_; 5475 5476 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5477}; 5478 5479class HCheckCast : public HTemplateInstruction<2> { 5480 public: 5481 HCheckCast(HInstruction* object, 5482 HLoadClass* constant, 5483 TypeCheckKind check_kind, 5484 uint32_t dex_pc) 5485 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5486 check_kind_(check_kind), 5487 must_do_null_check_(true) { 5488 SetRawInputAt(0, object); 5489 SetRawInputAt(1, constant); 5490 } 5491 5492 bool CanBeMoved() const OVERRIDE { return true; } 5493 5494 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5495 return true; 5496 } 5497 5498 bool NeedsEnvironment() const OVERRIDE { 5499 // Instruction may throw a CheckCastError. 
5500 return true; 5501 } 5502 5503 bool CanThrow() const OVERRIDE { return true; } 5504 5505 bool MustDoNullCheck() const { return must_do_null_check_; } 5506 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5507 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5508 5509 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5510 5511 DECLARE_INSTRUCTION(CheckCast); 5512 5513 private: 5514 const TypeCheckKind check_kind_; 5515 bool must_do_null_check_; 5516 5517 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5518}; 5519 5520class HMemoryBarrier : public HTemplateInstruction<0> { 5521 public: 5522 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5523 : HTemplateInstruction( 5524 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 5525 barrier_kind_(barrier_kind) {} 5526 5527 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5528 5529 DECLARE_INSTRUCTION(MemoryBarrier); 5530 5531 private: 5532 const MemBarrierKind barrier_kind_; 5533 5534 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5535}; 5536 5537class HMonitorOperation : public HTemplateInstruction<1> { 5538 public: 5539 enum OperationKind { 5540 kEnter, 5541 kExit, 5542 }; 5543 5544 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5545 : HTemplateInstruction( 5546 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5547 kind_(kind) { 5548 SetRawInputAt(0, object); 5549 } 5550 5551 // Instruction may throw a Java exception, so we need an environment. 5552 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5553 5554 bool CanThrow() const OVERRIDE { 5555 // Verifier guarantees that monitor-exit cannot throw. 5556 // This is important because it allows the HGraphBuilder to remove 5557 // a dead throw-catch loop generated for `synchronized` blocks/methods. 
5558 return IsEnter(); 5559 } 5560 5561 5562 bool IsEnter() const { return kind_ == kEnter; } 5563 5564 DECLARE_INSTRUCTION(MonitorOperation); 5565 5566 private: 5567 const OperationKind kind_; 5568 5569 private: 5570 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5571}; 5572 5573/** 5574 * A HInstruction used as a marker for the replacement of new + <init> 5575 * of a String to a call to a StringFactory. Only baseline will see 5576 * the node at code generation, where it will be be treated as null. 5577 * When compiling non-baseline, `HFakeString` instructions are being removed 5578 * in the instruction simplifier. 5579 */ 5580class HFakeString : public HTemplateInstruction<0> { 5581 public: 5582 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5583 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5584 5585 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5586 5587 DECLARE_INSTRUCTION(FakeString); 5588 5589 private: 5590 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5591}; 5592 5593class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5594 public: 5595 MoveOperands(Location source, 5596 Location destination, 5597 Primitive::Type type, 5598 HInstruction* instruction) 5599 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5600 5601 Location GetSource() const { return source_; } 5602 Location GetDestination() const { return destination_; } 5603 5604 void SetSource(Location value) { source_ = value; } 5605 void SetDestination(Location value) { destination_ = value; } 5606 5607 // The parallel move resolver marks moves as "in-progress" by clearing the 5608 // destination (but not the source). 
5609 Location MarkPending() { 5610 DCHECK(!IsPending()); 5611 Location dest = destination_; 5612 destination_ = Location::NoLocation(); 5613 return dest; 5614 } 5615 5616 void ClearPending(Location dest) { 5617 DCHECK(IsPending()); 5618 destination_ = dest; 5619 } 5620 5621 bool IsPending() const { 5622 DCHECK(source_.IsValid() || destination_.IsInvalid()); 5623 return destination_.IsInvalid() && source_.IsValid(); 5624 } 5625 5626 // True if this blocks a move from the given location. 5627 bool Blocks(Location loc) const { 5628 return !IsEliminated() && source_.OverlapsWith(loc); 5629 } 5630 5631 // A move is redundant if it's been eliminated, if its source and 5632 // destination are the same, or if its destination is unneeded. 5633 bool IsRedundant() const { 5634 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5635 } 5636 5637 // We clear both operands to indicate move that's been eliminated. 5638 void Eliminate() { 5639 source_ = destination_ = Location::NoLocation(); 5640 } 5641 5642 bool IsEliminated() const { 5643 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5644 return source_.IsInvalid(); 5645 } 5646 5647 Primitive::Type GetType() const { return type_; } 5648 5649 bool Is64BitMove() const { 5650 return Primitive::Is64BitType(type_); 5651 } 5652 5653 HInstruction* GetInstruction() const { return instruction_; } 5654 5655 private: 5656 Location source_; 5657 Location destination_; 5658 // The type this move is for. 5659 Primitive::Type type_; 5660 // The instruction this move is assocatied with. Null when this move is 5661 // for moving an input in the expected locations of user (including a phi user). 5662 // This is only used in debug mode, to ensure we do not connect interval siblings 5663 // in the same parallel move. 
5664 HInstruction* instruction_; 5665}; 5666 5667std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs); 5668 5669static constexpr size_t kDefaultNumberOfMoves = 4; 5670 5671class HParallelMove : public HTemplateInstruction<0> { 5672 public: 5673 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5674 : HTemplateInstruction(SideEffects::None(), dex_pc), 5675 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5676 moves_.reserve(kDefaultNumberOfMoves); 5677 } 5678 5679 void AddMove(Location source, 5680 Location destination, 5681 Primitive::Type type, 5682 HInstruction* instruction) { 5683 DCHECK(source.IsValid()); 5684 DCHECK(destination.IsValid()); 5685 if (kIsDebugBuild) { 5686 if (instruction != nullptr) { 5687 for (const MoveOperands& move : moves_) { 5688 if (move.GetInstruction() == instruction) { 5689 // Special case the situation where the move is for the spill slot 5690 // of the instruction. 5691 if ((GetPrevious() == instruction) 5692 || ((GetPrevious() == nullptr) 5693 && instruction->IsPhi() 5694 && instruction->GetBlock() == GetBlock())) { 5695 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5696 << "Doing parallel moves for the same instruction."; 5697 } else { 5698 DCHECK(false) << "Doing parallel moves for the same instruction."; 5699 } 5700 } 5701 } 5702 } 5703 for (const MoveOperands& move : moves_) { 5704 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5705 << "Overlapped destination for two moves in a parallel move: " 5706 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5707 << source << " ==> " << destination; 5708 } 5709 } 5710 moves_.emplace_back(source, destination, type, instruction); 5711 } 5712 5713 MoveOperands* MoveOperandsAt(size_t index) { 5714 return &moves_[index]; 5715 } 5716 5717 size_t NumMoves() const { return moves_.size(); } 5718 5719 DECLARE_INSTRUCTION(ParallelMove); 5720 5721 private: 5722 ArenaVector<MoveOperands> moves_; 5723 5724 
DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5725}; 5726 5727} // namespace art 5728 5729#ifdef ART_ENABLE_CODEGEN_arm 5730#include "nodes_arm.h" 5731#endif 5732#ifdef ART_ENABLE_CODEGEN_arm64 5733#include "nodes_arm64.h" 5734#endif 5735#ifdef ART_ENABLE_CODEGEN_x86 5736#include "nodes_x86.h" 5737#endif 5738 5739namespace art { 5740 5741class HGraphVisitor : public ValueObject { 5742 public: 5743 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5744 virtual ~HGraphVisitor() {} 5745 5746 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} 5747 virtual void VisitBasicBlock(HBasicBlock* block); 5748 5749 // Visit the graph following basic block insertion order. 5750 void VisitInsertionOrder(); 5751 5752 // Visit the graph following dominator tree reverse post-order. 5753 void VisitReversePostOrder(); 5754 5755 HGraph* GetGraph() const { return graph_; } 5756 5757 // Visit functions for instruction classes. 5758#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5759 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5760 5761 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5762 5763#undef DECLARE_VISIT_INSTRUCTION 5764 5765 private: 5766 HGraph* const graph_; 5767 5768 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5769}; 5770 5771class HGraphDelegateVisitor : public HGraphVisitor { 5772 public: 5773 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5774 virtual ~HGraphDelegateVisitor() {} 5775 5776 // Visit functions that delegate to to super class. 
5777#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5778 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5779 5780 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5781 5782#undef DECLARE_VISIT_INSTRUCTION 5783 5784 private: 5785 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5786}; 5787 5788class HInsertionOrderIterator : public ValueObject { 5789 public: 5790 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5791 5792 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5793 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5794 void Advance() { ++index_; } 5795 5796 private: 5797 const HGraph& graph_; 5798 size_t index_; 5799 5800 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5801}; 5802 5803class HReversePostOrderIterator : public ValueObject { 5804 public: 5805 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5806 // Check that reverse post order of the graph has been built. 5807 DCHECK(!graph.GetReversePostOrder().empty()); 5808 } 5809 5810 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5811 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5812 void Advance() { ++index_; } 5813 5814 private: 5815 const HGraph& graph_; 5816 size_t index_; 5817 5818 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5819}; 5820 5821class HPostOrderIterator : public ValueObject { 5822 public: 5823 explicit HPostOrderIterator(const HGraph& graph) 5824 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5825 // Check that reverse post order of the graph has been built. 
5826 DCHECK(!graph.GetReversePostOrder().empty()); 5827 } 5828 5829 bool Done() const { return index_ == 0; } 5830 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5831 void Advance() { --index_; } 5832 5833 private: 5834 const HGraph& graph_; 5835 size_t index_; 5836 5837 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5838}; 5839 5840class HLinearPostOrderIterator : public ValueObject { 5841 public: 5842 explicit HLinearPostOrderIterator(const HGraph& graph) 5843 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5844 5845 bool Done() const { return index_ == 0; } 5846 5847 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5848 5849 void Advance() { 5850 --index_; 5851 DCHECK_GE(index_, 0U); 5852 } 5853 5854 private: 5855 const ArenaVector<HBasicBlock*>& order_; 5856 size_t index_; 5857 5858 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5859}; 5860 5861class HLinearOrderIterator : public ValueObject { 5862 public: 5863 explicit HLinearOrderIterator(const HGraph& graph) 5864 : order_(graph.GetLinearOrder()), index_(0) {} 5865 5866 bool Done() const { return index_ == order_.size(); } 5867 HBasicBlock* Current() const { return order_[index_]; } 5868 void Advance() { ++index_; } 5869 5870 private: 5871 const ArenaVector<HBasicBlock*>& order_; 5872 size_t index_; 5873 5874 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5875}; 5876 5877// Iterator over the blocks that art part of the loop. Includes blocks part 5878// of an inner loop. The order in which the blocks are iterated is on their 5879// block id. 
5880class HBlocksInLoopIterator : public ValueObject { 5881 public: 5882 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5883 : blocks_in_loop_(info.GetBlocks()), 5884 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5885 index_(0) { 5886 if (!blocks_in_loop_.IsBitSet(index_)) { 5887 Advance(); 5888 } 5889 } 5890 5891 bool Done() const { return index_ == blocks_.size(); } 5892 HBasicBlock* Current() const { return blocks_[index_]; } 5893 void Advance() { 5894 ++index_; 5895 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5896 if (blocks_in_loop_.IsBitSet(index_)) { 5897 break; 5898 } 5899 } 5900 } 5901 5902 private: 5903 const BitVector& blocks_in_loop_; 5904 const ArenaVector<HBasicBlock*>& blocks_; 5905 size_t index_; 5906 5907 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5908}; 5909 5910// Iterator over the blocks that art part of the loop. Includes blocks part 5911// of an inner loop. The order in which the blocks are iterated is reverse 5912// post order. 
5913class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5914 public: 5915 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5916 : blocks_in_loop_(info.GetBlocks()), 5917 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5918 index_(0) { 5919 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5920 Advance(); 5921 } 5922 } 5923 5924 bool Done() const { return index_ == blocks_.size(); } 5925 HBasicBlock* Current() const { return blocks_[index_]; } 5926 void Advance() { 5927 ++index_; 5928 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5929 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5930 break; 5931 } 5932 } 5933 } 5934 5935 private: 5936 const BitVector& blocks_in_loop_; 5937 const ArenaVector<HBasicBlock*>& blocks_; 5938 size_t index_; 5939 5940 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5941}; 5942 5943inline int64_t Int64FromConstant(HConstant* constant) { 5944 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5945 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5946 : constant->AsLongConstant()->GetValue(); 5947} 5948 5949inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5950 // For the purposes of the compiler, the dex files must actually be the same object 5951 // if we want to safely treat them as the same. This is especially important for JIT 5952 // as custom class loaders can open the same underlying file (or memory) multiple 5953 // times and provide different class resolution but no two class loaders should ever 5954 // use the same DexFile object - doing so is an unsupported hack that can lead to 5955 // all sorts of weird failures. 
5956 return &lhs == &rhs; 5957} 5958 5959#define INSTRUCTION_TYPE_CHECK(type, super) \ 5960 inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \ 5961 inline const H##type* HInstruction::As##type() const { \ 5962 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \ 5963 } \ 5964 inline H##type* HInstruction::As##type() { \ 5965 return Is##type() ? static_cast<H##type*>(this) : nullptr; \ 5966 } 5967 5968 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 5969#undef INSTRUCTION_TYPE_CHECK 5970 5971} // namespace art 5972 5973#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5974