nodes.h revision 884e54c8a45e49b58cb1127c8ed890f79f382601
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18#define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20#include <algorithm> 21#include <array> 22#include <type_traits> 23 24#include "base/arena_bit_vector.h" 25#include "base/arena_containers.h" 26#include "base/arena_object.h" 27#include "base/stl_util.h" 28#include "dex/compiler_enums.h" 29#include "entrypoints/quick/quick_entrypoints_enum.h" 30#include "handle.h" 31#include "handle_scope.h" 32#include "invoke_type.h" 33#include "locations.h" 34#include "method_reference.h" 35#include "mirror/class.h" 36#include "offsets.h" 37#include "primitive.h" 38#include "utils/array_ref.h" 39 40namespace art { 41 42class GraphChecker; 43class HBasicBlock; 44class HCurrentMethod; 45class HDoubleConstant; 46class HEnvironment; 47class HFakeString; 48class HFloatConstant; 49class HGraphBuilder; 50class HGraphVisitor; 51class HInstruction; 52class HIntConstant; 53class HInvoke; 54class HLongConstant; 55class HNullConstant; 56class HPhi; 57class HSuspendCheck; 58class HTryBoundary; 59class LiveInterval; 60class LocationSummary; 61class SlowPathCode; 62class SsaBuilder; 63 64namespace mirror { 65class DexCache; 66} // namespace mirror 67 68static const int kDefaultNumberOfBlocks = 8; 69static const int kDefaultNumberOfSuccessors = 2; 70static const int kDefaultNumberOfPredecessors = 2; 71static const int kDefaultNumberOfExceptionalPredecessors = 0; 72static const int kDefaultNumberOfDominatedBlocks = 1; 73static const int kDefaultNumberOfBackEdges = 1; 74 75static constexpr uint32_t kMaxIntShiftValue = 0x1f; 76static constexpr uint64_t kMaxLongShiftValue = 0x3f; 77 78static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1); 79static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1); 80 81static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1); 82 83static constexpr uint32_t kNoDexPc = -1; 84 85enum IfCondition { 86 // All types. 87 kCondEQ, // == 88 kCondNE, // != 89 // Signed integers and floating-point numbers. 90 kCondLT, // < 91 kCondLE, // <= 92 kCondGT, // > 93 kCondGE, // >= 94 // Unsigned integers. 95 kCondB, // < 96 kCondBE, // <= 97 kCondA, // > 98 kCondAE, // >= 99}; 100 101enum BuildSsaResult { 102 kBuildSsaFailNonNaturalLoop, 103 kBuildSsaFailThrowCatchLoop, 104 kBuildSsaFailAmbiguousArrayOp, 105 kBuildSsaSuccess, 106}; 107 108class HInstructionList : public ValueObject { 109 public: 110 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {} 111 112 void AddInstruction(HInstruction* instruction); 113 void RemoveInstruction(HInstruction* instruction); 114 115 // Insert `instruction` before/after an existing instruction `cursor`. 116 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 117 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 118 119 // Return true if this list contains `instruction`. 120 bool Contains(HInstruction* instruction) const; 121 122 // Return true if `instruction1` is found before `instruction2` in 123 // this instruction list and false otherwise. Abort if none 124 // of these instructions is found. 125 bool FoundBefore(const HInstruction* instruction1, 126 const HInstruction* instruction2) const; 127 128 bool IsEmpty() const { return first_instruction_ == nullptr; } 129 void Clear() { first_instruction_ = last_instruction_ = nullptr; } 130 131 // Update the block of all instructions to be `block`. 132 void SetBlockOfInstructions(HBasicBlock* block) const; 133 134 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list); 135 void Add(const HInstructionList& instruction_list); 136 137 // Return the number of instructions in the list. This is an expensive operation. 138 size_t CountSize() const; 139 140 private: 141 HInstruction* first_instruction_; 142 HInstruction* last_instruction_; 143 144 friend class HBasicBlock; 145 friend class HGraph; 146 friend class HInstruction; 147 friend class HInstructionIterator; 148 friend class HBackwardInstructionIterator; 149 150 DISALLOW_COPY_AND_ASSIGN(HInstructionList); 151}; 152 153class ReferenceTypeInfo : ValueObject { 154 public: 155 typedef Handle<mirror::Class> TypeHandle; 156 157 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) { 158 // The constructor will check that the type_handle is valid. 159 return ReferenceTypeInfo(type_handle, is_exact); 160 } 161 162 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); } 163 164 static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) { 165 return handle.GetReference() != nullptr; 166 } 167 168 bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) { 169 return IsValidHandle(type_handle_); 170 } 171 172 bool IsExact() const { return is_exact_; } 173 174 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 175 DCHECK(IsValid()); 176 return GetTypeHandle()->IsObjectClass(); 177 } 178 179 bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 180 DCHECK(IsValid()); 181 return GetTypeHandle()->IsStringClass(); 182 } 183 184 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) { 185 DCHECK(IsValid()); 186 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass(); 187 } 188 189 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) { 190 DCHECK(IsValid()); 191 return GetTypeHandle()->IsInterface(); 192 } 193 194 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 195 DCHECK(IsValid()); 196 return GetTypeHandle()->IsArrayClass(); 197 } 198 199 bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 200 DCHECK(IsValid()); 201 return GetTypeHandle()->IsPrimitiveArray(); 202 } 203 204 bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) { 205 DCHECK(IsValid()); 206 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray(); 207 } 208 209 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 210 DCHECK(IsValid()); 211 if (!IsExact()) return false; 212 if (!IsArrayClass()) return false; 213 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get()); 214 } 215 216 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 217 DCHECK(IsValid()); 218 if (!IsExact()) return false; 219 if (!IsArrayClass()) return false; 220 if (!rti.IsArrayClass()) return false; 221 return GetTypeHandle()->GetComponentType()->IsAssignableFrom( 222 rti.GetTypeHandle()->GetComponentType()); 223 } 224 225 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; } 226 227 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 228 DCHECK(IsValid()); 229 DCHECK(rti.IsValid()); 230 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 231 } 232 233 bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 234 DCHECK(IsValid()); 235 DCHECK(rti.IsValid()); 236 return GetTypeHandle().Get() != rti.GetTypeHandle().Get() && 237 GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get()); 238 } 239 240 // Returns true if the type information provide the same amount of details. 241 // Note that it does not mean that the instructions have the same actual type 242 // (because the type can be the result of a merge). 243 bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) { 244 if (!IsValid() && !rti.IsValid()) { 245 // Invalid types are equal. 246 return true; 247 } 248 if (!IsValid() || !rti.IsValid()) { 249 // One is valid, the other not. 250 return false; 251 } 252 return IsExact() == rti.IsExact() 253 && GetTypeHandle().Get() == rti.GetTypeHandle().Get(); 254 } 255 256 private: 257 ReferenceTypeInfo(); 258 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact); 259 260 // The class of the object. 261 TypeHandle type_handle_; 262 // Whether or not the type is exact or a superclass of the actual type. 263 // Whether or not we have any information about this type. 264 bool is_exact_; 265}; 266 267std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs); 268 269// Control-flow graph of a method. Contains a list of basic blocks. 270class HGraph : public ArenaObject<kArenaAllocGraph> { 271 public: 272 HGraph(ArenaAllocator* arena, 273 const DexFile& dex_file, 274 uint32_t method_idx, 275 bool should_generate_constructor_barrier, 276 InstructionSet instruction_set, 277 InvokeType invoke_type = kInvalidInvokeType, 278 bool debuggable = false, 279 int start_instruction_id = 0) 280 : arena_(arena), 281 blocks_(arena->Adapter(kArenaAllocBlockList)), 282 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 283 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 284 entry_block_(nullptr), 285 exit_block_(nullptr), 286 maximum_number_of_out_vregs_(0), 287 number_of_vregs_(0), 288 number_of_in_vregs_(0), 289 temporaries_vreg_slots_(0), 290 has_bounds_checks_(false), 291 has_try_catch_(false), 292 debuggable_(debuggable), 293 current_instruction_id_(start_instruction_id), 294 dex_file_(dex_file), 295 method_idx_(method_idx), 296 invoke_type_(invoke_type), 297 in_ssa_form_(false), 298 should_generate_constructor_barrier_(should_generate_constructor_barrier), 299 instruction_set_(instruction_set), 300 cached_null_constant_(nullptr), 301 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 302 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 303 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 304 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 305 cached_current_method_(nullptr), 306 inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()) { 307 blocks_.reserve(kDefaultNumberOfBlocks); 308 } 309 310 ArenaAllocator* GetArena() const { return arena_; } 311 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 312 313 bool IsInSsaForm() const { return in_ssa_form_; } 314 315 HBasicBlock* GetEntryBlock() const { return entry_block_; } 316 HBasicBlock* GetExitBlock() const { return exit_block_; } 317 bool HasExitBlock() const { return exit_block_ != nullptr; } 318 319 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 320 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 321 322 void AddBlock(HBasicBlock* block); 323 324 // Try building the SSA form of this graph, with dominance computation and 325 // loop recognition. Returns a code specifying that it was successful or the 326 // reason for failure. 327 BuildSsaResult TryBuildingSsa(StackHandleScopeCollection* handles); 328 329 void ComputeDominanceInformation(); 330 void ClearDominanceInformation(); 331 332 void BuildDominatorTree(); 333 void SimplifyCFG(); 334 void SimplifyCatchBlocks(); 335 336 // Analyze all natural loops in this graph. Returns a code specifying that it 337 // was successful or the reason for failure. The method will fail if a loop 338 // is not natural, that is the header does not dominate a back edge, or if it 339 // is a throw-catch loop, i.e. the header is a catch block. 340 BuildSsaResult AnalyzeNaturalLoops() const; 341 342 // Iterate over blocks to compute try block membership. Needs reverse post 343 // order and loop information. 344 void ComputeTryBlockInformation(); 345 346 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 347 // Returns the instruction used to replace the invoke expression or null if the 348 // invoke is for a void method. 349 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 350 351 // Need to add a couple of blocks to test if the loop body is entered and 352 // put deoptimization instructions, etc. 353 void TransformLoopHeaderForBCE(HBasicBlock* header); 354 355 // Removes `block` from the graph. Assumes `block` has been disconnected from 356 // other blocks and has no instructions or phis. 357 void DeleteDeadEmptyBlock(HBasicBlock* block); 358 359 // Splits the edge between `block` and `successor` while preserving the 360 // indices in the predecessor/successor lists. If there are multiple edges 361 // between the blocks, the lowest indices are used. 362 // Returns the new block which is empty and has the same dex pc as `successor`. 363 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 364 365 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 366 void SimplifyLoop(HBasicBlock* header); 367 368 int32_t GetNextInstructionId() { 369 DCHECK_NE(current_instruction_id_, INT32_MAX); 370 return current_instruction_id_++; 371 } 372 373 int32_t GetCurrentInstructionId() const { 374 return current_instruction_id_; 375 } 376 377 void SetCurrentInstructionId(int32_t id) { 378 current_instruction_id_ = id; 379 } 380 381 uint16_t GetMaximumNumberOfOutVRegs() const { 382 return maximum_number_of_out_vregs_; 383 } 384 385 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 386 maximum_number_of_out_vregs_ = new_value; 387 } 388 389 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 390 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 391 } 392 393 void UpdateTemporariesVRegSlots(size_t slots) { 394 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 395 } 396 397 size_t GetTemporariesVRegSlots() const { 398 DCHECK(!in_ssa_form_); 399 return temporaries_vreg_slots_; 400 } 401 402 void SetNumberOfVRegs(uint16_t number_of_vregs) { 403 number_of_vregs_ = number_of_vregs; 404 } 405 406 uint16_t GetNumberOfVRegs() const { 407 return number_of_vregs_; 408 } 409 410 void SetNumberOfInVRegs(uint16_t value) { 411 number_of_in_vregs_ = value; 412 } 413 414 uint16_t GetNumberOfLocalVRegs() const { 415 DCHECK(!in_ssa_form_); 416 return number_of_vregs_ - number_of_in_vregs_; 417 } 418 419 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 420 return reverse_post_order_; 421 } 422 423 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 424 return linear_order_; 425 } 426 427 bool HasBoundsChecks() const { 428 return has_bounds_checks_; 429 } 430 431 void SetHasBoundsChecks(bool value) { 432 has_bounds_checks_ = value; 433 } 434 435 bool ShouldGenerateConstructorBarrier() const { 436 return should_generate_constructor_barrier_; 437 } 438 439 bool IsDebuggable() const { return debuggable_; } 440 441 // Returns a constant of the given type and value. If it does not exist 442 // already, it is created and inserted into the graph. This method is only for 443 // integral types. 444 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 445 446 // TODO: This is problematic for the consistency of reference type propagation 447 // because it can be created anytime after the pass and thus it will be left 448 // with an invalid type. 449 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 450 451 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 452 return CreateConstant(value, &cached_int_constants_, dex_pc); 453 } 454 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 455 return CreateConstant(value, &cached_long_constants_, dex_pc); 456 } 457 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 458 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 459 } 460 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 461 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 462 } 463 464 HCurrentMethod* GetCurrentMethod(); 465 466 const DexFile& GetDexFile() const { 467 return dex_file_; 468 } 469 470 uint32_t GetMethodIdx() const { 471 return method_idx_; 472 } 473 474 InvokeType GetInvokeType() const { 475 return invoke_type_; 476 } 477 478 InstructionSet GetInstructionSet() const { 479 return instruction_set_; 480 } 481 482 bool HasTryCatch() const { return has_try_catch_; } 483 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 484 485 ArtMethod* GetArtMethod() const { return art_method_; } 486 void SetArtMethod(ArtMethod* method) { art_method_ = method; } 487 488 // Returns an instruction with the opposite boolean value from 'cond'. 489 // The instruction has been inserted into the graph, either as a constant, or 490 // before cursor. 491 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor); 492 493 private: 494 void FindBackEdges(ArenaBitVector* visited); 495 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 496 void RemoveDeadBlocks(const ArenaBitVector& visited); 497 498 template <class InstructionType, typename ValueType> 499 InstructionType* CreateConstant(ValueType value, 500 ArenaSafeMap<ValueType, InstructionType*>* cache, 501 uint32_t dex_pc = kNoDexPc) { 502 // Try to find an existing constant of the given value. 503 InstructionType* constant = nullptr; 504 auto cached_constant = cache->find(value); 505 if (cached_constant != cache->end()) { 506 constant = cached_constant->second; 507 } 508 509 // If not found or previously deleted, create and cache a new instruction. 510 // Don't bother reviving a previously deleted instruction, for simplicity. 511 if (constant == nullptr || constant->GetBlock() == nullptr) { 512 constant = new (arena_) InstructionType(value, dex_pc); 513 cache->Overwrite(value, constant); 514 InsertConstant(constant); 515 } 516 return constant; 517 } 518 519 void InsertConstant(HConstant* instruction); 520 521 // Cache a float constant into the graph. This method should only be 522 // called by the SsaBuilder when creating "equivalent" instructions. 523 void CacheFloatConstant(HFloatConstant* constant); 524 525 // See CacheFloatConstant comment. 526 void CacheDoubleConstant(HDoubleConstant* constant); 527 528 ArenaAllocator* const arena_; 529 530 // List of blocks in insertion order. 531 ArenaVector<HBasicBlock*> blocks_; 532 533 // List of blocks to perform a reverse post order tree traversal. 534 ArenaVector<HBasicBlock*> reverse_post_order_; 535 536 // List of blocks to perform a linear order tree traversal. 537 ArenaVector<HBasicBlock*> linear_order_; 538 539 HBasicBlock* entry_block_; 540 HBasicBlock* exit_block_; 541 542 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 543 uint16_t maximum_number_of_out_vregs_; 544 545 // The number of virtual registers in this method. Contains the parameters. 546 uint16_t number_of_vregs_; 547 548 // The number of virtual registers used by parameters of this method. 549 uint16_t number_of_in_vregs_; 550 551 // Number of vreg size slots that the temporaries use (used in baseline compiler). 552 size_t temporaries_vreg_slots_; 553 554 // Has bounds checks. We can totally skip BCE if it's false. 555 bool has_bounds_checks_; 556 557 // Flag whether there are any try/catch blocks in the graph. We will skip 558 // try/catch-related passes if false. 559 bool has_try_catch_; 560 561 // Indicates whether the graph should be compiled in a way that 562 // ensures full debuggability. If false, we can apply more 563 // aggressive optimizations that may limit the level of debugging. 564 const bool debuggable_; 565 566 // The current id to assign to a newly added instruction. See HInstruction.id_. 567 int32_t current_instruction_id_; 568 569 // The dex file from which the method is from. 570 const DexFile& dex_file_; 571 572 // The method index in the dex file. 573 const uint32_t method_idx_; 574 575 // If inlined, this encodes how the callee is being invoked. 576 const InvokeType invoke_type_; 577 578 // Whether the graph has been transformed to SSA form. Only used 579 // in debug mode to ensure we are not using properties only valid 580 // for non-SSA form (like the number of temporaries). 581 bool in_ssa_form_; 582 583 const bool should_generate_constructor_barrier_; 584 585 const InstructionSet instruction_set_; 586 587 // Cached constants. 588 HNullConstant* cached_null_constant_; 589 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 590 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 591 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 592 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 593 594 HCurrentMethod* cached_current_method_; 595 596 // The ArtMethod this graph is for. Note that for AOT, it may be null, 597 // for example for methods whose declaring class could not be resolved 598 // (such as when the superclass could not be found). 599 ArtMethod* art_method_; 600 601 // Keep the RTI of inexact Object to avoid having to pass stack handle 602 // collection pointer to passes which may create NullConstant. 603 ReferenceTypeInfo inexact_object_rti_; 604 605 friend class SsaBuilder; // For caching constants. 606 friend class SsaLivenessAnalysis; // For the linear order. 607 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 608 DISALLOW_COPY_AND_ASSIGN(HGraph); 609}; 610 611class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 612 public: 613 HLoopInformation(HBasicBlock* header, HGraph* graph) 614 : header_(header), 615 suspend_check_(nullptr), 616 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 617 // Make bit vector growable, as the number of blocks may change. 618 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 619 back_edges_.reserve(kDefaultNumberOfBackEdges); 620 } 621 622 HBasicBlock* GetHeader() const { 623 return header_; 624 } 625 626 void SetHeader(HBasicBlock* block) { 627 header_ = block; 628 } 629 630 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 631 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 632 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 633 634 void AddBackEdge(HBasicBlock* back_edge) { 635 back_edges_.push_back(back_edge); 636 } 637 638 void RemoveBackEdge(HBasicBlock* back_edge) { 639 RemoveElement(back_edges_, back_edge); 640 } 641 642 bool IsBackEdge(const HBasicBlock& block) const { 643 return ContainsElement(back_edges_, &block); 644 } 645 646 size_t NumberOfBackEdges() const { 647 return back_edges_.size(); 648 } 649 650 HBasicBlock* GetPreHeader() const; 651 652 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 653 return back_edges_; 654 } 655 656 // Returns the lifetime position of the back edge that has the 657 // greatest lifetime position. 658 size_t GetLifetimeEnd() const; 659 660 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 661 ReplaceElement(back_edges_, existing, new_back_edge); 662 } 663 664 // Finds blocks that are part of this loop. Returns whether the loop is a natural loop, 665 // that is the header dominates the back edge. 666 bool Populate(); 667 668 // Reanalyzes the loop by removing loop info from its blocks and re-running 669 // Populate(). If there are no back edges left, the loop info is completely 670 // removed as well as its SuspendCheck instruction. It must be run on nested 671 // inner loops first. 672 void Update(); 673 674 // Returns whether this loop information contains `block`. 675 // Note that this loop information *must* be populated before entering this function. 676 bool Contains(const HBasicBlock& block) const; 677 678 // Returns whether this loop information is an inner loop of `other`. 679 // Note that `other` *must* be populated before entering this function. 680 bool IsIn(const HLoopInformation& other) const; 681 682 // Returns true if instruction is not defined within this loop. 683 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const; 684 685 const ArenaBitVector& GetBlocks() const { return blocks_; } 686 687 void Add(HBasicBlock* block); 688 void Remove(HBasicBlock* block); 689 690 private: 691 // Internal recursive implementation of `Populate`. 692 void PopulateRecursive(HBasicBlock* block); 693 694 HBasicBlock* header_; 695 HSuspendCheck* suspend_check_; 696 ArenaVector<HBasicBlock*> back_edges_; 697 ArenaBitVector blocks_; 698 699 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 700}; 701 702// Stores try/catch information for basic blocks. 703// Note that HGraph is constructed so that catch blocks cannot simultaneously 704// be try blocks. 705class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 706 public: 707 // Try block information constructor. 708 explicit TryCatchInformation(const HTryBoundary& try_entry) 709 : try_entry_(&try_entry), 710 catch_dex_file_(nullptr), 711 catch_type_index_(DexFile::kDexNoIndex16) { 712 DCHECK(try_entry_ != nullptr); 713 } 714 715 // Catch block information constructor. 716 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 717 : try_entry_(nullptr), 718 catch_dex_file_(&dex_file), 719 catch_type_index_(catch_type_index) {} 720 721 bool IsTryBlock() const { return try_entry_ != nullptr; } 722 723 const HTryBoundary& GetTryEntry() const { 724 DCHECK(IsTryBlock()); 725 return *try_entry_; 726 } 727 728 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 729 730 bool IsCatchAllTypeIndex() const { 731 DCHECK(IsCatchBlock()); 732 return catch_type_index_ == DexFile::kDexNoIndex16; 733 } 734 735 uint16_t GetCatchTypeIndex() const { 736 DCHECK(IsCatchBlock()); 737 return catch_type_index_; 738 } 739 740 const DexFile& GetCatchDexFile() const { 741 DCHECK(IsCatchBlock()); 742 return *catch_dex_file_; 743 } 744 745 private: 746 // One of possibly several TryBoundary instructions entering the block's try. 747 // Only set for try blocks. 748 const HTryBoundary* try_entry_; 749 750 // Exception type information. Only set for catch blocks. 751 const DexFile* catch_dex_file_; 752 const uint16_t catch_type_index_; 753}; 754 755static constexpr size_t kNoLifetime = -1; 756static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 757 758// A block in a method. Contains the list of instructions represented 759// as a double linked list. Each block knows its predecessors and 760// successors. 761 762class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 763 public: 764 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 765 : graph_(graph), 766 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 767 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 768 loop_information_(nullptr), 769 dominator_(nullptr), 770 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 771 block_id_(kInvalidBlockId), 772 dex_pc_(dex_pc), 773 lifetime_start_(kNoLifetime), 774 lifetime_end_(kNoLifetime), 775 try_catch_information_(nullptr) { 776 predecessors_.reserve(kDefaultNumberOfPredecessors); 777 successors_.reserve(kDefaultNumberOfSuccessors); 778 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 779 } 780 781 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 782 return predecessors_; 783 } 784 785 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 786 return successors_; 787 } 788 789 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const; 790 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const; 791 792 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 793 return ContainsElement(successors_, block, start_from); 794 } 795 796 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 797 return dominated_blocks_; 798 } 799 800 bool IsEntryBlock() const { 801 return graph_->GetEntryBlock() == this; 802 } 803 804 bool IsExitBlock() const { 805 return graph_->GetExitBlock() == this; 806 } 807 808 bool IsSingleGoto() const; 809 bool IsSingleTryBoundary() const; 810 811 // Returns true if this block emits nothing but a jump. 812 bool IsSingleJump() const { 813 HLoopInformation* loop_info = GetLoopInformation(); 814 return (IsSingleGoto() || IsSingleTryBoundary()) 815 // Back edges generate a suspend check. 816 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 817 } 818 819 void AddBackEdge(HBasicBlock* back_edge) { 820 if (loop_information_ == nullptr) { 821 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 822 } 823 DCHECK_EQ(loop_information_->GetHeader(), this); 824 loop_information_->AddBackEdge(back_edge); 825 } 826 827 HGraph* GetGraph() const { return graph_; } 828 void SetGraph(HGraph* graph) { graph_ = graph; } 829 830 uint32_t GetBlockId() const { return block_id_; } 831 void SetBlockId(int id) { block_id_ = id; } 832 uint32_t GetDexPc() const { return dex_pc_; } 833 834 HBasicBlock* GetDominator() const { return dominator_; } 835 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 836 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 837 838 void RemoveDominatedBlock(HBasicBlock* block) { 839 RemoveElement(dominated_blocks_, block); 840 } 841 842 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 843 ReplaceElement(dominated_blocks_, existing, new_block); 844 } 845 846 void ClearDominanceInformation(); 847 848 int NumberOfBackEdges() const { 849 return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0; 850 } 851 852 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; } 853 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; } 854 const HInstructionList& GetInstructions() const { return instructions_; } 855 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; } 856 HInstruction* GetLastPhi() const { return phis_.last_instruction_; } 857 const HInstructionList& GetPhis() const { return phis_; } 858 859 void AddSuccessor(HBasicBlock* block) { 860 successors_.push_back(block); 861 block->predecessors_.push_back(this); 862 } 863 864 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) { 865 size_t successor_index = GetSuccessorIndexOf(existing); 866 existing->RemovePredecessor(this); 867 new_block->predecessors_.push_back(this); 868 successors_[successor_index] = new_block; 869 } 870 871 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) { 872 size_t predecessor_index = GetPredecessorIndexOf(existing); 873 existing->RemoveSuccessor(this); 874 new_block->successors_.push_back(this); 875 predecessors_[predecessor_index] = new_block; 876 } 877 878 // Insert `this` between `predecessor` and `successor. This method 879 // preserves the indicies, and will update the first edge found between 880 // `predecessor` and `successor`. 881 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) { 882 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor); 883 size_t successor_index = predecessor->GetSuccessorIndexOf(successor); 884 successor->predecessors_[predecessor_index] = this; 885 predecessor->successors_[successor_index] = this; 886 successors_.push_back(successor); 887 predecessors_.push_back(predecessor); 888 } 889 890 void RemovePredecessor(HBasicBlock* block) { 891 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block)); 892 } 893 894 void RemoveSuccessor(HBasicBlock* block) { 895 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block)); 896 } 897 898 void ClearAllPredecessors() { 899 predecessors_.clear(); 900 } 901 902 void AddPredecessor(HBasicBlock* block) { 903 predecessors_.push_back(block); 904 block->successors_.push_back(this); 905 } 906 907 void SwapPredecessors() { 908 DCHECK_EQ(predecessors_.size(), 2u); 909 std::swap(predecessors_[0], predecessors_[1]); 910 } 911 912 void SwapSuccessors() { 913 DCHECK_EQ(successors_.size(), 2u); 914 std::swap(successors_[0], successors_[1]); 915 } 916 917 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const { 918 return IndexOfElement(predecessors_, predecessor); 919 } 920 921 size_t GetSuccessorIndexOf(HBasicBlock* successor) const { 922 return IndexOfElement(successors_, successor); 923 } 924 925 HBasicBlock* GetSinglePredecessor() const { 926 DCHECK_EQ(GetPredecessors().size(), 1u); 927 return GetPredecessors()[0]; 928 } 929 930 HBasicBlock* GetSingleSuccessor() const { 931 DCHECK_EQ(GetSuccessors().size(), 1u); 932 return GetSuccessors()[0]; 933 } 934 935 // Returns whether the first occurrence of `predecessor` in the list of 936 // predecessors is at index `idx`. 937 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const { 938 DCHECK_EQ(GetPredecessors()[idx], predecessor); 939 return GetPredecessorIndexOf(predecessor) == idx; 940 } 941 942 // Create a new block between this block and its predecessors. The new block 943 // is added to the graph, all predecessor edges are relinked to it and an edge 944 // is created to `this`. Returns the new empty block. Reverse post order or 945 // loop and try/catch information are not updated. 946 HBasicBlock* CreateImmediateDominator(); 947 948 // Split the block into two blocks just before `cursor`. Returns the newly 949 // created, latter block. Note that this method will add the block to the 950 // graph, create a Goto at the end of the former block and will create an edge 951 // between the blocks. It will not, however, update the reverse post order or 952 // loop and try/catch information. 953 HBasicBlock* SplitBefore(HInstruction* cursor); 954 955 // Split the block into two blocks just after `cursor`. Returns the newly 956 // created block. Note that this method just updates raw block information, 957 // like predecessors, successors, dominators, and instruction list. It does not 958 // update the graph, reverse post order, loop information, nor make sure the 959 // blocks are consistent (for example ending with a control flow instruction). 960 HBasicBlock* SplitAfter(HInstruction* cursor); 961 962 // Split catch block into two blocks after the original move-exception bytecode 963 // instruction, or at the beginning if not present. Returns the newly created, 964 // latter block, or nullptr if such block could not be created (must be dead 965 // in that case). Note that this method just updates raw block information, 966 // like predecessors, successors, dominators, and instruction list. It does not 967 // update the graph, reverse post order, loop information, nor make sure the 968 // blocks are consistent (for example ending with a control flow instruction). 969 HBasicBlock* SplitCatchBlockAfterMoveException(); 970 971 // Merge `other` at the end of `this`. Successors and dominated blocks of 972 // `other` are changed to be successors and dominated blocks of `this`. Note 973 // that this method does not update the graph, reverse post order, loop 974 // information, nor make sure the blocks are consistent (for example ending 975 // with a control flow instruction). 976 void MergeWithInlined(HBasicBlock* other); 977 978 // Replace `this` with `other`. Predecessors, successors, and dominated blocks 979 // of `this` are moved to `other`. 980 // Note that this method does not update the graph, reverse post order, loop 981 // information, nor make sure the blocks are consistent (for example ending 982 // with a control flow instruction). 983 void ReplaceWith(HBasicBlock* other); 984 985 // Merge `other` at the end of `this`. This method updates loops, reverse post 986 // order, links to predecessors, successors, dominators and deletes the block 987 // from the graph. The two blocks must be successive, i.e. `this` the only 988 // predecessor of `other` and vice versa. 989 void MergeWith(HBasicBlock* other); 990 991 // Disconnects `this` from all its predecessors, successors and dominator, 992 // removes it from all loops it is included in and eventually from the graph. 993 // The block must not dominate any other block. Predecessors and successors 994 // are safely updated. 995 void DisconnectAndDelete(); 996 997 void AddInstruction(HInstruction* instruction); 998 // Insert `instruction` before/after an existing instruction `cursor`. 999 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor); 1000 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor); 1001 // Replace instruction `initial` with `replacement` within this block. 1002 void ReplaceAndRemoveInstructionWith(HInstruction* initial, 1003 HInstruction* replacement); 1004 void AddPhi(HPhi* phi); 1005 void InsertPhiAfter(HPhi* instruction, HPhi* cursor); 1006 // RemoveInstruction and RemovePhi delete a given instruction from the respective 1007 // instruction list. With 'ensure_safety' set to true, it verifies that the 1008 // instruction is not in use and removes it from the use lists of its inputs. 1009 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true); 1010 void RemovePhi(HPhi* phi, bool ensure_safety = true); 1011 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true); 1012 1013 bool IsLoopHeader() const { 1014 return IsInLoop() && (loop_information_->GetHeader() == this); 1015 } 1016 1017 bool IsLoopPreHeaderFirstPredecessor() const { 1018 DCHECK(IsLoopHeader()); 1019 return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader(); 1020 } 1021 1022 HLoopInformation* GetLoopInformation() const { 1023 return loop_information_; 1024 } 1025 1026 // Set the loop_information_ on this block. Overrides the current 1027 // loop_information if it is an outer loop of the passed loop information. 1028 // Note that this method is called while creating the loop information. 1029 void SetInLoop(HLoopInformation* info) { 1030 if (IsLoopHeader()) { 1031 // Nothing to do. This just means `info` is an outer loop. 1032 } else if (!IsInLoop()) { 1033 loop_information_ = info; 1034 } else if (loop_information_->Contains(*info->GetHeader())) { 1035 // Block is currently part of an outer loop. Make it part of this inner loop. 1036 // Note that a non loop header having a loop information means this loop information 1037 // has already been populated 1038 loop_information_ = info; 1039 } else { 1040 // Block is part of an inner loop. Do not update the loop information. 1041 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()` 1042 // at this point, because this method is being called while populating `info`. 1043 } 1044 } 1045 1046 // Raw update of the loop information. 1047 void SetLoopInformation(HLoopInformation* info) { 1048 loop_information_ = info; 1049 } 1050 1051 bool IsInLoop() const { return loop_information_ != nullptr; } 1052 1053 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; } 1054 1055 void SetTryCatchInformation(TryCatchInformation* try_catch_information) { 1056 try_catch_information_ = try_catch_information; 1057 } 1058 1059 bool IsTryBlock() const { 1060 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock(); 1061 } 1062 1063 bool IsCatchBlock() const { 1064 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock(); 1065 } 1066 1067 // Returns the try entry that this block's successors should have. They will 1068 // be in the same try, unless the block ends in a try boundary. In that case, 1069 // the appropriate try entry will be returned. 1070 const HTryBoundary* ComputeTryEntryOfSuccessors() const; 1071 1072 bool HasThrowingInstructions() const; 1073 1074 // Returns whether this block dominates the blocked passed as parameter. 1075 bool Dominates(HBasicBlock* block) const; 1076 1077 size_t GetLifetimeStart() const { return lifetime_start_; } 1078 size_t GetLifetimeEnd() const { return lifetime_end_; } 1079 1080 void SetLifetimeStart(size_t start) { lifetime_start_ = start; } 1081 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; } 1082 1083 bool EndsWithControlFlowInstruction() const; 1084 bool EndsWithIf() const; 1085 bool EndsWithTryBoundary() const; 1086 bool HasSinglePhi() const; 1087 1088 private: 1089 HGraph* graph_; 1090 ArenaVector<HBasicBlock*> predecessors_; 1091 ArenaVector<HBasicBlock*> successors_; 1092 HInstructionList instructions_; 1093 HInstructionList phis_; 1094 HLoopInformation* loop_information_; 1095 HBasicBlock* dominator_; 1096 ArenaVector<HBasicBlock*> dominated_blocks_; 1097 uint32_t block_id_; 1098 // The dex program counter of the first instruction of this block. 1099 const uint32_t dex_pc_; 1100 size_t lifetime_start_; 1101 size_t lifetime_end_; 1102 TryCatchInformation* try_catch_information_; 1103 1104 friend class HGraph; 1105 friend class HInstruction; 1106 1107 DISALLOW_COPY_AND_ASSIGN(HBasicBlock); 1108}; 1109 1110// Iterates over the LoopInformation of all loops which contain 'block' 1111// from the innermost to the outermost. 1112class HLoopInformationOutwardIterator : public ValueObject { 1113 public: 1114 explicit HLoopInformationOutwardIterator(const HBasicBlock& block) 1115 : current_(block.GetLoopInformation()) {} 1116 1117 bool Done() const { return current_ == nullptr; } 1118 1119 void Advance() { 1120 DCHECK(!Done()); 1121 current_ = current_->GetPreHeader()->GetLoopInformation(); 1122 } 1123 1124 HLoopInformation* Current() const { 1125 DCHECK(!Done()); 1126 return current_; 1127 } 1128 1129 private: 1130 HLoopInformation* current_; 1131 1132 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator); 1133}; 1134 1135#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1136 M(Above, Condition) \ 1137 M(AboveOrEqual, Condition) \ 1138 M(Add, BinaryOperation) \ 1139 M(And, BinaryOperation) \ 1140 M(ArrayGet, Instruction) \ 1141 M(ArrayLength, Instruction) \ 1142 M(ArraySet, Instruction) \ 1143 M(Below, Condition) \ 1144 M(BelowOrEqual, Condition) \ 1145 M(BooleanNot, UnaryOperation) \ 1146 M(BoundsCheck, Instruction) \ 1147 M(BoundType, Instruction) \ 1148 M(CheckCast, Instruction) \ 1149 M(ClearException, Instruction) \ 1150 M(ClinitCheck, Instruction) \ 1151 M(Compare, BinaryOperation) \ 1152 M(CurrentMethod, Instruction) \ 1153 M(Deoptimize, Instruction) \ 1154 M(Div, BinaryOperation) \ 1155 M(DivZeroCheck, Instruction) \ 1156 M(DoubleConstant, Constant) \ 1157 M(Equal, Condition) \ 1158 M(Exit, Instruction) \ 1159 M(FakeString, Instruction) \ 1160 M(FloatConstant, Constant) \ 1161 M(Goto, Instruction) \ 1162 M(GreaterThan, Condition) \ 1163 M(GreaterThanOrEqual, Condition) \ 1164 M(If, Instruction) \ 1165 M(InstanceFieldGet, Instruction) \ 1166 M(InstanceFieldSet, Instruction) \ 1167 M(InstanceOf, Instruction) \ 1168 M(IntConstant, Constant) \ 1169 M(InvokeUnresolved, Invoke) \ 1170 M(InvokeInterface, Invoke) \ 1171 M(InvokeStaticOrDirect, Invoke) \ 1172 M(InvokeVirtual, Invoke) \ 1173 M(LessThan, Condition) \ 1174 M(LessThanOrEqual, Condition) \ 1175 M(LoadClass, Instruction) \ 1176 M(LoadException, Instruction) \ 1177 M(LoadLocal, Instruction) \ 1178 M(LoadString, Instruction) \ 1179 M(Local, Instruction) \ 1180 M(LongConstant, Constant) \ 1181 M(MemoryBarrier, Instruction) \ 1182 M(MonitorOperation, Instruction) \ 1183 M(Mul, BinaryOperation) \ 1184 M(NativeDebugInfo, Instruction) \ 1185 M(Neg, UnaryOperation) \ 1186 M(NewArray, Instruction) \ 1187 M(NewInstance, Instruction) \ 1188 M(Not, UnaryOperation) \ 1189 M(NotEqual, Condition) \ 1190 M(NullConstant, Instruction) \ 1191 M(NullCheck, Instruction) \ 1192 M(Or, BinaryOperation) \ 1193 M(PackedSwitch, Instruction) \ 1194 M(ParallelMove, Instruction) \ 1195 M(ParameterValue, Instruction) \ 1196 M(Phi, Instruction) \ 1197 M(Rem, BinaryOperation) \ 1198 M(Return, Instruction) \ 1199 M(ReturnVoid, Instruction) \ 1200 M(Ror, BinaryOperation) \ 1201 M(Shl, BinaryOperation) \ 1202 M(Shr, BinaryOperation) \ 1203 M(StaticFieldGet, Instruction) \ 1204 M(StaticFieldSet, Instruction) \ 1205 M(UnresolvedInstanceFieldGet, Instruction) \ 1206 M(UnresolvedInstanceFieldSet, Instruction) \ 1207 M(UnresolvedStaticFieldGet, Instruction) \ 1208 M(UnresolvedStaticFieldSet, Instruction) \ 1209 M(StoreLocal, Instruction) \ 1210 M(Sub, BinaryOperation) \ 1211 M(SuspendCheck, Instruction) \ 1212 M(Temporary, Instruction) \ 1213 M(Throw, Instruction) \ 1214 M(TryBoundary, Instruction) \ 1215 M(TypeConversion, Instruction) \ 1216 M(UShr, BinaryOperation) \ 1217 M(Xor, BinaryOperation) \ 1218 1219#ifndef ART_ENABLE_CODEGEN_arm 1220#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) 1221#else 1222#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \ 1223 M(ArmDexCacheArraysBase, Instruction) 1224#endif 1225 1226#ifndef ART_ENABLE_CODEGEN_arm64 1227#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) 1228#else 1229#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1230 M(Arm64DataProcWithShifterOp, Instruction) \ 1231 M(Arm64IntermediateAddress, Instruction) \ 1232 M(Arm64MultiplyAccumulate, Instruction) 1233#endif 1234 1235#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) 1236 1237#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) 1238 1239#ifndef ART_ENABLE_CODEGEN_x86 1240#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) 1241#else 1242#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1243 M(X86ComputeBaseMethodAddress, Instruction) \ 1244 M(X86LoadFromConstantTable, Instruction) \ 1245 M(X86PackedSwitch, Instruction) 1246#endif 1247 1248#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1249 1250#define FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1251 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \ 1252 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \ 1253 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \ 1254 FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \ 1255 FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \ 1256 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \ 1257 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) 1258 1259#define FOR_EACH_ABSTRACT_INSTRUCTION(M) \ 1260 M(Condition, BinaryOperation) \ 1261 M(Constant, Instruction) \ 1262 M(UnaryOperation, Instruction) \ 1263 M(BinaryOperation, Instruction) \ 1264 M(Invoke, Instruction) 1265 1266#define FOR_EACH_INSTRUCTION(M) \ 1267 FOR_EACH_CONCRETE_INSTRUCTION(M) \ 1268 FOR_EACH_ABSTRACT_INSTRUCTION(M) 1269 1270#define FORWARD_DECLARATION(type, super) class H##type; 1271FOR_EACH_INSTRUCTION(FORWARD_DECLARATION) 1272#undef FORWARD_DECLARATION 1273 1274#define DECLARE_INSTRUCTION(type) \ 1275 InstructionKind GetKindInternal() const OVERRIDE { return k##type; } \ 1276 const char* DebugName() const OVERRIDE { return #type; } \ 1277 bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \ 1278 return other->Is##type(); \ 1279 } \ 1280 void Accept(HGraphVisitor* visitor) OVERRIDE 1281 1282#define DECLARE_ABSTRACT_INSTRUCTION(type) \ 1283 bool Is##type() const { return As##type() != nullptr; } \ 1284 const H##type* As##type() const { return this; } \ 1285 H##type* As##type() { return this; } 1286 1287template <typename T> class HUseList; 1288 1289template <typename T> 1290class HUseListNode : public ArenaObject<kArenaAllocUseListNode> { 1291 public: 1292 HUseListNode* GetPrevious() const { return prev_; } 1293 HUseListNode* GetNext() const { return next_; } 1294 T GetUser() const { return user_; } 1295 size_t GetIndex() const { return index_; } 1296 void SetIndex(size_t index) { index_ = index; } 1297 1298 private: 1299 HUseListNode(T user, size_t index) 1300 : user_(user), index_(index), prev_(nullptr), next_(nullptr) {} 1301 1302 T const user_; 1303 size_t index_; 1304 HUseListNode<T>* prev_; 1305 HUseListNode<T>* next_; 1306 1307 friend class HUseList<T>; 1308 1309 DISALLOW_COPY_AND_ASSIGN(HUseListNode); 1310}; 1311 1312template <typename T> 1313class HUseList : public ValueObject { 1314 public: 1315 HUseList() : first_(nullptr) {} 1316 1317 void Clear() { 1318 first_ = nullptr; 1319 } 1320 1321 // Adds a new entry at the beginning of the use list and returns 1322 // the newly created node. 1323 HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) { 1324 HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index); 1325 if (IsEmpty()) { 1326 first_ = new_node; 1327 } else { 1328 first_->prev_ = new_node; 1329 new_node->next_ = first_; 1330 first_ = new_node; 1331 } 1332 return new_node; 1333 } 1334 1335 HUseListNode<T>* GetFirst() const { 1336 return first_; 1337 } 1338 1339 void Remove(HUseListNode<T>* node) { 1340 DCHECK(node != nullptr); 1341 DCHECK(Contains(node)); 1342 1343 if (node->prev_ != nullptr) { 1344 node->prev_->next_ = node->next_; 1345 } 1346 if (node->next_ != nullptr) { 1347 node->next_->prev_ = node->prev_; 1348 } 1349 if (node == first_) { 1350 first_ = node->next_; 1351 } 1352 } 1353 1354 bool Contains(const HUseListNode<T>* node) const { 1355 if (node == nullptr) { 1356 return false; 1357 } 1358 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1359 if (current == node) { 1360 return true; 1361 } 1362 } 1363 return false; 1364 } 1365 1366 bool IsEmpty() const { 1367 return first_ == nullptr; 1368 } 1369 1370 bool HasOnlyOneUse() const { 1371 return first_ != nullptr && first_->next_ == nullptr; 1372 } 1373 1374 size_t SizeSlow() const { 1375 size_t count = 0; 1376 for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) { 1377 ++count; 1378 } 1379 return count; 1380 } 1381 1382 private: 1383 HUseListNode<T>* first_; 1384}; 1385 1386template<typename T> 1387class HUseIterator : public ValueObject { 1388 public: 1389 explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {} 1390 1391 bool Done() const { return current_ == nullptr; } 1392 1393 void Advance() { 1394 DCHECK(!Done()); 1395 current_ = current_->GetNext(); 1396 } 1397 1398 HUseListNode<T>* Current() const { 1399 DCHECK(!Done()); 1400 return current_; 1401 } 1402 1403 private: 1404 HUseListNode<T>* current_; 1405 1406 friend class HValue; 1407}; 1408 1409// This class is used by HEnvironment and HInstruction classes to record the 1410// instructions they use and pointers to the corresponding HUseListNodes kept 1411// by the used instructions. 1412template <typename T> 1413class HUserRecord : public ValueObject { 1414 public: 1415 HUserRecord() : instruction_(nullptr), use_node_(nullptr) {} 1416 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {} 1417 1418 HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node) 1419 : instruction_(old_record.instruction_), use_node_(use_node) { 1420 DCHECK(instruction_ != nullptr); 1421 DCHECK(use_node_ != nullptr); 1422 DCHECK(old_record.use_node_ == nullptr); 1423 } 1424 1425 HInstruction* GetInstruction() const { return instruction_; } 1426 HUseListNode<T>* GetUseNode() const { return use_node_; } 1427 1428 private: 1429 // Instruction used by the user. 1430 HInstruction* instruction_; 1431 1432 // Corresponding entry in the use list kept by 'instruction_'. 1433 HUseListNode<T>* use_node_; 1434}; 1435 1436/** 1437 * Side-effects representation. 1438 * 1439 * For write/read dependences on fields/arrays, the dependence analysis uses 1440 * type disambiguation (e.g. a float field write cannot modify the value of an 1441 * integer field read) and the access type (e.g. a reference array write cannot 1442 * modify the value of a reference field read [although it may modify the 1443 * reference fetch prior to reading the field, which is represented by its own 1444 * write/read dependence]). The analysis makes conservative points-to 1445 * assumptions on reference types (e.g. two same typed arrays are assumed to be 1446 * the same, and any reference read depends on any reference read without 1447 * further regard of its type). 1448 * 1449 * The internal representation uses 38-bit and is described in the table below. 1450 * The first line indicates the side effect, and for field/array accesses the 1451 * second line indicates the type of the access (in the order of the 1452 * Primitive::Type enum). 1453 * The two numbered lines below indicate the bit position in the bitfield (read 1454 * vertically). 1455 * 1456 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W | 1457 * +-------------+---------+---------+--------------+---------+---------+ 1458 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL| 1459 * | 3 |333333322|222222221| 1 |111111110|000000000| 1460 * | 7 |654321098|765432109| 8 |765432109|876543210| 1461 * 1462 * Note that, to ease the implementation, 'changes' bits are least significant 1463 * bits, while 'dependency' bits are most significant bits. 1464 */ 1465class SideEffects : public ValueObject { 1466 public: 1467 SideEffects() : flags_(0) {} 1468 1469 static SideEffects None() { 1470 return SideEffects(0); 1471 } 1472 1473 static SideEffects All() { 1474 return SideEffects(kAllChangeBits | kAllDependOnBits); 1475 } 1476 1477 static SideEffects AllChanges() { 1478 return SideEffects(kAllChangeBits); 1479 } 1480 1481 static SideEffects AllDependencies() { 1482 return SideEffects(kAllDependOnBits); 1483 } 1484 1485 static SideEffects AllExceptGCDependency() { 1486 return AllWritesAndReads().Union(SideEffects::CanTriggerGC()); 1487 } 1488 1489 static SideEffects AllWritesAndReads() { 1490 return SideEffects(kAllWrites | kAllReads); 1491 } 1492 1493 static SideEffects AllWrites() { 1494 return SideEffects(kAllWrites); 1495 } 1496 1497 static SideEffects AllReads() { 1498 return SideEffects(kAllReads); 1499 } 1500 1501 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) { 1502 return is_volatile 1503 ? AllWritesAndReads() 1504 : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset)); 1505 } 1506 1507 static SideEffects ArrayWriteOfType(Primitive::Type type) { 1508 return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset)); 1509 } 1510 1511 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) { 1512 return is_volatile 1513 ? AllWritesAndReads() 1514 : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset)); 1515 } 1516 1517 static SideEffects ArrayReadOfType(Primitive::Type type) { 1518 return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset)); 1519 } 1520 1521 static SideEffects CanTriggerGC() { 1522 return SideEffects(1ULL << kCanTriggerGCBit); 1523 } 1524 1525 static SideEffects DependsOnGC() { 1526 return SideEffects(1ULL << kDependsOnGCBit); 1527 } 1528 1529 // Combines the side-effects of this and the other. 1530 SideEffects Union(SideEffects other) const { 1531 return SideEffects(flags_ | other.flags_); 1532 } 1533 1534 SideEffects Exclusion(SideEffects other) const { 1535 return SideEffects(flags_ & ~other.flags_); 1536 } 1537 1538 void Add(SideEffects other) { 1539 flags_ |= other.flags_; 1540 } 1541 1542 bool Includes(SideEffects other) const { 1543 return (other.flags_ & flags_) == other.flags_; 1544 } 1545 1546 bool HasSideEffects() const { 1547 return (flags_ & kAllChangeBits); 1548 } 1549 1550 bool HasDependencies() const { 1551 return (flags_ & kAllDependOnBits); 1552 } 1553 1554 // Returns true if there are no side effects or dependencies. 1555 bool DoesNothing() const { 1556 return flags_ == 0; 1557 } 1558 1559 // Returns true if something is written. 1560 bool DoesAnyWrite() const { 1561 return (flags_ & kAllWrites); 1562 } 1563 1564 // Returns true if something is read. 1565 bool DoesAnyRead() const { 1566 return (flags_ & kAllReads); 1567 } 1568 1569 // Returns true if potentially everything is written and read 1570 // (every type and every kind of access). 1571 bool DoesAllReadWrite() const { 1572 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads); 1573 } 1574 1575 bool DoesAll() const { 1576 return flags_ == (kAllChangeBits | kAllDependOnBits); 1577 } 1578 1579 // Returns true if `this` may read something written by `other`. 1580 bool MayDependOn(SideEffects other) const { 1581 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits; 1582 return (other.flags_ & depends_on_flags); 1583 } 1584 1585 // Returns string representation of flags (for debugging only). 1586 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL| 1587 std::string ToString() const { 1588 std::string flags = "|"; 1589 for (int s = kLastBit; s >= 0; s--) { 1590 bool current_bit_is_set = ((flags_ >> s) & 1) != 0; 1591 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) { 1592 // This is a bit for the GC side effect. 1593 if (current_bit_is_set) { 1594 flags += "GC"; 1595 } 1596 flags += "|"; 1597 } else { 1598 // This is a bit for the array/field analysis. 1599 // The underscore character stands for the 'can trigger GC' bit. 1600 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD"; 1601 if (current_bit_is_set) { 1602 flags += kDebug[s]; 1603 } 1604 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) || 1605 (s == kFieldReadOffset) || (s == kArrayReadOffset)) { 1606 flags += "|"; 1607 } 1608 } 1609 } 1610 return flags; 1611 } 1612 1613 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; } 1614 1615 private: 1616 static constexpr int kFieldArrayAnalysisBits = 9; 1617 1618 static constexpr int kFieldWriteOffset = 0; 1619 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits; 1620 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1; 1621 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1; 1622 1623 static constexpr int kChangeBits = kCanTriggerGCBit + 1; 1624 1625 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1; 1626 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits; 1627 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1; 1628 static constexpr int kDependsOnGCBit = kLastBitForReads + 1; 1629 1630 static constexpr int kLastBit = kDependsOnGCBit; 1631 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits; 1632 1633 // Aliases. 1634 1635 static_assert(kChangeBits == kDependOnBits, 1636 "the 'change' bits should match the 'depend on' bits."); 1637 1638 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1); 1639 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits; 1640 static constexpr uint64_t kAllWrites = 1641 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset; 1642 static constexpr uint64_t kAllReads = 1643 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset; 1644 1645 // Work around the fact that HIR aliases I/F and J/D. 1646 // TODO: remove this interceptor once HIR types are clean 1647 static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) { 1648 switch (type) { 1649 case Primitive::kPrimInt: 1650 case Primitive::kPrimFloat: 1651 return TypeFlag(Primitive::kPrimInt, offset) | 1652 TypeFlag(Primitive::kPrimFloat, offset); 1653 case Primitive::kPrimLong: 1654 case Primitive::kPrimDouble: 1655 return TypeFlag(Primitive::kPrimLong, offset) | 1656 TypeFlag(Primitive::kPrimDouble, offset); 1657 default: 1658 return TypeFlag(type, offset); 1659 } 1660 } 1661 1662 // Translates type to bit flag. 1663 static uint64_t TypeFlag(Primitive::Type type, int offset) { 1664 CHECK_NE(type, Primitive::kPrimVoid); 1665 const uint64_t one = 1; 1666 const int shift = type; // 0-based consecutive enum 1667 DCHECK_LE(kFieldWriteOffset, shift); 1668 DCHECK_LT(shift, kArrayWriteOffset); 1669 return one << (type + offset); 1670 } 1671 1672 // Private constructor on direct flags value. 1673 explicit SideEffects(uint64_t flags) : flags_(flags) {} 1674 1675 uint64_t flags_; 1676}; 1677 1678// A HEnvironment object contains the values of virtual registers at a given location. 1679class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1680 public: 1681 HEnvironment(ArenaAllocator* arena, 1682 size_t number_of_vregs, 1683 const DexFile& dex_file, 1684 uint32_t method_idx, 1685 uint32_t dex_pc, 1686 InvokeType invoke_type, 1687 HInstruction* holder) 1688 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1689 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1690 parent_(nullptr), 1691 dex_file_(dex_file), 1692 method_idx_(method_idx), 1693 dex_pc_(dex_pc), 1694 invoke_type_(invoke_type), 1695 holder_(holder) { 1696 } 1697 1698 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1699 : HEnvironment(arena, 1700 to_copy.Size(), 1701 to_copy.GetDexFile(), 1702 to_copy.GetMethodIdx(), 1703 to_copy.GetDexPc(), 1704 to_copy.GetInvokeType(), 1705 holder) {} 1706 1707 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1708 if (parent_ != nullptr) { 1709 parent_->SetAndCopyParentChain(allocator, parent); 1710 } else { 1711 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1712 parent_->CopyFrom(parent); 1713 if (parent->GetParent() != nullptr) { 1714 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1715 } 1716 } 1717 } 1718 1719 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1720 void CopyFrom(HEnvironment* environment); 1721 1722 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1723 // input to the loop phi instead. This is for inserting instructions that 1724 // require an environment (like HDeoptimization) in the loop pre-header. 1725 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1726 1727 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1728 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1729 } 1730 1731 HInstruction* GetInstructionAt(size_t index) const { 1732 return vregs_[index].GetInstruction(); 1733 } 1734 1735 void RemoveAsUserOfInput(size_t index) const; 1736 1737 size_t Size() const { return vregs_.size(); } 1738 1739 HEnvironment* GetParent() const { return parent_; } 1740 1741 void SetLocationAt(size_t index, Location location) { 1742 locations_[index] = location; 1743 } 1744 1745 Location GetLocationAt(size_t index) const { 1746 return locations_[index]; 1747 } 1748 1749 uint32_t GetDexPc() const { 1750 return dex_pc_; 1751 } 1752 1753 uint32_t GetMethodIdx() const { 1754 return method_idx_; 1755 } 1756 1757 InvokeType GetInvokeType() const { 1758 return invoke_type_; 1759 } 1760 1761 const DexFile& GetDexFile() const { 1762 return dex_file_; 1763 } 1764 1765 HInstruction* GetHolder() const { 1766 return holder_; 1767 } 1768 1769 1770 bool IsFromInlinedInvoke() const { 1771 return GetParent() != nullptr; 1772 } 1773 1774 private: 1775 // Record instructions' use entries of this environment for constant-time removal. 1776 // It should only be called by HInstruction when a new environment use is added. 1777 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1778 DCHECK(env_use->GetUser() == this); 1779 size_t index = env_use->GetIndex(); 1780 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1781 } 1782 1783 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1784 ArenaVector<Location> locations_; 1785 HEnvironment* parent_; 1786 const DexFile& dex_file_; 1787 const uint32_t method_idx_; 1788 const uint32_t dex_pc_; 1789 const InvokeType invoke_type_; 1790 1791 // The instruction that holds this environment. 1792 HInstruction* const holder_; 1793 1794 friend class HInstruction; 1795 1796 DISALLOW_COPY_AND_ASSIGN(HEnvironment); 1797}; 1798 1799class HInstruction : public ArenaObject<kArenaAllocInstruction> { 1800 public: 1801 HInstruction(SideEffects side_effects, uint32_t dex_pc) 1802 : previous_(nullptr), 1803 next_(nullptr), 1804 block_(nullptr), 1805 dex_pc_(dex_pc), 1806 id_(-1), 1807 ssa_index_(-1), 1808 environment_(nullptr), 1809 locations_(nullptr), 1810 live_interval_(nullptr), 1811 lifetime_position_(kNoLifetime), 1812 side_effects_(side_effects), 1813 reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {} 1814 1815 virtual ~HInstruction() {} 1816 1817#define DECLARE_KIND(type, super) k##type, 1818 enum InstructionKind { 1819 FOR_EACH_INSTRUCTION(DECLARE_KIND) 1820 }; 1821#undef DECLARE_KIND 1822 1823 HInstruction* GetNext() const { return next_; } 1824 HInstruction* GetPrevious() const { return previous_; } 1825 1826 HInstruction* GetNextDisregardingMoves() const; 1827 HInstruction* GetPreviousDisregardingMoves() const; 1828 1829 HBasicBlock* GetBlock() const { return block_; } 1830 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); } 1831 void SetBlock(HBasicBlock* block) { block_ = block; } 1832 bool IsInBlock() const { return block_ != nullptr; } 1833 bool IsInLoop() const { return block_->IsInLoop(); } 1834 bool IsLoopHeaderPhi() { return IsPhi() && block_->IsLoopHeader(); } 1835 1836 virtual size_t InputCount() const = 0; 1837 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); } 1838 1839 virtual void Accept(HGraphVisitor* visitor) = 0; 1840 virtual const char* DebugName() const = 0; 1841 1842 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; } 1843 void SetRawInputAt(size_t index, HInstruction* input) { 1844 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input)); 1845 } 1846 1847 virtual bool NeedsEnvironment() const { return false; } 1848 1849 uint32_t GetDexPc() const { return dex_pc_; } 1850 1851 virtual bool IsControlFlow() const { return false; } 1852 1853 virtual bool CanThrow() const { return false; } 1854 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); } 1855 1856 bool HasSideEffects() const { return side_effects_.HasSideEffects(); } 1857 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); } 1858 1859 // Does not apply for all instructions, but having this at top level greatly 1860 // simplifies the null check elimination. 1861 // TODO: Consider merging can_be_null into ReferenceTypeInfo. 1862 virtual bool CanBeNull() const { 1863 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types"; 1864 return true; 1865 } 1866 1867 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const { 1868 return false; 1869 } 1870 1871 void SetReferenceTypeInfo(ReferenceTypeInfo rti); 1872 1873 ReferenceTypeInfo GetReferenceTypeInfo() const { 1874 DCHECK_EQ(GetType(), Primitive::kPrimNot); 1875 return reference_type_info_; 1876 } 1877 1878 void AddUseAt(HInstruction* user, size_t index) { 1879 DCHECK(user != nullptr); 1880 HUseListNode<HInstruction*>* use = 1881 uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1882 user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use)); 1883 } 1884 1885 void AddEnvUseAt(HEnvironment* user, size_t index) { 1886 DCHECK(user != nullptr); 1887 HUseListNode<HEnvironment*>* env_use = 1888 env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena()); 1889 user->RecordEnvUse(env_use); 1890 } 1891 1892 void RemoveAsUserOfInput(size_t input) { 1893 HUserRecord<HInstruction*> input_use = InputRecordAt(input); 1894 input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode()); 1895 } 1896 1897 const HUseList<HInstruction*>& GetUses() const { return uses_; } 1898 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; } 1899 1900 bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); } 1901 bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); } 1902 bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); } 1903 bool HasOnlyOneNonEnvironmentUse() const { 1904 return !HasEnvironmentUses() && GetUses().HasOnlyOneUse(); 1905 } 1906 1907 // Does this instruction strictly dominate `other_instruction`? 1908 // Returns false if this instruction and `other_instruction` are the same. 1909 // Aborts if this instruction and `other_instruction` are both phis. 1910 bool StrictlyDominates(HInstruction* other_instruction) const; 1911 1912 int GetId() const { return id_; } 1913 void SetId(int id) { id_ = id; } 1914 1915 int GetSsaIndex() const { return ssa_index_; } 1916 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; } 1917 bool HasSsaIndex() const { return ssa_index_ != -1; } 1918 1919 bool HasEnvironment() const { return environment_ != nullptr; } 1920 HEnvironment* GetEnvironment() const { return environment_; } 1921 // Set the `environment_` field. Raw because this method does not 1922 // update the uses lists. 1923 void SetRawEnvironment(HEnvironment* environment) { 1924 DCHECK(environment_ == nullptr); 1925 DCHECK_EQ(environment->GetHolder(), this); 1926 environment_ = environment; 1927 } 1928 1929 // Set the environment of this instruction, copying it from `environment`. While 1930 // copying, the uses lists are being updated. 1931 void CopyEnvironmentFrom(HEnvironment* environment) { 1932 DCHECK(environment_ == nullptr); 1933 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1934 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1935 environment_->CopyFrom(environment); 1936 if (environment->GetParent() != nullptr) { 1937 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1938 } 1939 } 1940 1941 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment, 1942 HBasicBlock* block) { 1943 DCHECK(environment_ == nullptr); 1944 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena(); 1945 environment_ = new (allocator) HEnvironment(allocator, *environment, this); 1946 environment_->CopyFromWithLoopPhiAdjustment(environment, block); 1947 if (environment->GetParent() != nullptr) { 1948 environment_->SetAndCopyParentChain(allocator, environment->GetParent()); 1949 } 1950 } 1951 1952 // Returns the number of entries in the environment. Typically, that is the 1953 // number of dex registers in a method. It could be more in case of inlining. 1954 size_t EnvironmentSize() const; 1955 1956 LocationSummary* GetLocations() const { return locations_; } 1957 void SetLocations(LocationSummary* locations) { locations_ = locations; } 1958 1959 void ReplaceWith(HInstruction* instruction); 1960 void ReplaceInput(HInstruction* replacement, size_t index); 1961 1962 // This is almost the same as doing `ReplaceWith()`. But in this helper, the 1963 // uses of this instruction by `other` are *not* updated. 1964 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) { 1965 ReplaceWith(other); 1966 other->ReplaceInput(this, use_index); 1967 } 1968 1969 // Move `this` instruction before `cursor`. 1970 void MoveBefore(HInstruction* cursor); 1971 1972 // Move `this` before its first user and out of any loops. If there is no 1973 // out-of-loop user that dominates all other users, move the instruction 1974 // to the end of the out-of-loop common dominator of the user's blocks. 1975 // 1976 // This can be used only on non-throwing instructions with no side effects that 1977 // have at least one use but no environment uses. 1978 void MoveBeforeFirstUserAndOutOfLoops(); 1979 1980#define INSTRUCTION_TYPE_CHECK(type, super) \ 1981 bool Is##type() const; \ 1982 const H##type* As##type() const; \ 1983 H##type* As##type(); 1984 1985 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1986#undef INSTRUCTION_TYPE_CHECK 1987 1988#define INSTRUCTION_TYPE_CHECK(type, super) \ 1989 bool Is##type() const { return (As##type() != nullptr); } \ 1990 virtual const H##type* As##type() const { return nullptr; } \ 1991 virtual H##type* As##type() { return nullptr; } 1992 FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 1993#undef INSTRUCTION_TYPE_CHECK 1994 1995 // Returns whether the instruction can be moved within the graph. 1996 virtual bool CanBeMoved() const { return false; } 1997 1998 // Returns whether the two instructions are of the same kind. 1999 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 2000 return false; 2001 } 2002 2003 // Returns whether any data encoded in the two instructions is equal. 2004 // This method does not look at the inputs. Both instructions must be 2005 // of the same type, otherwise the method has undefined behavior. 2006 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const { 2007 return false; 2008 } 2009 2010 // Returns whether two instructions are equal, that is: 2011 // 1) They have the same type and contain the same data (InstructionDataEquals). 2012 // 2) Their inputs are identical. 2013 bool Equals(HInstruction* other) const; 2014 2015 // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744) 2016 // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide 2017 // the virtual function because the __attribute__((__pure__)) doesn't really 2018 // apply the strong requirement for virtual functions, preventing optimizations. 2019 InstructionKind GetKind() const PURE; 2020 virtual InstructionKind GetKindInternal() const = 0; 2021 2022 virtual size_t ComputeHashCode() const { 2023 size_t result = GetKind(); 2024 for (size_t i = 0, e = InputCount(); i < e; ++i) { 2025 result = (result * 31) + InputAt(i)->GetId(); 2026 } 2027 return result; 2028 } 2029 2030 SideEffects GetSideEffects() const { return side_effects_; } 2031 void AddSideEffects(SideEffects other) { side_effects_.Add(other); } 2032 2033 size_t GetLifetimePosition() const { return lifetime_position_; } 2034 void SetLifetimePosition(size_t position) { lifetime_position_ = position; } 2035 LiveInterval* GetLiveInterval() const { return live_interval_; } 2036 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; } 2037 bool HasLiveInterval() const { return live_interval_ != nullptr; } 2038 2039 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); } 2040 2041 // Returns whether the code generation of the instruction will require to have access 2042 // to the current method. Such instructions are: 2043 // (1): Instructions that require an environment, as calling the runtime requires 2044 // to walk the stack and have the current method stored at a specific stack address. 2045 // (2): Object literals like classes and strings, that are loaded from the dex cache 2046 // fields of the current method. 2047 bool NeedsCurrentMethod() const { 2048 return NeedsEnvironment() || IsLoadClass() || IsLoadString(); 2049 } 2050 2051 // Returns whether the code generation of the instruction will require to have access 2052 // to the dex cache of the current method's declaring class via the current method. 2053 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; } 2054 2055 // Does this instruction have any use in an environment before 2056 // control flow hits 'other'? 2057 bool HasAnyEnvironmentUseBefore(HInstruction* other); 2058 2059 // Remove all references to environment uses of this instruction. 2060 // The caller must ensure that this is safe to do. 2061 void RemoveEnvironmentUsers(); 2062 2063 protected: 2064 virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0; 2065 virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0; 2066 void SetSideEffects(SideEffects other) { side_effects_ = other; } 2067 2068 private: 2069 void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); } 2070 2071 HInstruction* previous_; 2072 HInstruction* next_; 2073 HBasicBlock* block_; 2074 const uint32_t dex_pc_; 2075 2076 // An instruction gets an id when it is added to the graph. 2077 // It reflects creation order. A negative id means the instruction 2078 // has not been added to the graph. 2079 int id_; 2080 2081 // When doing liveness analysis, instructions that have uses get an SSA index. 2082 int ssa_index_; 2083 2084 // List of instructions that have this instruction as input. 2085 HUseList<HInstruction*> uses_; 2086 2087 // List of environments that contain this instruction. 2088 HUseList<HEnvironment*> env_uses_; 2089 2090 // The environment associated with this instruction. Not null if the instruction 2091 // might jump out of the method. 2092 HEnvironment* environment_; 2093 2094 // Set by the code generator. 2095 LocationSummary* locations_; 2096 2097 // Set by the liveness analysis. 2098 LiveInterval* live_interval_; 2099 2100 // Set by the liveness analysis, this is the position in a linear 2101 // order of blocks where this instruction's live interval start. 2102 size_t lifetime_position_; 2103 2104 SideEffects side_effects_; 2105 2106 // TODO: for primitive types this should be marked as invalid. 2107 ReferenceTypeInfo reference_type_info_; 2108 2109 friend class GraphChecker; 2110 friend class HBasicBlock; 2111 friend class HEnvironment; 2112 friend class HGraph; 2113 friend class HInstructionList; 2114 2115 DISALLOW_COPY_AND_ASSIGN(HInstruction); 2116}; 2117std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs); 2118 2119class HInputIterator : public ValueObject { 2120 public: 2121 explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {} 2122 2123 bool Done() const { return index_ == instruction_->InputCount(); } 2124 HInstruction* Current() const { return instruction_->InputAt(index_); } 2125 void Advance() { index_++; } 2126 2127 private: 2128 HInstruction* instruction_; 2129 size_t index_; 2130 2131 DISALLOW_COPY_AND_ASSIGN(HInputIterator); 2132}; 2133 2134class HInstructionIterator : public ValueObject { 2135 public: 2136 explicit HInstructionIterator(const HInstructionList& instructions) 2137 : instruction_(instructions.first_instruction_) { 2138 next_ = Done() ? nullptr : instruction_->GetNext(); 2139 } 2140 2141 bool Done() const { return instruction_ == nullptr; } 2142 HInstruction* Current() const { return instruction_; } 2143 void Advance() { 2144 instruction_ = next_; 2145 next_ = Done() ? nullptr : instruction_->GetNext(); 2146 } 2147 2148 private: 2149 HInstruction* instruction_; 2150 HInstruction* next_; 2151 2152 DISALLOW_COPY_AND_ASSIGN(HInstructionIterator); 2153}; 2154 2155class HBackwardInstructionIterator : public ValueObject { 2156 public: 2157 explicit HBackwardInstructionIterator(const HInstructionList& instructions) 2158 : instruction_(instructions.last_instruction_) { 2159 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2160 } 2161 2162 bool Done() const { return instruction_ == nullptr; } 2163 HInstruction* Current() const { return instruction_; } 2164 void Advance() { 2165 instruction_ = next_; 2166 next_ = Done() ? nullptr : instruction_->GetPrevious(); 2167 } 2168 2169 private: 2170 HInstruction* instruction_; 2171 HInstruction* next_; 2172 2173 DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator); 2174}; 2175 2176template<size_t N> 2177class HTemplateInstruction: public HInstruction { 2178 public: 2179 HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc) 2180 : HInstruction(side_effects, dex_pc), inputs_() {} 2181 virtual ~HTemplateInstruction() {} 2182 2183 size_t InputCount() const OVERRIDE { return N; } 2184 2185 protected: 2186 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 2187 DCHECK_LT(i, N); 2188 return inputs_[i]; 2189 } 2190 2191 void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE { 2192 DCHECK_LT(i, N); 2193 inputs_[i] = input; 2194 } 2195 2196 private: 2197 std::array<HUserRecord<HInstruction*>, N> inputs_; 2198 2199 friend class SsaBuilder; 2200}; 2201 2202// HTemplateInstruction specialization for N=0. 2203template<> 2204class HTemplateInstruction<0>: public HInstruction { 2205 public: 2206 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc) 2207 : HInstruction(side_effects, dex_pc) {} 2208 2209 virtual ~HTemplateInstruction() {} 2210 2211 size_t InputCount() const OVERRIDE { return 0; } 2212 2213 protected: 2214 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2215 LOG(FATAL) << "Unreachable"; 2216 UNREACHABLE(); 2217 } 2218 2219 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2220 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2221 LOG(FATAL) << "Unreachable"; 2222 UNREACHABLE(); 2223 } 2224 2225 private: 2226 friend class SsaBuilder; 2227}; 2228 2229template<intptr_t N> 2230class HExpression : public HTemplateInstruction<N> { 2231 public: 2232 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc) 2233 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 2234 virtual ~HExpression() {} 2235 2236 Primitive::Type GetType() const OVERRIDE { return type_; } 2237 2238 protected: 2239 Primitive::Type type_; 2240}; 2241 2242// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2243// instruction that branches to the exit block. 2244class HReturnVoid : public HTemplateInstruction<0> { 2245 public: 2246 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2247 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2248 2249 bool IsControlFlow() const OVERRIDE { return true; } 2250 2251 DECLARE_INSTRUCTION(ReturnVoid); 2252 2253 private: 2254 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2255}; 2256 2257// Represents dex's RETURN opcodes. A HReturn is a control flow 2258// instruction that branches to the exit block. 2259class HReturn : public HTemplateInstruction<1> { 2260 public: 2261 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc) 2262 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2263 SetRawInputAt(0, value); 2264 } 2265 2266 bool IsControlFlow() const OVERRIDE { return true; } 2267 2268 DECLARE_INSTRUCTION(Return); 2269 2270 private: 2271 DISALLOW_COPY_AND_ASSIGN(HReturn); 2272}; 2273 2274// The exit instruction is the only instruction of the exit block. 2275// Instructions aborting the method (HThrow and HReturn) must branch to the 2276// exit block. 2277class HExit : public HTemplateInstruction<0> { 2278 public: 2279 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2280 2281 bool IsControlFlow() const OVERRIDE { return true; } 2282 2283 DECLARE_INSTRUCTION(Exit); 2284 2285 private: 2286 DISALLOW_COPY_AND_ASSIGN(HExit); 2287}; 2288 2289// Jumps from one block to another. 2290class HGoto : public HTemplateInstruction<0> { 2291 public: 2292 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2293 2294 bool IsControlFlow() const OVERRIDE { return true; } 2295 2296 HBasicBlock* GetSuccessor() const { 2297 return GetBlock()->GetSingleSuccessor(); 2298 } 2299 2300 DECLARE_INSTRUCTION(Goto); 2301 2302 private: 2303 DISALLOW_COPY_AND_ASSIGN(HGoto); 2304}; 2305 2306class HConstant : public HExpression<0> { 2307 public: 2308 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2309 : HExpression(type, SideEffects::None(), dex_pc) {} 2310 2311 bool CanBeMoved() const OVERRIDE { return true; } 2312 2313 virtual bool IsMinusOne() const { return false; } 2314 virtual bool IsZero() const { return false; } 2315 virtual bool IsOne() const { return false; } 2316 2317 virtual uint64_t GetValueAsUint64() const = 0; 2318 2319 DECLARE_ABSTRACT_INSTRUCTION(Constant); 2320 2321 private: 2322 DISALLOW_COPY_AND_ASSIGN(HConstant); 2323}; 2324 2325class HNullConstant : public HConstant { 2326 public: 2327 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2328 return true; 2329 } 2330 2331 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2332 2333 size_t ComputeHashCode() const OVERRIDE { return 0; } 2334 2335 DECLARE_INSTRUCTION(NullConstant); 2336 2337 private: 2338 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2339 2340 friend class HGraph; 2341 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2342}; 2343 2344// Constants of the type int. Those can be from Dex instructions, or 2345// synthesized (for example with the if-eqz instruction). 2346class HIntConstant : public HConstant { 2347 public: 2348 int32_t GetValue() const { return value_; } 2349 2350 uint64_t GetValueAsUint64() const OVERRIDE { 2351 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2352 } 2353 2354 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2355 DCHECK(other->IsIntConstant()); 2356 return other->AsIntConstant()->value_ == value_; 2357 } 2358 2359 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2360 2361 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2362 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2363 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2364 2365 DECLARE_INSTRUCTION(IntConstant); 2366 2367 private: 2368 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2369 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2370 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2371 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {} 2372 2373 const int32_t value_; 2374 2375 friend class HGraph; 2376 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2377 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2378 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2379}; 2380 2381class HLongConstant : public HConstant { 2382 public: 2383 int64_t GetValue() const { return value_; } 2384 2385 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2386 2387 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2388 DCHECK(other->IsLongConstant()); 2389 return other->AsLongConstant()->value_ == value_; 2390 } 2391 2392 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2393 2394 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2395 bool IsZero() const OVERRIDE { return GetValue() == 0; } 2396 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2397 2398 DECLARE_INSTRUCTION(LongConstant); 2399 2400 private: 2401 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2402 : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {} 2403 2404 const int64_t value_; 2405 2406 friend class HGraph; 2407 DISALLOW_COPY_AND_ASSIGN(HLongConstant); 2408}; 2409 2410// Conditional branch. A block ending with an HIf instruction must have 2411// two successors. 2412class HIf : public HTemplateInstruction<1> { 2413 public: 2414 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc) 2415 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2416 SetRawInputAt(0, input); 2417 } 2418 2419 bool IsControlFlow() const OVERRIDE { return true; } 2420 2421 HBasicBlock* IfTrueSuccessor() const { 2422 return GetBlock()->GetSuccessors()[0]; 2423 } 2424 2425 HBasicBlock* IfFalseSuccessor() const { 2426 return GetBlock()->GetSuccessors()[1]; 2427 } 2428 2429 DECLARE_INSTRUCTION(If); 2430 2431 private: 2432 DISALLOW_COPY_AND_ASSIGN(HIf); 2433}; 2434 2435 2436// Abstract instruction which marks the beginning and/or end of a try block and 2437// links it to the respective exception handlers. Behaves the same as a Goto in 2438// non-exceptional control flow. 2439// Normal-flow successor is stored at index zero, exception handlers under 2440// higher indices in no particular order. 2441class HTryBoundary : public HTemplateInstruction<0> { 2442 public: 2443 enum BoundaryKind { 2444 kEntry, 2445 kExit, 2446 }; 2447 2448 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc) 2449 : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {} 2450 2451 bool IsControlFlow() const OVERRIDE { return true; } 2452 2453 // Returns the block's non-exceptional successor (index zero). 2454 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; } 2455 2456 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const { 2457 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u); 2458 } 2459 2460 // Returns whether `handler` is among its exception handlers (non-zero index 2461 // successors). 2462 bool HasExceptionHandler(const HBasicBlock& handler) const { 2463 DCHECK(handler.IsCatchBlock()); 2464 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2465 } 2466 2467 // If not present already, adds `handler` to its block's list of exception 2468 // handlers. 2469 void AddExceptionHandler(HBasicBlock* handler) { 2470 if (!HasExceptionHandler(*handler)) { 2471 GetBlock()->AddSuccessor(handler); 2472 } 2473 } 2474 2475 bool IsEntry() const { return kind_ == BoundaryKind::kEntry; } 2476 2477 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2478 2479 DECLARE_INSTRUCTION(TryBoundary); 2480 2481 private: 2482 const BoundaryKind kind_; 2483 2484 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2485}; 2486 2487// Deoptimize to interpreter, upon checking a condition. 2488class HDeoptimize : public HTemplateInstruction<1> { 2489 public: 2490 HDeoptimize(HInstruction* cond, uint32_t dex_pc) 2491 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2492 SetRawInputAt(0, cond); 2493 } 2494 2495 bool CanBeMoved() const OVERRIDE { return true; } 2496 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2497 return true; 2498 } 2499 bool NeedsEnvironment() const OVERRIDE { return true; } 2500 bool CanThrow() const OVERRIDE { return true; } 2501 2502 DECLARE_INSTRUCTION(Deoptimize); 2503 2504 private: 2505 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 2506}; 2507 2508// Represents the ArtMethod that was passed as a first argument to 2509// the method. It is used by instructions that depend on it, like 2510// instructions that work with the dex cache. 2511class HCurrentMethod : public HExpression<0> { 2512 public: 2513 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2514 : HExpression(type, SideEffects::None(), dex_pc) {} 2515 2516 DECLARE_INSTRUCTION(CurrentMethod); 2517 2518 private: 2519 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 2520}; 2521 2522// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will 2523// have one successor for each entry in the switch table, and the final successor 2524// will be the block containing the next Dex opcode. 2525class HPackedSwitch : public HTemplateInstruction<1> { 2526 public: 2527 HPackedSwitch(int32_t start_value, 2528 uint32_t num_entries, 2529 HInstruction* input, 2530 uint32_t dex_pc = kNoDexPc) 2531 : HTemplateInstruction(SideEffects::None(), dex_pc), 2532 start_value_(start_value), 2533 num_entries_(num_entries) { 2534 SetRawInputAt(0, input); 2535 } 2536 2537 bool IsControlFlow() const OVERRIDE { return true; } 2538 2539 int32_t GetStartValue() const { return start_value_; } 2540 2541 uint32_t GetNumEntries() const { return num_entries_; } 2542 2543 HBasicBlock* GetDefaultBlock() const { 2544 // Last entry is the default block. 2545 return GetBlock()->GetSuccessors()[num_entries_]; 2546 } 2547 DECLARE_INSTRUCTION(PackedSwitch); 2548 2549 private: 2550 const int32_t start_value_; 2551 const uint32_t num_entries_; 2552 2553 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch); 2554}; 2555 2556class HUnaryOperation : public HExpression<1> { 2557 public: 2558 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 2559 : HExpression(result_type, SideEffects::None(), dex_pc) { 2560 SetRawInputAt(0, input); 2561 } 2562 2563 HInstruction* GetInput() const { return InputAt(0); } 2564 Primitive::Type GetResultType() const { return GetType(); } 2565 2566 bool CanBeMoved() const OVERRIDE { return true; } 2567 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2568 return true; 2569 } 2570 2571 // Try to statically evaluate `operation` and return a HConstant 2572 // containing the result of this evaluation. If `operation` cannot 2573 // be evaluated as a constant, return null. 2574 HConstant* TryStaticEvaluation() const; 2575 2576 // Apply this operation to `x`. 2577 virtual HConstant* Evaluate(HIntConstant* x) const = 0; 2578 virtual HConstant* Evaluate(HLongConstant* x) const = 0; 2579 2580 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation); 2581 2582 private: 2583 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation); 2584}; 2585 2586class HBinaryOperation : public HExpression<2> { 2587 public: 2588 HBinaryOperation(Primitive::Type result_type, 2589 HInstruction* left, 2590 HInstruction* right, 2591 SideEffects side_effects = SideEffects::None(), 2592 uint32_t dex_pc = kNoDexPc) 2593 : HExpression(result_type, side_effects, dex_pc) { 2594 SetRawInputAt(0, left); 2595 SetRawInputAt(1, right); 2596 } 2597 2598 HInstruction* GetLeft() const { return InputAt(0); } 2599 HInstruction* GetRight() const { return InputAt(1); } 2600 Primitive::Type GetResultType() const { return GetType(); } 2601 2602 virtual bool IsCommutative() const { return false; } 2603 2604 // Put constant on the right. 2605 // Returns whether order is changed. 2606 bool OrderInputsWithConstantOnTheRight() { 2607 HInstruction* left = InputAt(0); 2608 HInstruction* right = InputAt(1); 2609 if (left->IsConstant() && !right->IsConstant()) { 2610 ReplaceInput(right, 0); 2611 ReplaceInput(left, 1); 2612 return true; 2613 } 2614 return false; 2615 } 2616 2617 // Order inputs by instruction id, but favor constant on the right side. 2618 // This helps GVN for commutative ops. 2619 void OrderInputs() { 2620 DCHECK(IsCommutative()); 2621 HInstruction* left = InputAt(0); 2622 HInstruction* right = InputAt(1); 2623 if (left == right || (!left->IsConstant() && right->IsConstant())) { 2624 return; 2625 } 2626 if (OrderInputsWithConstantOnTheRight()) { 2627 return; 2628 } 2629 // Order according to instruction id. 2630 if (left->GetId() > right->GetId()) { 2631 ReplaceInput(right, 0); 2632 ReplaceInput(left, 1); 2633 } 2634 } 2635 2636 bool CanBeMoved() const OVERRIDE { return true; } 2637 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2638 return true; 2639 } 2640 2641 // Try to statically evaluate `operation` and return a HConstant 2642 // containing the result of this evaluation. If `operation` cannot 2643 // be evaluated as a constant, return null. 2644 HConstant* TryStaticEvaluation() const; 2645 2646 // Apply this operation to `x` and `y`. 2647 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; 2648 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; 2649 virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED, 2650 HLongConstant* y ATTRIBUTE_UNUSED) const { 2651 VLOG(compiler) << DebugName() << " is not defined for the (int, long) case."; 2652 return nullptr; 2653 } 2654 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, 2655 HIntConstant* y ATTRIBUTE_UNUSED) const { 2656 VLOG(compiler) << DebugName() << " is not defined for the (long, int) case."; 2657 return nullptr; 2658 } 2659 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2660 HNullConstant* y ATTRIBUTE_UNUSED) const { 2661 VLOG(compiler) << DebugName() << " is not defined for the (null, null) case."; 2662 return nullptr; 2663 } 2664 2665 // Returns an input that can legally be used as the right input and is 2666 // constant, or null. 2667 HConstant* GetConstantRight() const; 2668 2669 // If `GetConstantRight()` returns one of the input, this returns the other 2670 // one. Otherwise it returns null. 2671 HInstruction* GetLeastConstantLeft() const; 2672 2673 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation); 2674 2675 private: 2676 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation); 2677}; 2678 2679// The comparison bias applies for floating point operations and indicates how NaN 2680// comparisons are treated: 2681enum class ComparisonBias { 2682 kNoBias, // bias is not applicable (i.e. for long operation) 2683 kGtBias, // return 1 for NaN comparisons 2684 kLtBias, // return -1 for NaN comparisons 2685}; 2686 2687class HCondition : public HBinaryOperation { 2688 public: 2689 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2690 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc), 2691 needs_materialization_(true), 2692 bias_(ComparisonBias::kNoBias) {} 2693 2694 bool NeedsMaterialization() const { return needs_materialization_; } 2695 void ClearNeedsMaterialization() { needs_materialization_ = false; } 2696 2697 // For code generation purposes, returns whether this instruction is just before 2698 // `instruction`, and disregard moves in between. 2699 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const; 2700 2701 DECLARE_ABSTRACT_INSTRUCTION(Condition); 2702 2703 virtual IfCondition GetCondition() const = 0; 2704 2705 virtual IfCondition GetOppositeCondition() const = 0; 2706 2707 bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; } 2708 2709 bool IsLtBias() const { return bias_ == ComparisonBias::kLtBias; } 2710 2711 void SetBias(ComparisonBias bias) { bias_ = bias; } 2712 2713 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 2714 return bias_ == other->AsCondition()->bias_; 2715 } 2716 2717 bool IsFPConditionTrueIfNaN() const { 2718 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2719 IfCondition if_cond = GetCondition(); 2720 if (if_cond == kCondNE) { 2721 return true; 2722 } else if (if_cond == kCondEQ) { 2723 return false; 2724 } 2725 return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias(); 2726 } 2727 2728 bool IsFPConditionFalseIfNaN() const { 2729 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2730 IfCondition if_cond = GetCondition(); 2731 if (if_cond == kCondEQ) { 2732 return true; 2733 } else if (if_cond == kCondNE) { 2734 return false; 2735 } 2736 return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias(); 2737 } 2738 2739 private: 2740 // For register allocation purposes, returns whether this instruction needs to be 2741 // materialized (that is, not just be in the processor flags). 2742 bool needs_materialization_; 2743 2744 // Needed if we merge a HCompare into a HCondition. 2745 ComparisonBias bias_; 2746 2747 DISALLOW_COPY_AND_ASSIGN(HCondition); 2748}; 2749 2750// Instruction to check if two inputs are equal to each other. 2751class HEqual : public HCondition { 2752 public: 2753 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2754 : HCondition(first, second, dex_pc) {} 2755 2756 bool IsCommutative() const OVERRIDE { return true; } 2757 2758 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2759 return GetBlock()->GetGraph()->GetIntConstant( 2760 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2761 } 2762 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2763 return GetBlock()->GetGraph()->GetIntConstant( 2764 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2765 } 2766 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2767 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2768 return GetBlock()->GetGraph()->GetIntConstant(1); 2769 } 2770 2771 DECLARE_INSTRUCTION(Equal); 2772 2773 IfCondition GetCondition() const OVERRIDE { 2774 return kCondEQ; 2775 } 2776 2777 IfCondition GetOppositeCondition() const OVERRIDE { 2778 return kCondNE; 2779 } 2780 2781 private: 2782 template <typename T> bool Compute(T x, T y) const { return x == y; } 2783 2784 DISALLOW_COPY_AND_ASSIGN(HEqual); 2785}; 2786 2787class HNotEqual : public HCondition { 2788 public: 2789 HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2790 : HCondition(first, second, dex_pc) {} 2791 2792 bool IsCommutative() const OVERRIDE { return true; } 2793 2794 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2795 return GetBlock()->GetGraph()->GetIntConstant( 2796 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2797 } 2798 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2799 return GetBlock()->GetGraph()->GetIntConstant( 2800 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2801 } 2802 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2803 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2804 return GetBlock()->GetGraph()->GetIntConstant(0); 2805 } 2806 2807 DECLARE_INSTRUCTION(NotEqual); 2808 2809 IfCondition GetCondition() const OVERRIDE { 2810 return kCondNE; 2811 } 2812 2813 IfCondition GetOppositeCondition() const OVERRIDE { 2814 return kCondEQ; 2815 } 2816 2817 private: 2818 template <typename T> bool Compute(T x, T y) const { return x != y; } 2819 2820 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2821}; 2822 2823class HLessThan : public HCondition { 2824 public: 2825 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2826 : HCondition(first, second, dex_pc) {} 2827 2828 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2829 return GetBlock()->GetGraph()->GetIntConstant( 2830 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2831 } 2832 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2833 return GetBlock()->GetGraph()->GetIntConstant( 2834 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2835 } 2836 2837 DECLARE_INSTRUCTION(LessThan); 2838 2839 IfCondition GetCondition() const OVERRIDE { 2840 return kCondLT; 2841 } 2842 2843 IfCondition GetOppositeCondition() const OVERRIDE { 2844 return kCondGE; 2845 } 2846 2847 private: 2848 template <typename T> bool Compute(T x, T y) const { return x < y; } 2849 2850 DISALLOW_COPY_AND_ASSIGN(HLessThan); 2851}; 2852 2853class HLessThanOrEqual : public HCondition { 2854 public: 2855 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2856 : HCondition(first, second, dex_pc) {} 2857 2858 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2859 return GetBlock()->GetGraph()->GetIntConstant( 2860 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2861 } 2862 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2863 return GetBlock()->GetGraph()->GetIntConstant( 2864 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2865 } 2866 2867 DECLARE_INSTRUCTION(LessThanOrEqual); 2868 2869 IfCondition GetCondition() const OVERRIDE { 2870 return kCondLE; 2871 } 2872 2873 IfCondition GetOppositeCondition() const OVERRIDE { 2874 return kCondGT; 2875 } 2876 2877 private: 2878 template <typename T> bool Compute(T x, T y) const { return x <= y; } 2879 2880 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual); 2881}; 2882 2883class HGreaterThan : public HCondition { 2884 public: 2885 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2886 : HCondition(first, second, dex_pc) {} 2887 2888 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2889 return GetBlock()->GetGraph()->GetIntConstant( 2890 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2891 } 2892 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2893 return GetBlock()->GetGraph()->GetIntConstant( 2894 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2895 } 2896 2897 DECLARE_INSTRUCTION(GreaterThan); 2898 2899 IfCondition GetCondition() const OVERRIDE { 2900 return kCondGT; 2901 } 2902 2903 IfCondition GetOppositeCondition() const OVERRIDE { 2904 return kCondLE; 2905 } 2906 2907 private: 2908 template <typename T> bool Compute(T x, T y) const { return x > y; } 2909 2910 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 2911}; 2912 2913class HGreaterThanOrEqual : public HCondition { 2914 public: 2915 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2916 : HCondition(first, second, dex_pc) {} 2917 2918 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2919 return GetBlock()->GetGraph()->GetIntConstant( 2920 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2921 } 2922 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2923 return GetBlock()->GetGraph()->GetIntConstant( 2924 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2925 } 2926 2927 DECLARE_INSTRUCTION(GreaterThanOrEqual); 2928 2929 IfCondition GetCondition() const OVERRIDE { 2930 return kCondGE; 2931 } 2932 2933 IfCondition GetOppositeCondition() const OVERRIDE { 2934 return kCondLT; 2935 } 2936 2937 private: 2938 template <typename T> bool Compute(T x, T y) const { return x >= y; } 2939 2940 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 2941}; 2942 2943class HBelow : public HCondition { 2944 public: 2945 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2946 : HCondition(first, second, dex_pc) {} 2947 2948 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2949 return GetBlock()->GetGraph()->GetIntConstant( 2950 Compute(static_cast<uint32_t>(x->GetValue()), 2951 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2952 } 2953 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2954 return GetBlock()->GetGraph()->GetIntConstant( 2955 Compute(static_cast<uint64_t>(x->GetValue()), 2956 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2957 } 2958 2959 DECLARE_INSTRUCTION(Below); 2960 2961 IfCondition GetCondition() const OVERRIDE { 2962 return kCondB; 2963 } 2964 2965 IfCondition GetOppositeCondition() const OVERRIDE { 2966 return kCondAE; 2967 } 2968 2969 private: 2970 template <typename T> bool Compute(T x, T y) const { return x < y; } 2971 2972 DISALLOW_COPY_AND_ASSIGN(HBelow); 2973}; 2974 2975class HBelowOrEqual : public HCondition { 2976 public: 2977 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2978 : HCondition(first, second, dex_pc) {} 2979 2980 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2981 return GetBlock()->GetGraph()->GetIntConstant( 2982 Compute(static_cast<uint32_t>(x->GetValue()), 2983 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 2984 } 2985 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2986 return GetBlock()->GetGraph()->GetIntConstant( 2987 Compute(static_cast<uint64_t>(x->GetValue()), 2988 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 2989 } 2990 2991 DECLARE_INSTRUCTION(BelowOrEqual); 2992 2993 IfCondition GetCondition() const OVERRIDE { 2994 return kCondBE; 2995 } 2996 2997 IfCondition GetOppositeCondition() const OVERRIDE { 2998 return kCondA; 2999 } 3000 3001 private: 3002 template <typename T> bool Compute(T x, T y) const { return x <= y; } 3003 3004 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 3005}; 3006 3007class HAbove : public HCondition { 3008 public: 3009 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3010 : HCondition(first, second, dex_pc) {} 3011 3012 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3013 return GetBlock()->GetGraph()->GetIntConstant( 3014 Compute(static_cast<uint32_t>(x->GetValue()), 3015 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3016 } 3017 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3018 return GetBlock()->GetGraph()->GetIntConstant( 3019 Compute(static_cast<uint64_t>(x->GetValue()), 3020 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3021 } 3022 3023 DECLARE_INSTRUCTION(Above); 3024 3025 IfCondition GetCondition() const OVERRIDE { 3026 return kCondA; 3027 } 3028 3029 IfCondition GetOppositeCondition() const OVERRIDE { 3030 return kCondBE; 3031 } 3032 3033 private: 3034 template <typename T> bool Compute(T x, T y) const { return x > y; } 3035 3036 DISALLOW_COPY_AND_ASSIGN(HAbove); 3037}; 3038 3039class HAboveOrEqual : public HCondition { 3040 public: 3041 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3042 : HCondition(first, second, dex_pc) {} 3043 3044 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3045 return GetBlock()->GetGraph()->GetIntConstant( 3046 Compute(static_cast<uint32_t>(x->GetValue()), 3047 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3048 } 3049 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3050 return GetBlock()->GetGraph()->GetIntConstant( 3051 Compute(static_cast<uint64_t>(x->GetValue()), 3052 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3053 } 3054 3055 DECLARE_INSTRUCTION(AboveOrEqual); 3056 3057 IfCondition GetCondition() const OVERRIDE { 3058 return kCondAE; 3059 } 3060 3061 IfCondition GetOppositeCondition() const OVERRIDE { 3062 return kCondB; 3063 } 3064 3065 private: 3066 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3067 3068 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3069}; 3070 3071// Instruction to check how two inputs compare to each other. 3072// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3073class HCompare : public HBinaryOperation { 3074 public: 3075 HCompare(Primitive::Type type, 3076 HInstruction* first, 3077 HInstruction* second, 3078 ComparisonBias bias, 3079 uint32_t dex_pc) 3080 : HBinaryOperation(Primitive::kPrimInt, 3081 first, 3082 second, 3083 SideEffectsForArchRuntimeCalls(type), 3084 dex_pc), 3085 bias_(bias) { 3086 DCHECK_EQ(type, first->GetType()); 3087 DCHECK_EQ(type, second->GetType()); 3088 } 3089 3090 template <typename T> 3091 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 1 : -1; } 3092 3093 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3094 return GetBlock()->GetGraph()->GetIntConstant( 3095 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3096 } 3097 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3098 return GetBlock()->GetGraph()->GetIntConstant( 3099 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3100 } 3101 3102 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3103 return bias_ == other->AsCompare()->bias_; 3104 } 3105 3106 ComparisonBias GetBias() const { return bias_; } 3107 3108 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3109 3110 3111 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3112 // MIPS64 uses a runtime call for FP comparisons. 3113 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3114 } 3115 3116 DECLARE_INSTRUCTION(Compare); 3117 3118 private: 3119 const ComparisonBias bias_; 3120 3121 DISALLOW_COPY_AND_ASSIGN(HCompare); 3122}; 3123 3124// A local in the graph. Corresponds to a Dex register. 3125class HLocal : public HTemplateInstruction<0> { 3126 public: 3127 explicit HLocal(uint16_t reg_number) 3128 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3129 3130 DECLARE_INSTRUCTION(Local); 3131 3132 uint16_t GetRegNumber() const { return reg_number_; } 3133 3134 private: 3135 // The Dex register number. 3136 const uint16_t reg_number_; 3137 3138 DISALLOW_COPY_AND_ASSIGN(HLocal); 3139}; 3140 3141// Load a given local. The local is an input of this instruction. 3142class HLoadLocal : public HExpression<1> { 3143 public: 3144 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3145 : HExpression(type, SideEffects::None(), dex_pc) { 3146 SetRawInputAt(0, local); 3147 } 3148 3149 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3150 3151 DECLARE_INSTRUCTION(LoadLocal); 3152 3153 private: 3154 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3155}; 3156 3157// Store a value in a given local. This instruction has two inputs: the value 3158// and the local. 3159class HStoreLocal : public HTemplateInstruction<2> { 3160 public: 3161 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3162 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3163 SetRawInputAt(0, local); 3164 SetRawInputAt(1, value); 3165 } 3166 3167 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3168 3169 DECLARE_INSTRUCTION(StoreLocal); 3170 3171 private: 3172 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3173}; 3174 3175class HFloatConstant : public HConstant { 3176 public: 3177 float GetValue() const { return value_; } 3178 3179 uint64_t GetValueAsUint64() const OVERRIDE { 3180 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3181 } 3182 3183 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3184 DCHECK(other->IsFloatConstant()); 3185 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3186 } 3187 3188 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3189 3190 bool IsMinusOne() const OVERRIDE { 3191 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3192 } 3193 bool IsZero() const OVERRIDE { 3194 return value_ == 0.0f; 3195 } 3196 bool IsOne() const OVERRIDE { 3197 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3198 } 3199 bool IsNaN() const { 3200 return std::isnan(value_); 3201 } 3202 3203 DECLARE_INSTRUCTION(FloatConstant); 3204 3205 private: 3206 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3207 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3208 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3209 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3210 3211 const float value_; 3212 3213 // Only the SsaBuilder and HGraph can create floating-point constants. 3214 friend class SsaBuilder; 3215 friend class HGraph; 3216 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3217}; 3218 3219class HDoubleConstant : public HConstant { 3220 public: 3221 double GetValue() const { return value_; } 3222 3223 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3224 3225 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3226 DCHECK(other->IsDoubleConstant()); 3227 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3228 } 3229 3230 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3231 3232 bool IsMinusOne() const OVERRIDE { 3233 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3234 } 3235 bool IsZero() const OVERRIDE { 3236 return value_ == 0.0; 3237 } 3238 bool IsOne() const OVERRIDE { 3239 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3240 } 3241 bool IsNaN() const { 3242 return std::isnan(value_); 3243 } 3244 3245 DECLARE_INSTRUCTION(DoubleConstant); 3246 3247 private: 3248 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3249 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3250 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3251 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3252 3253 const double value_; 3254 3255 // Only the SsaBuilder and HGraph can create floating-point constants. 3256 friend class SsaBuilder; 3257 friend class HGraph; 3258 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 3259}; 3260 3261enum class Intrinsics { 3262#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \ 3263 k ## Name, 3264#include "intrinsics_list.h" 3265 kNone, 3266 INTRINSICS_LIST(OPTIMIZING_INTRINSICS) 3267#undef INTRINSICS_LIST 3268#undef OPTIMIZING_INTRINSICS 3269}; 3270std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic); 3271 3272enum IntrinsicNeedsEnvironmentOrCache { 3273 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3274 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3275}; 3276 3277enum IntrinsicSideEffects { 3278 kNoSideEffects, // Intrinsic does not have any heap memory side effects. 3279 kReadSideEffects, // Intrinsic may read heap memory. 3280 kWriteSideEffects, // Intrinsic may write heap memory. 3281 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC. 3282}; 3283 3284enum IntrinsicExceptions { 3285 kNoThrow, // Intrinsic does not throw any exceptions. 3286 kCanThrow // Intrinsic may throw exceptions. 3287}; 3288 3289class HInvoke : public HInstruction { 3290 public: 3291 size_t InputCount() const OVERRIDE { return inputs_.size(); } 3292 3293 bool NeedsEnvironment() const OVERRIDE; 3294 3295 void SetArgumentAt(size_t index, HInstruction* argument) { 3296 SetRawInputAt(index, argument); 3297 } 3298 3299 // Return the number of arguments. This number can be lower than 3300 // the number of inputs returned by InputCount(), as some invoke 3301 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3302 // inputs at the end of their list of inputs. 3303 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 3304 3305 Primitive::Type GetType() const OVERRIDE { return return_type_; } 3306 3307 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3308 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); } 3309 3310 InvokeType GetOriginalInvokeType() const { return original_invoke_type_; } 3311 3312 Intrinsics GetIntrinsic() const { 3313 return intrinsic_; 3314 } 3315 3316 void SetIntrinsic(Intrinsics intrinsic, 3317 IntrinsicNeedsEnvironmentOrCache needs_env_or_cache, 3318 IntrinsicSideEffects side_effects, 3319 IntrinsicExceptions exceptions); 3320 3321 bool IsFromInlinedInvoke() const { 3322 return GetEnvironment()->IsFromInlinedInvoke(); 3323 } 3324 3325 bool CanThrow() const OVERRIDE { return can_throw_; } 3326 3327 bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); } 3328 3329 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3330 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_; 3331 } 3332 3333 uint32_t* GetIntrinsicOptimizations() { 3334 return &intrinsic_optimizations_; 3335 } 3336 3337 const uint32_t* GetIntrinsicOptimizations() const { 3338 return &intrinsic_optimizations_; 3339 } 3340 3341 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 3342 3343 DECLARE_ABSTRACT_INSTRUCTION(Invoke); 3344 3345 protected: 3346 HInvoke(ArenaAllocator* arena, 3347 uint32_t number_of_arguments, 3348 uint32_t number_of_other_inputs, 3349 Primitive::Type return_type, 3350 uint32_t dex_pc, 3351 uint32_t dex_method_index, 3352 InvokeType original_invoke_type) 3353 : HInstruction( 3354 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 3355 number_of_arguments_(number_of_arguments), 3356 inputs_(number_of_arguments + number_of_other_inputs, 3357 arena->Adapter(kArenaAllocInvokeInputs)), 3358 return_type_(return_type), 3359 dex_method_index_(dex_method_index), 3360 original_invoke_type_(original_invoke_type), 3361 can_throw_(true), 3362 intrinsic_(Intrinsics::kNone), 3363 intrinsic_optimizations_(0) { 3364 } 3365 3366 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 3367 return inputs_[index]; 3368 } 3369 3370 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 3371 inputs_[index] = input; 3372 } 3373 3374 void SetCanThrow(bool can_throw) { can_throw_ = can_throw; } 3375 3376 uint32_t number_of_arguments_; 3377 ArenaVector<HUserRecord<HInstruction*>> inputs_; 3378 const Primitive::Type return_type_; 3379 const uint32_t dex_method_index_; 3380 const InvokeType original_invoke_type_; 3381 bool can_throw_; 3382 Intrinsics intrinsic_; 3383 3384 // A magic word holding optimizations for intrinsics. See intrinsics.h. 3385 uint32_t intrinsic_optimizations_; 3386 3387 private: 3388 DISALLOW_COPY_AND_ASSIGN(HInvoke); 3389}; 3390 3391class HInvokeUnresolved : public HInvoke { 3392 public: 3393 HInvokeUnresolved(ArenaAllocator* arena, 3394 uint32_t number_of_arguments, 3395 Primitive::Type return_type, 3396 uint32_t dex_pc, 3397 uint32_t dex_method_index, 3398 InvokeType invoke_type) 3399 : HInvoke(arena, 3400 number_of_arguments, 3401 0u /* number_of_other_inputs */, 3402 return_type, 3403 dex_pc, 3404 dex_method_index, 3405 invoke_type) { 3406 } 3407 3408 DECLARE_INSTRUCTION(InvokeUnresolved); 3409 3410 private: 3411 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 3412}; 3413 3414class HInvokeStaticOrDirect : public HInvoke { 3415 public: 3416 // Requirements of this method call regarding the class 3417 // initialization (clinit) check of its declaring class. 3418 enum class ClinitCheckRequirement { 3419 kNone, // Class already initialized. 3420 kExplicit, // Static call having explicit clinit check as last input. 3421 kImplicit, // Static call implicitly requiring a clinit check. 3422 }; 3423 3424 // Determines how to load the target ArtMethod*. 3425 enum class MethodLoadKind { 3426 // Use a String init ArtMethod* loaded from Thread entrypoints. 3427 kStringInit, 3428 3429 // Use the method's own ArtMethod* loaded by the register allocator. 3430 kRecursive, 3431 3432 // Use ArtMethod* at a known address, embed the direct address in the code. 3433 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 3434 kDirectAddress, 3435 3436 // Use ArtMethod* at an address that will be known at link time, embed the direct 3437 // address in the code. If the image is relocatable, emit .patch_oat entry. 3438 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3439 // the image relocatable or not. 3440 kDirectAddressWithFixup, 3441 3442 // Load from resoved methods array in the dex cache using a PC-relative load. 3443 // Used when we need to use the dex cache, for example for invoke-static that 3444 // may cause class initialization (the entry may point to a resolution method), 3445 // and we know that we can access the dex cache arrays using a PC-relative load. 3446 kDexCachePcRelative, 3447 3448 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3449 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3450 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3451 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3452 kDexCacheViaMethod, 3453 }; 3454 3455 // Determines the location of the code pointer. 3456 enum class CodePtrLocation { 3457 // Recursive call, use local PC-relative call instruction. 3458 kCallSelf, 3459 3460 // Use PC-relative call instruction patched at link time. 3461 // Used for calls within an oat file, boot->boot or app->app. 3462 kCallPCRelative, 3463 3464 // Call to a known target address, embed the direct address in code. 3465 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3466 kCallDirect, 3467 3468 // Call to a target address that will be known at link time, embed the direct 3469 // address in code. If the image is relocatable, emit .patch_oat entry. 3470 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3471 // the image relocatable or not. 3472 kCallDirectWithFixup, 3473 3474 // Use code pointer from the ArtMethod*. 3475 // Used when we don't know the target code. This is also the last-resort-kind used when 3476 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 3477 kCallArtMethod, 3478 }; 3479 3480 struct DispatchInfo { 3481 MethodLoadKind method_load_kind; 3482 CodePtrLocation code_ptr_location; 3483 // The method load data holds 3484 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3485 // Note that there are multiple string init methods, each having its own offset. 3486 // - the method address for kDirectAddress 3487 // - the dex cache arrays offset for kDexCachePcRel. 3488 uint64_t method_load_data; 3489 uint64_t direct_code_ptr; 3490 }; 3491 3492 HInvokeStaticOrDirect(ArenaAllocator* arena, 3493 uint32_t number_of_arguments, 3494 Primitive::Type return_type, 3495 uint32_t dex_pc, 3496 uint32_t method_index, 3497 MethodReference target_method, 3498 DispatchInfo dispatch_info, 3499 InvokeType original_invoke_type, 3500 InvokeType optimized_invoke_type, 3501 ClinitCheckRequirement clinit_check_requirement) 3502 : HInvoke(arena, 3503 number_of_arguments, 3504 // There is potentially one extra argument for the HCurrentMethod node, and 3505 // potentially one other if the clinit check is explicit, and potentially 3506 // one other if the method is a string factory. 3507 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) + 3508 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) + 3509 (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u), 3510 return_type, 3511 dex_pc, 3512 method_index, 3513 original_invoke_type), 3514 optimized_invoke_type_(optimized_invoke_type), 3515 clinit_check_requirement_(clinit_check_requirement), 3516 target_method_(target_method), 3517 dispatch_info_(dispatch_info) { } 3518 3519 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 3520 bool had_current_method_input = HasCurrentMethodInput(); 3521 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind); 3522 3523 // Using the current method is the default and once we find a better 3524 // method load kind, we should not go back to using the current method. 3525 DCHECK(had_current_method_input || !needs_current_method_input); 3526 3527 if (had_current_method_input && !needs_current_method_input) { 3528 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod()); 3529 RemoveInputAt(GetSpecialInputIndex()); 3530 } 3531 dispatch_info_ = dispatch_info; 3532 } 3533 3534 void AddSpecialInput(HInstruction* input) { 3535 // We allow only one special input. 3536 DCHECK(!IsStringInit() && !HasCurrentMethodInput()); 3537 DCHECK(InputCount() == GetSpecialInputIndex() || 3538 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck())); 3539 InsertInputAt(GetSpecialInputIndex(), input); 3540 } 3541 3542 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3543 // We access the method via the dex cache so we can't do an implicit null check. 3544 // TODO: for intrinsics we can generate implicit null checks. 3545 return false; 3546 } 3547 3548 bool CanBeNull() const OVERRIDE { 3549 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3550 } 3551 3552 // Get the index of the special input, if any. 3553 // 3554 // If the invoke IsStringInit(), it initially has a HFakeString special argument 3555 // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(), 3556 // the "special input" is the current method pointer; otherwise there may be one 3557 // platform-specific special input, such as PC-relative addressing base. 3558 uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); } 3559 3560 InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; } 3561 void SetOptimizedInvokeType(InvokeType invoke_type) { 3562 optimized_invoke_type_ = invoke_type; 3563 } 3564 3565 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3566 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3567 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3568 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE; 3569 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3570 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3571 bool HasPcRelativeDexCache() const { 3572 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; 3573 } 3574 bool HasCurrentMethodInput() const { 3575 // This function can be called only after the invoke has been fully initialized by the builder. 3576 if (NeedsCurrentMethodInput(GetMethodLoadKind())) { 3577 DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod()); 3578 return true; 3579 } else { 3580 DCHECK(InputCount() == GetSpecialInputIndex() || 3581 !InputAt(GetSpecialInputIndex())->IsCurrentMethod()); 3582 return false; 3583 } 3584 } 3585 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3586 MethodReference GetTargetMethod() const { return target_method_; } 3587 void SetTargetMethod(MethodReference method) { target_method_ = method; } 3588 3589 int32_t GetStringInitOffset() const { 3590 DCHECK(IsStringInit()); 3591 return dispatch_info_.method_load_data; 3592 } 3593 3594 uint64_t GetMethodAddress() const { 3595 DCHECK(HasMethodAddress()); 3596 return dispatch_info_.method_load_data; 3597 } 3598 3599 uint32_t GetDexCacheArrayOffset() const { 3600 DCHECK(HasPcRelativeDexCache()); 3601 return dispatch_info_.method_load_data; 3602 } 3603 3604 uint64_t GetDirectCodePtr() const { 3605 DCHECK(HasDirectCodePtr()); 3606 return dispatch_info_.direct_code_ptr; 3607 } 3608 3609 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3610 3611 // Is this instruction a call to a static method? 3612 bool IsStatic() const { 3613 return GetOriginalInvokeType() == kStatic; 3614 } 3615 3616 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by 3617 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck) 3618 // instruction; only relevant for static calls with explicit clinit check. 3619 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) { 3620 DCHECK(IsStaticWithExplicitClinitCheck()); 3621 size_t last_input_index = InputCount() - 1; 3622 HInstruction* last_input = InputAt(last_input_index); 3623 DCHECK(last_input != nullptr); 3624 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName(); 3625 RemoveAsUserOfInput(last_input_index); 3626 inputs_.pop_back(); 3627 clinit_check_requirement_ = new_requirement; 3628 DCHECK(!IsStaticWithExplicitClinitCheck()); 3629 } 3630 3631 bool IsStringFactoryFor(HFakeString* str) const { 3632 if (!IsStringInit()) return false; 3633 DCHECK(!HasCurrentMethodInput()); 3634 if (InputCount() == (number_of_arguments_)) return false; 3635 return InputAt(InputCount() - 1)->AsFakeString() == str; 3636 } 3637 3638 void RemoveFakeStringArgumentAsLastInput() { 3639 DCHECK(IsStringInit()); 3640 size_t last_input_index = InputCount() - 1; 3641 HInstruction* last_input = InputAt(last_input_index); 3642 DCHECK(last_input != nullptr); 3643 DCHECK(last_input->IsFakeString()) << last_input->DebugName(); 3644 RemoveAsUserOfInput(last_input_index); 3645 inputs_.pop_back(); 3646 } 3647 3648 // Is this a call to a static method whose declaring class has an 3649 // explicit initialization check in the graph? 3650 bool IsStaticWithExplicitClinitCheck() const { 3651 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3652 } 3653 3654 // Is this a call to a static method whose declaring class has an 3655 // implicit intialization check requirement? 3656 bool IsStaticWithImplicitClinitCheck() const { 3657 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3658 } 3659 3660 // Does this method load kind need the current method as an input? 3661 static bool NeedsCurrentMethodInput(MethodLoadKind kind) { 3662 return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod; 3663 } 3664 3665 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3666 3667 protected: 3668 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3669 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3670 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3671 HInstruction* input = input_record.GetInstruction(); 3672 // `input` is the last input of a static invoke marked as having 3673 // an explicit clinit check. It must either be: 3674 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3675 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3676 DCHECK(input != nullptr); 3677 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3678 } 3679 return input_record; 3680 } 3681 3682 void InsertInputAt(size_t index, HInstruction* input); 3683 void RemoveInputAt(size_t index); 3684 3685 private: 3686 InvokeType optimized_invoke_type_; 3687 ClinitCheckRequirement clinit_check_requirement_; 3688 // The target method may refer to different dex file or method index than the original 3689 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3690 // in derived class to increase visibility. 3691 MethodReference target_method_; 3692 DispatchInfo dispatch_info_; 3693 3694 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3695}; 3696std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs); 3697std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs); 3698 3699class HInvokeVirtual : public HInvoke { 3700 public: 3701 HInvokeVirtual(ArenaAllocator* arena, 3702 uint32_t number_of_arguments, 3703 Primitive::Type return_type, 3704 uint32_t dex_pc, 3705 uint32_t dex_method_index, 3706 uint32_t vtable_index) 3707 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3708 vtable_index_(vtable_index) {} 3709 3710 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3711 // TODO: Add implicit null checks in intrinsics. 3712 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3713 } 3714 3715 uint32_t GetVTableIndex() const { return vtable_index_; } 3716 3717 DECLARE_INSTRUCTION(InvokeVirtual); 3718 3719 private: 3720 const uint32_t vtable_index_; 3721 3722 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3723}; 3724 3725class HInvokeInterface : public HInvoke { 3726 public: 3727 HInvokeInterface(ArenaAllocator* arena, 3728 uint32_t number_of_arguments, 3729 Primitive::Type return_type, 3730 uint32_t dex_pc, 3731 uint32_t dex_method_index, 3732 uint32_t imt_index) 3733 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3734 imt_index_(imt_index) {} 3735 3736 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3737 // TODO: Add implicit null checks in intrinsics. 3738 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3739 } 3740 3741 uint32_t GetImtIndex() const { return imt_index_; } 3742 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3743 3744 DECLARE_INSTRUCTION(InvokeInterface); 3745 3746 private: 3747 const uint32_t imt_index_; 3748 3749 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3750}; 3751 3752class HNewInstance : public HExpression<2> { 3753 public: 3754 HNewInstance(HInstruction* cls, 3755 HCurrentMethod* current_method, 3756 uint32_t dex_pc, 3757 uint16_t type_index, 3758 const DexFile& dex_file, 3759 bool can_throw, 3760 bool finalizable, 3761 QuickEntrypointEnum entrypoint) 3762 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3763 type_index_(type_index), 3764 dex_file_(dex_file), 3765 can_throw_(can_throw), 3766 finalizable_(finalizable), 3767 entrypoint_(entrypoint) { 3768 SetRawInputAt(0, cls); 3769 SetRawInputAt(1, current_method); 3770 } 3771 3772 uint16_t GetTypeIndex() const { return type_index_; } 3773 const DexFile& GetDexFile() const { return dex_file_; } 3774 3775 // Calls runtime so needs an environment. 3776 bool NeedsEnvironment() const OVERRIDE { return true; } 3777 3778 // It may throw when called on type that's not instantiable/accessible. 3779 // It can throw OOME. 3780 // TODO: distinguish between the two cases so we can for example allow allocation elimination. 3781 bool CanThrow() const OVERRIDE { return can_throw_ || true; } 3782 3783 bool IsFinalizable() const { return finalizable_; } 3784 3785 bool CanBeNull() const OVERRIDE { return false; } 3786 3787 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3788 3789 void SetEntrypoint(QuickEntrypointEnum entrypoint) { 3790 entrypoint_ = entrypoint; 3791 } 3792 3793 DECLARE_INSTRUCTION(NewInstance); 3794 3795 private: 3796 const uint16_t type_index_; 3797 const DexFile& dex_file_; 3798 const bool can_throw_; 3799 const bool finalizable_; 3800 QuickEntrypointEnum entrypoint_; 3801 3802 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3803}; 3804 3805class HNeg : public HUnaryOperation { 3806 public: 3807 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3808 : HUnaryOperation(result_type, input, dex_pc) {} 3809 3810 template <typename T> T Compute(T x) const { return -x; } 3811 3812 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3813 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3814 } 3815 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3816 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3817 } 3818 3819 DECLARE_INSTRUCTION(Neg); 3820 3821 private: 3822 DISALLOW_COPY_AND_ASSIGN(HNeg); 3823}; 3824 3825class HNewArray : public HExpression<2> { 3826 public: 3827 HNewArray(HInstruction* length, 3828 HCurrentMethod* current_method, 3829 uint32_t dex_pc, 3830 uint16_t type_index, 3831 const DexFile& dex_file, 3832 QuickEntrypointEnum entrypoint) 3833 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3834 type_index_(type_index), 3835 dex_file_(dex_file), 3836 entrypoint_(entrypoint) { 3837 SetRawInputAt(0, length); 3838 SetRawInputAt(1, current_method); 3839 } 3840 3841 uint16_t GetTypeIndex() const { return type_index_; } 3842 const DexFile& GetDexFile() const { return dex_file_; } 3843 3844 // Calls runtime so needs an environment. 3845 bool NeedsEnvironment() const OVERRIDE { return true; } 3846 3847 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 3848 bool CanThrow() const OVERRIDE { return true; } 3849 3850 bool CanBeNull() const OVERRIDE { return false; } 3851 3852 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3853 3854 DECLARE_INSTRUCTION(NewArray); 3855 3856 private: 3857 const uint16_t type_index_; 3858 const DexFile& dex_file_; 3859 const QuickEntrypointEnum entrypoint_; 3860 3861 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3862}; 3863 3864class HAdd : public HBinaryOperation { 3865 public: 3866 HAdd(Primitive::Type result_type, 3867 HInstruction* left, 3868 HInstruction* right, 3869 uint32_t dex_pc = kNoDexPc) 3870 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3871 3872 bool IsCommutative() const OVERRIDE { return true; } 3873 3874 template <typename T> T Compute(T x, T y) const { return x + y; } 3875 3876 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3877 return GetBlock()->GetGraph()->GetIntConstant( 3878 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3879 } 3880 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3881 return GetBlock()->GetGraph()->GetLongConstant( 3882 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3883 } 3884 3885 DECLARE_INSTRUCTION(Add); 3886 3887 private: 3888 DISALLOW_COPY_AND_ASSIGN(HAdd); 3889}; 3890 3891class HSub : public HBinaryOperation { 3892 public: 3893 HSub(Primitive::Type result_type, 3894 HInstruction* left, 3895 HInstruction* right, 3896 uint32_t dex_pc = kNoDexPc) 3897 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3898 3899 template <typename T> T Compute(T x, T y) const { return x - y; } 3900 3901 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3902 return GetBlock()->GetGraph()->GetIntConstant( 3903 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3904 } 3905 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3906 return GetBlock()->GetGraph()->GetLongConstant( 3907 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3908 } 3909 3910 DECLARE_INSTRUCTION(Sub); 3911 3912 private: 3913 DISALLOW_COPY_AND_ASSIGN(HSub); 3914}; 3915 3916class HMul : public HBinaryOperation { 3917 public: 3918 HMul(Primitive::Type result_type, 3919 HInstruction* left, 3920 HInstruction* right, 3921 uint32_t dex_pc = kNoDexPc) 3922 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3923 3924 bool IsCommutative() const OVERRIDE { return true; } 3925 3926 template <typename T> T Compute(T x, T y) const { return x * y; } 3927 3928 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3929 return GetBlock()->GetGraph()->GetIntConstant( 3930 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3931 } 3932 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3933 return GetBlock()->GetGraph()->GetLongConstant( 3934 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3935 } 3936 3937 DECLARE_INSTRUCTION(Mul); 3938 3939 private: 3940 DISALLOW_COPY_AND_ASSIGN(HMul); 3941}; 3942 3943class HDiv : public HBinaryOperation { 3944 public: 3945 HDiv(Primitive::Type result_type, 3946 HInstruction* left, 3947 HInstruction* right, 3948 uint32_t dex_pc) 3949 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3950 3951 template <typename T> 3952 T Compute(T x, T y) const { 3953 // Our graph structure ensures we never have 0 for `y` during 3954 // constant folding. 3955 DCHECK_NE(y, 0); 3956 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3957 return (y == -1) ? -x : x / y; 3958 } 3959 3960 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3961 return GetBlock()->GetGraph()->GetIntConstant( 3962 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3963 } 3964 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3965 return GetBlock()->GetGraph()->GetLongConstant( 3966 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3967 } 3968 3969 static SideEffects SideEffectsForArchRuntimeCalls() { 3970 // The generated code can use a runtime call. 3971 return SideEffects::CanTriggerGC(); 3972 } 3973 3974 DECLARE_INSTRUCTION(Div); 3975 3976 private: 3977 DISALLOW_COPY_AND_ASSIGN(HDiv); 3978}; 3979 3980class HRem : public HBinaryOperation { 3981 public: 3982 HRem(Primitive::Type result_type, 3983 HInstruction* left, 3984 HInstruction* right, 3985 uint32_t dex_pc) 3986 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3987 3988 template <typename T> 3989 T Compute(T x, T y) const { 3990 // Our graph structure ensures we never have 0 for `y` during 3991 // constant folding. 3992 DCHECK_NE(y, 0); 3993 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3994 return (y == -1) ? 0 : x % y; 3995 } 3996 3997 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3998 return GetBlock()->GetGraph()->GetIntConstant( 3999 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4000 } 4001 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4002 return GetBlock()->GetGraph()->GetLongConstant( 4003 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4004 } 4005 4006 4007 static SideEffects SideEffectsForArchRuntimeCalls() { 4008 return SideEffects::CanTriggerGC(); 4009 } 4010 4011 DECLARE_INSTRUCTION(Rem); 4012 4013 private: 4014 DISALLOW_COPY_AND_ASSIGN(HRem); 4015}; 4016 4017class HDivZeroCheck : public HExpression<1> { 4018 public: 4019 HDivZeroCheck(HInstruction* value, uint32_t dex_pc) 4020 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 4021 SetRawInputAt(0, value); 4022 } 4023 4024 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); } 4025 4026 bool CanBeMoved() const OVERRIDE { return true; } 4027 4028 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4029 return true; 4030 } 4031 4032 bool NeedsEnvironment() const OVERRIDE { return true; } 4033 bool CanThrow() const OVERRIDE { return true; } 4034 4035 DECLARE_INSTRUCTION(DivZeroCheck); 4036 4037 private: 4038 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck); 4039}; 4040 4041class HShl : public HBinaryOperation { 4042 public: 4043 HShl(Primitive::Type result_type, 4044 HInstruction* left, 4045 HInstruction* right, 4046 uint32_t dex_pc = kNoDexPc) 4047 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4048 4049 template <typename T, typename U, typename V> 4050 T Compute(T x, U y, V max_shift_value) const { 4051 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4052 "V is not the unsigned integer type corresponding to T"); 4053 return x << (y & max_shift_value); 4054 } 4055 4056 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4057 return GetBlock()->GetGraph()->GetIntConstant( 4058 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4059 } 4060 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4061 // case is handled as `x << static_cast<int>(y)`. 4062 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4063 return GetBlock()->GetGraph()->GetLongConstant( 4064 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4065 } 4066 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4067 return GetBlock()->GetGraph()->GetLongConstant( 4068 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4069 } 4070 4071 DECLARE_INSTRUCTION(Shl); 4072 4073 private: 4074 DISALLOW_COPY_AND_ASSIGN(HShl); 4075}; 4076 4077class HShr : public HBinaryOperation { 4078 public: 4079 HShr(Primitive::Type result_type, 4080 HInstruction* left, 4081 HInstruction* right, 4082 uint32_t dex_pc = kNoDexPc) 4083 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4084 4085 template <typename T, typename U, typename V> 4086 T Compute(T x, U y, V max_shift_value) const { 4087 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4088 "V is not the unsigned integer type corresponding to T"); 4089 return x >> (y & max_shift_value); 4090 } 4091 4092 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4093 return GetBlock()->GetGraph()->GetIntConstant( 4094 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4095 } 4096 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4097 // case is handled as `x >> static_cast<int>(y)`. 4098 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4099 return GetBlock()->GetGraph()->GetLongConstant( 4100 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4101 } 4102 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4103 return GetBlock()->GetGraph()->GetLongConstant( 4104 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4105 } 4106 4107 DECLARE_INSTRUCTION(Shr); 4108 4109 private: 4110 DISALLOW_COPY_AND_ASSIGN(HShr); 4111}; 4112 4113class HUShr : public HBinaryOperation { 4114 public: 4115 HUShr(Primitive::Type result_type, 4116 HInstruction* left, 4117 HInstruction* right, 4118 uint32_t dex_pc = kNoDexPc) 4119 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4120 4121 template <typename T, typename U, typename V> 4122 T Compute(T x, U y, V max_shift_value) const { 4123 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4124 "V is not the unsigned integer type corresponding to T"); 4125 V ux = static_cast<V>(x); 4126 return static_cast<T>(ux >> (y & max_shift_value)); 4127 } 4128 4129 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4130 return GetBlock()->GetGraph()->GetIntConstant( 4131 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4132 } 4133 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4134 // case is handled as `x >>> static_cast<int>(y)`. 4135 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4136 return GetBlock()->GetGraph()->GetLongConstant( 4137 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4138 } 4139 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4140 return GetBlock()->GetGraph()->GetLongConstant( 4141 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4142 } 4143 4144 DECLARE_INSTRUCTION(UShr); 4145 4146 private: 4147 DISALLOW_COPY_AND_ASSIGN(HUShr); 4148}; 4149 4150class HAnd : public HBinaryOperation { 4151 public: 4152 HAnd(Primitive::Type result_type, 4153 HInstruction* left, 4154 HInstruction* right, 4155 uint32_t dex_pc = kNoDexPc) 4156 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4157 4158 bool IsCommutative() const OVERRIDE { return true; } 4159 4160 template <typename T, typename U> 4161 auto Compute(T x, U y) const -> decltype(x & y) { return x & y; } 4162 4163 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4164 return GetBlock()->GetGraph()->GetIntConstant( 4165 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4166 } 4167 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4168 return GetBlock()->GetGraph()->GetLongConstant( 4169 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4170 } 4171 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4172 return GetBlock()->GetGraph()->GetLongConstant( 4173 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4174 } 4175 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4176 return GetBlock()->GetGraph()->GetLongConstant( 4177 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4178 } 4179 4180 DECLARE_INSTRUCTION(And); 4181 4182 private: 4183 DISALLOW_COPY_AND_ASSIGN(HAnd); 4184}; 4185 4186class HOr : public HBinaryOperation { 4187 public: 4188 HOr(Primitive::Type result_type, 4189 HInstruction* left, 4190 HInstruction* right, 4191 uint32_t dex_pc = kNoDexPc) 4192 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4193 4194 bool IsCommutative() const OVERRIDE { return true; } 4195 4196 template <typename T, typename U> 4197 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 4198 4199 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4200 return GetBlock()->GetGraph()->GetIntConstant( 4201 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4202 } 4203 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4204 return GetBlock()->GetGraph()->GetLongConstant( 4205 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4206 } 4207 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4208 return GetBlock()->GetGraph()->GetLongConstant( 4209 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4210 } 4211 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4212 return GetBlock()->GetGraph()->GetLongConstant( 4213 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4214 } 4215 4216 DECLARE_INSTRUCTION(Or); 4217 4218 private: 4219 DISALLOW_COPY_AND_ASSIGN(HOr); 4220}; 4221 4222class HXor : public HBinaryOperation { 4223 public: 4224 HXor(Primitive::Type result_type, 4225 HInstruction* left, 4226 HInstruction* right, 4227 uint32_t dex_pc = kNoDexPc) 4228 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4229 4230 bool IsCommutative() const OVERRIDE { return true; } 4231 4232 template <typename T, typename U> 4233 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 4234 4235 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4236 return GetBlock()->GetGraph()->GetIntConstant( 4237 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4238 } 4239 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4240 return GetBlock()->GetGraph()->GetLongConstant( 4241 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4242 } 4243 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4244 return GetBlock()->GetGraph()->GetLongConstant( 4245 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4246 } 4247 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4248 return GetBlock()->GetGraph()->GetLongConstant( 4249 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4250 } 4251 4252 DECLARE_INSTRUCTION(Xor); 4253 4254 private: 4255 DISALLOW_COPY_AND_ASSIGN(HXor); 4256}; 4257 4258class HRor : public HBinaryOperation { 4259 public: 4260 HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance) 4261 : HBinaryOperation(result_type, value, distance) {} 4262 4263 template <typename T, typename U, typename V> 4264 T Compute(T x, U y, V max_shift_value) const { 4265 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4266 "V is not the unsigned integer type corresponding to T"); 4267 V ux = static_cast<V>(x); 4268 if ((y & max_shift_value) == 0) { 4269 return static_cast<T>(ux); 4270 } else { 4271 const V reg_bits = sizeof(T) * 8; 4272 return static_cast<T>(ux >> (y & max_shift_value)) | 4273 (x << (reg_bits - (y & max_shift_value))); 4274 } 4275 } 4276 4277 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4278 return GetBlock()->GetGraph()->GetIntConstant( 4279 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4280 } 4281 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4282 return GetBlock()->GetGraph()->GetLongConstant( 4283 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4284 } 4285 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4286 return GetBlock()->GetGraph()->GetLongConstant( 4287 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4288 } 4289 4290 DECLARE_INSTRUCTION(Ror); 4291 4292 private: 4293 DISALLOW_COPY_AND_ASSIGN(HRor); 4294}; 4295 4296// The value of a parameter in this method. Its location depends on 4297// the calling convention. 4298class HParameterValue : public HExpression<0> { 4299 public: 4300 HParameterValue(const DexFile& dex_file, 4301 uint16_t type_index, 4302 uint8_t index, 4303 Primitive::Type parameter_type, 4304 bool is_this = false) 4305 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 4306 dex_file_(dex_file), 4307 type_index_(type_index), 4308 index_(index), 4309 is_this_(is_this), 4310 can_be_null_(!is_this) {} 4311 4312 const DexFile& GetDexFile() const { return dex_file_; } 4313 uint16_t GetTypeIndex() const { return type_index_; } 4314 uint8_t GetIndex() const { return index_; } 4315 bool IsThis() const { return is_this_; } 4316 4317 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4318 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4319 4320 DECLARE_INSTRUCTION(ParameterValue); 4321 4322 private: 4323 const DexFile& dex_file_; 4324 const uint16_t type_index_; 4325 // The index of this parameter in the parameters list. Must be less 4326 // than HGraph::number_of_in_vregs_. 4327 const uint8_t index_; 4328 4329 // Whether or not the parameter value corresponds to 'this' argument. 4330 const bool is_this_; 4331 4332 bool can_be_null_; 4333 4334 DISALLOW_COPY_AND_ASSIGN(HParameterValue); 4335}; 4336 4337class HNot : public HUnaryOperation { 4338 public: 4339 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 4340 : HUnaryOperation(result_type, input, dex_pc) {} 4341 4342 bool CanBeMoved() const OVERRIDE { return true; } 4343 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4344 return true; 4345 } 4346 4347 template <typename T> T Compute(T x) const { return ~x; } 4348 4349 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4350 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4351 } 4352 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 4353 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 4354 } 4355 4356 DECLARE_INSTRUCTION(Not); 4357 4358 private: 4359 DISALLOW_COPY_AND_ASSIGN(HNot); 4360}; 4361 4362class HBooleanNot : public HUnaryOperation { 4363 public: 4364 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc) 4365 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {} 4366 4367 bool CanBeMoved() const OVERRIDE { return true; } 4368 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4369 return true; 4370 } 4371 4372 template <typename T> bool Compute(T x) const { 4373 DCHECK(IsUint<1>(x)); 4374 return !x; 4375 } 4376 4377 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4378 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4379 } 4380 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE { 4381 LOG(FATAL) << DebugName() << " is not defined for long values"; 4382 UNREACHABLE(); 4383 } 4384 4385 DECLARE_INSTRUCTION(BooleanNot); 4386 4387 private: 4388 DISALLOW_COPY_AND_ASSIGN(HBooleanNot); 4389}; 4390 4391class HTypeConversion : public HExpression<1> { 4392 public: 4393 // Instantiate a type conversion of `input` to `result_type`. 4394 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc) 4395 : HExpression(result_type, 4396 SideEffectsForArchRuntimeCalls(input->GetType(), result_type), 4397 dex_pc) { 4398 SetRawInputAt(0, input); 4399 DCHECK_NE(input->GetType(), result_type); 4400 } 4401 4402 HInstruction* GetInput() const { return InputAt(0); } 4403 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 4404 Primitive::Type GetResultType() const { return GetType(); } 4405 4406 // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls 4407 // to the runtime. 4408 4409 bool CanBeMoved() const OVERRIDE { return true; } 4410 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; } 4411 4412 // Try to statically evaluate the conversion and return a HConstant 4413 // containing the result. If the input cannot be converted, return nullptr. 4414 HConstant* TryStaticEvaluation() const; 4415 4416 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type, 4417 Primitive::Type result_type) { 4418 // Some architectures may not require the 'GC' side effects, but at this point 4419 // in the compilation process we do not know what architecture we will 4420 // generate code for, so we must be conservative. 4421 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type)) 4422 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) { 4423 return SideEffects::CanTriggerGC(); 4424 } 4425 return SideEffects::None(); 4426 } 4427 4428 DECLARE_INSTRUCTION(TypeConversion); 4429 4430 private: 4431 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 4432}; 4433 4434static constexpr uint32_t kNoRegNumber = -1; 4435 4436class HPhi : public HInstruction { 4437 public: 4438 HPhi(ArenaAllocator* arena, 4439 uint32_t reg_number, 4440 size_t number_of_inputs, 4441 Primitive::Type type, 4442 uint32_t dex_pc = kNoDexPc) 4443 : HInstruction(SideEffects::None(), dex_pc), 4444 inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)), 4445 reg_number_(reg_number), 4446 type_(ToPhiType(type)), 4447 // Phis are constructed live and marked dead if conflicting or unused. 4448 // Individual steps of SsaBuilder should assume that if a phi has been 4449 // marked dead, it can be ignored and will be removed by SsaPhiElimination. 4450 is_live_(true), 4451 can_be_null_(true) { 4452 DCHECK_NE(type_, Primitive::kPrimVoid); 4453 } 4454 4455 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold. 4456 static Primitive::Type ToPhiType(Primitive::Type type) { 4457 switch (type) { 4458 case Primitive::kPrimBoolean: 4459 case Primitive::kPrimByte: 4460 case Primitive::kPrimShort: 4461 case Primitive::kPrimChar: 4462 return Primitive::kPrimInt; 4463 default: 4464 return type; 4465 } 4466 } 4467 4468 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); } 4469 4470 size_t InputCount() const OVERRIDE { return inputs_.size(); } 4471 4472 void AddInput(HInstruction* input); 4473 void RemoveInputAt(size_t index); 4474 4475 Primitive::Type GetType() const OVERRIDE { return type_; } 4476 void SetType(Primitive::Type new_type) { 4477 // Make sure that only valid type changes occur. The following are allowed: 4478 // (1) int -> float/ref (primitive type propagation), 4479 // (2) long -> double (primitive type propagation). 4480 DCHECK(type_ == new_type || 4481 (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) || 4482 (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimNot) || 4483 (type_ == Primitive::kPrimLong && new_type == Primitive::kPrimDouble)); 4484 type_ = new_type; 4485 } 4486 4487 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4488 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4489 4490 uint32_t GetRegNumber() const { return reg_number_; } 4491 4492 void SetDead() { is_live_ = false; } 4493 void SetLive() { is_live_ = true; } 4494 bool IsDead() const { return !is_live_; } 4495 bool IsLive() const { return is_live_; } 4496 4497 bool IsVRegEquivalentOf(HInstruction* other) const { 4498 return other != nullptr 4499 && other->IsPhi() 4500 && other->AsPhi()->GetBlock() == GetBlock() 4501 && other->AsPhi()->GetRegNumber() == GetRegNumber(); 4502 } 4503 4504 // Returns the next equivalent phi (starting from the current one) or null if there is none. 4505 // An equivalent phi is a phi having the same dex register and type. 4506 // It assumes that phis with the same dex register are adjacent. 4507 HPhi* GetNextEquivalentPhiWithSameType() { 4508 HInstruction* next = GetNext(); 4509 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 4510 if (next->GetType() == GetType()) { 4511 return next->AsPhi(); 4512 } 4513 next = next->GetNext(); 4514 } 4515 return nullptr; 4516 } 4517 4518 DECLARE_INSTRUCTION(Phi); 4519 4520 protected: 4521 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 4522 return inputs_[index]; 4523 } 4524 4525 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 4526 inputs_[index] = input; 4527 } 4528 4529 private: 4530 ArenaVector<HUserRecord<HInstruction*> > inputs_; 4531 const uint32_t reg_number_; 4532 Primitive::Type type_; 4533 bool is_live_; 4534 bool can_be_null_; 4535 4536 DISALLOW_COPY_AND_ASSIGN(HPhi); 4537}; 4538 4539class HNullCheck : public HExpression<1> { 4540 public: 4541 HNullCheck(HInstruction* value, uint32_t dex_pc) 4542 : HExpression(value->GetType(), SideEffects::None(), dex_pc) { 4543 SetRawInputAt(0, value); 4544 } 4545 4546 bool CanBeMoved() const OVERRIDE { return true; } 4547 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4548 return true; 4549 } 4550 4551 bool NeedsEnvironment() const OVERRIDE { return true; } 4552 4553 bool CanThrow() const OVERRIDE { return true; } 4554 4555 bool CanBeNull() const OVERRIDE { return false; } 4556 4557 4558 DECLARE_INSTRUCTION(NullCheck); 4559 4560 private: 4561 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 4562}; 4563 4564class FieldInfo : public ValueObject { 4565 public: 4566 FieldInfo(MemberOffset field_offset, 4567 Primitive::Type field_type, 4568 bool is_volatile, 4569 uint32_t index, 4570 uint16_t declaring_class_def_index, 4571 const DexFile& dex_file, 4572 Handle<mirror::DexCache> dex_cache) 4573 : field_offset_(field_offset), 4574 field_type_(field_type), 4575 is_volatile_(is_volatile), 4576 index_(index), 4577 declaring_class_def_index_(declaring_class_def_index), 4578 dex_file_(dex_file), 4579 dex_cache_(dex_cache) {} 4580 4581 MemberOffset GetFieldOffset() const { return field_offset_; } 4582 Primitive::Type GetFieldType() const { return field_type_; } 4583 uint32_t GetFieldIndex() const { return index_; } 4584 uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;} 4585 const DexFile& GetDexFile() const { return dex_file_; } 4586 bool IsVolatile() const { return is_volatile_; } 4587 Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; } 4588 4589 private: 4590 const MemberOffset field_offset_; 4591 const Primitive::Type field_type_; 4592 const bool is_volatile_; 4593 const uint32_t index_; 4594 const uint16_t declaring_class_def_index_; 4595 const DexFile& dex_file_; 4596 const Handle<mirror::DexCache> dex_cache_; 4597}; 4598 4599class HInstanceFieldGet : public HExpression<1> { 4600 public: 4601 HInstanceFieldGet(HInstruction* value, 4602 Primitive::Type field_type, 4603 MemberOffset field_offset, 4604 bool is_volatile, 4605 uint32_t field_idx, 4606 uint16_t declaring_class_def_index, 4607 const DexFile& dex_file, 4608 Handle<mirror::DexCache> dex_cache, 4609 uint32_t dex_pc) 4610 : HExpression(field_type, 4611 SideEffects::FieldReadOfType(field_type, is_volatile), 4612 dex_pc), 4613 field_info_(field_offset, 4614 field_type, 4615 is_volatile, 4616 field_idx, 4617 declaring_class_def_index, 4618 dex_file, 4619 dex_cache) { 4620 SetRawInputAt(0, value); 4621 } 4622 4623 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 4624 4625 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4626 HInstanceFieldGet* other_get = other->AsInstanceFieldGet(); 4627 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 4628 } 4629 4630 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4631 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4632 } 4633 4634 size_t ComputeHashCode() const OVERRIDE { 4635 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 4636 } 4637 4638 const FieldInfo& GetFieldInfo() const { return field_info_; } 4639 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4640 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4641 bool IsVolatile() const { return field_info_.IsVolatile(); } 4642 4643 DECLARE_INSTRUCTION(InstanceFieldGet); 4644 4645 private: 4646 const FieldInfo field_info_; 4647 4648 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet); 4649}; 4650 4651class HInstanceFieldSet : public HTemplateInstruction<2> { 4652 public: 4653 HInstanceFieldSet(HInstruction* object, 4654 HInstruction* value, 4655 Primitive::Type field_type, 4656 MemberOffset field_offset, 4657 bool is_volatile, 4658 uint32_t field_idx, 4659 uint16_t declaring_class_def_index, 4660 const DexFile& dex_file, 4661 Handle<mirror::DexCache> dex_cache, 4662 uint32_t dex_pc) 4663 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 4664 dex_pc), 4665 field_info_(field_offset, 4666 field_type, 4667 is_volatile, 4668 field_idx, 4669 declaring_class_def_index, 4670 dex_file, 4671 dex_cache), 4672 value_can_be_null_(true) { 4673 SetRawInputAt(0, object); 4674 SetRawInputAt(1, value); 4675 } 4676 4677 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4678 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize; 4679 } 4680 4681 const FieldInfo& GetFieldInfo() const { return field_info_; } 4682 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 4683 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 4684 bool IsVolatile() const { return field_info_.IsVolatile(); } 4685 HInstruction* GetValue() const { return InputAt(1); } 4686 bool GetValueCanBeNull() const { return value_can_be_null_; } 4687 void ClearValueCanBeNull() { value_can_be_null_ = false; } 4688 4689 DECLARE_INSTRUCTION(InstanceFieldSet); 4690 4691 private: 4692 const FieldInfo field_info_; 4693 bool value_can_be_null_; 4694 4695 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet); 4696}; 4697 4698class HArrayGet : public HExpression<2> { 4699 public: 4700 HArrayGet(HInstruction* array, 4701 HInstruction* index, 4702 Primitive::Type type, 4703 uint32_t dex_pc, 4704 SideEffects additional_side_effects = SideEffects::None()) 4705 : HExpression(type, 4706 SideEffects::ArrayReadOfType(type).Union(additional_side_effects), 4707 dex_pc) { 4708 SetRawInputAt(0, array); 4709 SetRawInputAt(1, index); 4710 } 4711 4712 bool CanBeMoved() const OVERRIDE { return true; } 4713 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4714 return true; 4715 } 4716 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4717 // TODO: We can be smarter here. 4718 // Currently, the array access is always preceded by an ArrayLength or a NullCheck 4719 // which generates the implicit null check. There are cases when these can be removed 4720 // to produce better code. If we ever add optimizations to do so we should allow an 4721 // implicit check here (as long as the address falls in the first page). 4722 return false; 4723 } 4724 4725 bool IsEquivalentOf(HArrayGet* other) const { 4726 bool result = (GetDexPc() == other->GetDexPc()); 4727 if (kIsDebugBuild && result) { 4728 DCHECK_EQ(GetBlock(), other->GetBlock()); 4729 DCHECK_EQ(GetArray(), other->GetArray()); 4730 DCHECK_EQ(GetIndex(), other->GetIndex()); 4731 if (Primitive::IsIntOrLongType(GetType())) { 4732 DCHECK(Primitive::IsFloatingPointType(other->GetType())); 4733 } else { 4734 DCHECK(Primitive::IsFloatingPointType(GetType())); 4735 DCHECK(Primitive::IsIntOrLongType(other->GetType())); 4736 } 4737 } 4738 return result; 4739 } 4740 4741 HInstruction* GetArray() const { return InputAt(0); } 4742 HInstruction* GetIndex() const { return InputAt(1); } 4743 4744 DECLARE_INSTRUCTION(ArrayGet); 4745 4746 private: 4747 DISALLOW_COPY_AND_ASSIGN(HArrayGet); 4748}; 4749 4750class HArraySet : public HTemplateInstruction<3> { 4751 public: 4752 HArraySet(HInstruction* array, 4753 HInstruction* index, 4754 HInstruction* value, 4755 Primitive::Type expected_component_type, 4756 uint32_t dex_pc, 4757 SideEffects additional_side_effects = SideEffects::None()) 4758 : HTemplateInstruction( 4759 SideEffects::ArrayWriteOfType(expected_component_type).Union( 4760 SideEffectsForArchRuntimeCalls(value->GetType())).Union( 4761 additional_side_effects), 4762 dex_pc), 4763 expected_component_type_(expected_component_type), 4764 needs_type_check_(value->GetType() == Primitive::kPrimNot), 4765 value_can_be_null_(true), 4766 static_type_of_array_is_object_array_(false) { 4767 SetRawInputAt(0, array); 4768 SetRawInputAt(1, index); 4769 SetRawInputAt(2, value); 4770 } 4771 4772 bool NeedsEnvironment() const OVERRIDE { 4773 // We currently always call a runtime method to catch array store 4774 // exceptions. 4775 return needs_type_check_; 4776 } 4777 4778 // Can throw ArrayStoreException. 4779 bool CanThrow() const OVERRIDE { return needs_type_check_; } 4780 4781 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4782 // TODO: Same as for ArrayGet. 4783 return false; 4784 } 4785 4786 void ClearNeedsTypeCheck() { 4787 needs_type_check_ = false; 4788 } 4789 4790 void ClearValueCanBeNull() { 4791 value_can_be_null_ = false; 4792 } 4793 4794 void SetStaticTypeOfArrayIsObjectArray() { 4795 static_type_of_array_is_object_array_ = true; 4796 } 4797 4798 bool GetValueCanBeNull() const { return value_can_be_null_; } 4799 bool NeedsTypeCheck() const { return needs_type_check_; } 4800 bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; } 4801 4802 HInstruction* GetArray() const { return InputAt(0); } 4803 HInstruction* GetIndex() const { return InputAt(1); } 4804 HInstruction* GetValue() const { return InputAt(2); } 4805 4806 Primitive::Type GetComponentType() const { 4807 // The Dex format does not type floating point index operations. Since the 4808 // `expected_component_type_` is set during building and can therefore not 4809 // be correct, we also check what is the value type. If it is a floating 4810 // point type, we must use that type. 4811 Primitive::Type value_type = GetValue()->GetType(); 4812 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble)) 4813 ? value_type 4814 : expected_component_type_; 4815 } 4816 4817 Primitive::Type GetRawExpectedComponentType() const { 4818 return expected_component_type_; 4819 } 4820 4821 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) { 4822 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None(); 4823 } 4824 4825 DECLARE_INSTRUCTION(ArraySet); 4826 4827 private: 4828 const Primitive::Type expected_component_type_; 4829 bool needs_type_check_; 4830 bool value_can_be_null_; 4831 // Cached information for the reference_type_info_ so that codegen 4832 // does not need to inspect the static type. 4833 bool static_type_of_array_is_object_array_; 4834 4835 DISALLOW_COPY_AND_ASSIGN(HArraySet); 4836}; 4837 4838class HArrayLength : public HExpression<1> { 4839 public: 4840 HArrayLength(HInstruction* array, uint32_t dex_pc) 4841 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) { 4842 // Note that arrays do not change length, so the instruction does not 4843 // depend on any write. 4844 SetRawInputAt(0, array); 4845 } 4846 4847 bool CanBeMoved() const OVERRIDE { return true; } 4848 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4849 return true; 4850 } 4851 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 4852 return obj == InputAt(0); 4853 } 4854 4855 DECLARE_INSTRUCTION(ArrayLength); 4856 4857 private: 4858 DISALLOW_COPY_AND_ASSIGN(HArrayLength); 4859}; 4860 4861class HBoundsCheck : public HExpression<2> { 4862 public: 4863 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4864 : HExpression(index->GetType(), SideEffects::None(), dex_pc) { 4865 DCHECK(index->GetType() == Primitive::kPrimInt); 4866 SetRawInputAt(0, index); 4867 SetRawInputAt(1, length); 4868 } 4869 4870 bool CanBeMoved() const OVERRIDE { return true; } 4871 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4872 return true; 4873 } 4874 4875 bool NeedsEnvironment() const OVERRIDE { return true; } 4876 4877 bool CanThrow() const OVERRIDE { return true; } 4878 4879 HInstruction* GetIndex() const { return InputAt(0); } 4880 4881 DECLARE_INSTRUCTION(BoundsCheck); 4882 4883 private: 4884 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4885}; 4886 4887/** 4888 * Some DEX instructions are folded into multiple HInstructions that need 4889 * to stay live until the last HInstruction. This class 4890 * is used as a marker for the baseline compiler to ensure its preceding 4891 * HInstruction stays live. `index` represents the stack location index of the 4892 * instruction (the actual offset is computed as index * vreg_size). 4893 */ 4894class HTemporary : public HTemplateInstruction<0> { 4895 public: 4896 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4897 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4898 4899 size_t GetIndex() const { return index_; } 4900 4901 Primitive::Type GetType() const OVERRIDE { 4902 // The previous instruction is the one that will be stored in the temporary location. 4903 DCHECK(GetPrevious() != nullptr); 4904 return GetPrevious()->GetType(); 4905 } 4906 4907 DECLARE_INSTRUCTION(Temporary); 4908 4909 private: 4910 const size_t index_; 4911 DISALLOW_COPY_AND_ASSIGN(HTemporary); 4912}; 4913 4914class HSuspendCheck : public HTemplateInstruction<0> { 4915 public: 4916 explicit HSuspendCheck(uint32_t dex_pc) 4917 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {} 4918 4919 bool NeedsEnvironment() const OVERRIDE { 4920 return true; 4921 } 4922 4923 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; } 4924 SlowPathCode* GetSlowPath() const { return slow_path_; } 4925 4926 DECLARE_INSTRUCTION(SuspendCheck); 4927 4928 private: 4929 // Only used for code generation, in order to share the same slow path between back edges 4930 // of a same loop. 4931 SlowPathCode* slow_path_; 4932 4933 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck); 4934}; 4935 4936// Pseudo-instruction which provides the native debugger with mapping information. 4937// It ensures that we can generate line number and local variables at this point. 4938class HNativeDebugInfo : public HTemplateInstruction<0> { 4939 public: 4940 explicit HNativeDebugInfo(uint32_t dex_pc) 4941 : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {} 4942 4943 bool NeedsEnvironment() const OVERRIDE { 4944 return true; 4945 } 4946 4947 DECLARE_INSTRUCTION(NativeDebugInfo); 4948 4949 private: 4950 DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo); 4951}; 4952 4953/** 4954 * Instruction to load a Class object. 4955 */ 4956class HLoadClass : public HExpression<1> { 4957 public: 4958 HLoadClass(HCurrentMethod* current_method, 4959 uint16_t type_index, 4960 const DexFile& dex_file, 4961 bool is_referrers_class, 4962 uint32_t dex_pc, 4963 bool needs_access_check, 4964 bool is_in_dex_cache) 4965 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 4966 type_index_(type_index), 4967 dex_file_(dex_file), 4968 is_referrers_class_(is_referrers_class), 4969 generate_clinit_check_(false), 4970 needs_access_check_(needs_access_check), 4971 is_in_dex_cache_(is_in_dex_cache), 4972 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 4973 // Referrers class should not need access check. We never inline unverified 4974 // methods so we can't possibly end up in this situation. 4975 DCHECK(!is_referrers_class_ || !needs_access_check_); 4976 SetRawInputAt(0, current_method); 4977 } 4978 4979 bool CanBeMoved() const OVERRIDE { return true; } 4980 4981 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 4982 // Note that we don't need to test for generate_clinit_check_. 4983 // Whether or not we need to generate the clinit check is processed in 4984 // prepare_for_register_allocator based on existing HInvokes and HClinitChecks. 4985 return other->AsLoadClass()->type_index_ == type_index_ && 4986 other->AsLoadClass()->needs_access_check_ == needs_access_check_; 4987 } 4988 4989 size_t ComputeHashCode() const OVERRIDE { return type_index_; } 4990 4991 uint16_t GetTypeIndex() const { return type_index_; } 4992 bool IsReferrersClass() const { return is_referrers_class_; } 4993 bool CanBeNull() const OVERRIDE { return false; } 4994 4995 bool NeedsEnvironment() const OVERRIDE { 4996 return CanCallRuntime(); 4997 } 4998 4999 bool MustGenerateClinitCheck() const { 5000 return generate_clinit_check_; 5001 } 5002 5003 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 5004 // The entrypoint the code generator is going to call does not do 5005 // clinit of the class. 5006 DCHECK(!NeedsAccessCheck()); 5007 generate_clinit_check_ = generate_clinit_check; 5008 } 5009 5010 bool CanCallRuntime() const { 5011 return MustGenerateClinitCheck() || 5012 (!is_referrers_class_ && !is_in_dex_cache_) || 5013 needs_access_check_; 5014 } 5015 5016 bool NeedsAccessCheck() const { 5017 return needs_access_check_; 5018 } 5019 5020 bool CanThrow() const OVERRIDE { 5021 return CanCallRuntime(); 5022 } 5023 5024 ReferenceTypeInfo GetLoadedClassRTI() { 5025 return loaded_class_rti_; 5026 } 5027 5028 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 5029 // Make sure we only set exact types (the loaded class should never be merged). 5030 DCHECK(rti.IsExact()); 5031 loaded_class_rti_ = rti; 5032 } 5033 5034 const DexFile& GetDexFile() { return dex_file_; } 5035 5036 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; } 5037 5038 static SideEffects SideEffectsForArchRuntimeCalls() { 5039 return SideEffects::CanTriggerGC(); 5040 } 5041 5042 bool IsInDexCache() const { return is_in_dex_cache_; } 5043 5044 DECLARE_INSTRUCTION(LoadClass); 5045 5046 private: 5047 const uint16_t type_index_; 5048 const DexFile& dex_file_; 5049 const bool is_referrers_class_; 5050 // Whether this instruction must generate the initialization check. 5051 // Used for code generation. 5052 bool generate_clinit_check_; 5053 const bool needs_access_check_; 5054 const bool is_in_dex_cache_; 5055 5056 ReferenceTypeInfo loaded_class_rti_; 5057 5058 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 5059}; 5060 5061class HLoadString : public HExpression<1> { 5062 public: 5063 HLoadString(HCurrentMethod* current_method, 5064 uint32_t string_index, 5065 uint32_t dex_pc, 5066 bool is_in_dex_cache) 5067 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc), 5068 string_index_(string_index), 5069 is_in_dex_cache_(is_in_dex_cache) { 5070 SetRawInputAt(0, current_method); 5071 } 5072 5073 bool CanBeMoved() const OVERRIDE { return true; } 5074 5075 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 5076 return other->AsLoadString()->string_index_ == string_index_; 5077 } 5078 5079 size_t ComputeHashCode() const OVERRIDE { return string_index_; } 5080 5081 uint32_t GetStringIndex() const { return string_index_; } 5082 5083 // TODO: Can we deopt or debug when we resolve a string? 5084 bool NeedsEnvironment() const OVERRIDE { return false; } 5085 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; } 5086 bool CanBeNull() const OVERRIDE { return false; } 5087 bool IsInDexCache() const { return is_in_dex_cache_; } 5088 5089 static SideEffects SideEffectsForArchRuntimeCalls() { 5090 return SideEffects::CanTriggerGC(); 5091 } 5092 5093 DECLARE_INSTRUCTION(LoadString); 5094 5095 private: 5096 const uint32_t string_index_; 5097 const bool is_in_dex_cache_; 5098 5099 DISALLOW_COPY_AND_ASSIGN(HLoadString); 5100}; 5101 5102/** 5103 * Performs an initialization check on its Class object input. 5104 */ 5105class HClinitCheck : public HExpression<1> { 5106 public: 5107 HClinitCheck(HLoadClass* constant, uint32_t dex_pc) 5108 : HExpression( 5109 Primitive::kPrimNot, 5110 SideEffects::AllChanges(), // Assume write/read on all fields/arrays. 5111 dex_pc) { 5112 SetRawInputAt(0, constant); 5113 } 5114 5115 bool CanBeMoved() const OVERRIDE { return true; } 5116 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5117 return true; 5118 } 5119 5120 bool NeedsEnvironment() const OVERRIDE { 5121 // May call runtime to initialize the class. 5122 return true; 5123 } 5124 5125 bool CanThrow() const OVERRIDE { return true; } 5126 5127 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); } 5128 5129 DECLARE_INSTRUCTION(ClinitCheck); 5130 5131 private: 5132 DISALLOW_COPY_AND_ASSIGN(HClinitCheck); 5133}; 5134 5135class HStaticFieldGet : public HExpression<1> { 5136 public: 5137 HStaticFieldGet(HInstruction* cls, 5138 Primitive::Type field_type, 5139 MemberOffset field_offset, 5140 bool is_volatile, 5141 uint32_t field_idx, 5142 uint16_t declaring_class_def_index, 5143 const DexFile& dex_file, 5144 Handle<mirror::DexCache> dex_cache, 5145 uint32_t dex_pc) 5146 : HExpression(field_type, 5147 SideEffects::FieldReadOfType(field_type, is_volatile), 5148 dex_pc), 5149 field_info_(field_offset, 5150 field_type, 5151 is_volatile, 5152 field_idx, 5153 declaring_class_def_index, 5154 dex_file, 5155 dex_cache) { 5156 SetRawInputAt(0, cls); 5157 } 5158 5159 5160 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); } 5161 5162 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 5163 HStaticFieldGet* other_get = other->AsStaticFieldGet(); 5164 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue(); 5165 } 5166 5167 size_t ComputeHashCode() const OVERRIDE { 5168 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue(); 5169 } 5170 5171 const FieldInfo& GetFieldInfo() const { return field_info_; } 5172 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 5173 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 5174 bool IsVolatile() const { return field_info_.IsVolatile(); } 5175 5176 DECLARE_INSTRUCTION(StaticFieldGet); 5177 5178 private: 5179 const FieldInfo field_info_; 5180 5181 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet); 5182}; 5183 5184class HStaticFieldSet : public HTemplateInstruction<2> { 5185 public: 5186 HStaticFieldSet(HInstruction* cls, 5187 HInstruction* value, 5188 Primitive::Type field_type, 5189 MemberOffset field_offset, 5190 bool is_volatile, 5191 uint32_t field_idx, 5192 uint16_t declaring_class_def_index, 5193 const DexFile& dex_file, 5194 Handle<mirror::DexCache> dex_cache, 5195 uint32_t dex_pc) 5196 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), 5197 dex_pc), 5198 field_info_(field_offset, 5199 field_type, 5200 is_volatile, 5201 field_idx, 5202 declaring_class_def_index, 5203 dex_file, 5204 dex_cache), 5205 value_can_be_null_(true) { 5206 SetRawInputAt(0, cls); 5207 SetRawInputAt(1, value); 5208 } 5209 5210 const FieldInfo& GetFieldInfo() const { return field_info_; } 5211 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); } 5212 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); } 5213 bool IsVolatile() const { return field_info_.IsVolatile(); } 5214 5215 HInstruction* GetValue() const { return InputAt(1); } 5216 bool GetValueCanBeNull() const { return value_can_be_null_; } 5217 void ClearValueCanBeNull() { value_can_be_null_ = false; } 5218 5219 DECLARE_INSTRUCTION(StaticFieldSet); 5220 5221 private: 5222 const FieldInfo field_info_; 5223 bool value_can_be_null_; 5224 5225 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet); 5226}; 5227 5228class HUnresolvedInstanceFieldGet : public HExpression<1> { 5229 public: 5230 HUnresolvedInstanceFieldGet(HInstruction* obj, 5231 Primitive::Type field_type, 5232 uint32_t field_index, 5233 uint32_t dex_pc) 5234 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5235 field_index_(field_index) { 5236 SetRawInputAt(0, obj); 5237 } 5238 5239 bool NeedsEnvironment() const OVERRIDE { return true; } 5240 bool CanThrow() const OVERRIDE { return true; } 5241 5242 Primitive::Type GetFieldType() const { return GetType(); } 5243 uint32_t GetFieldIndex() const { return field_index_; } 5244 5245 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 5246 5247 private: 5248 const uint32_t field_index_; 5249 5250 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 5251}; 5252 5253class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> { 5254 public: 5255 HUnresolvedInstanceFieldSet(HInstruction* obj, 5256 HInstruction* value, 5257 Primitive::Type field_type, 5258 uint32_t field_index, 5259 uint32_t dex_pc) 5260 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5261 field_type_(field_type), 5262 field_index_(field_index) { 5263 DCHECK_EQ(field_type, value->GetType()); 5264 SetRawInputAt(0, obj); 5265 SetRawInputAt(1, value); 5266 } 5267 5268 bool NeedsEnvironment() const OVERRIDE { return true; } 5269 bool CanThrow() const OVERRIDE { return true; } 5270 5271 Primitive::Type GetFieldType() const { return field_type_; } 5272 uint32_t GetFieldIndex() const { return field_index_; } 5273 5274 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 5275 5276 private: 5277 const Primitive::Type field_type_; 5278 const uint32_t field_index_; 5279 5280 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 5281}; 5282 5283class HUnresolvedStaticFieldGet : public HExpression<0> { 5284 public: 5285 HUnresolvedStaticFieldGet(Primitive::Type field_type, 5286 uint32_t field_index, 5287 uint32_t dex_pc) 5288 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 5289 field_index_(field_index) { 5290 } 5291 5292 bool NeedsEnvironment() const OVERRIDE { return true; } 5293 bool CanThrow() const OVERRIDE { return true; } 5294 5295 Primitive::Type GetFieldType() const { return GetType(); } 5296 uint32_t GetFieldIndex() const { return field_index_; } 5297 5298 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 5299 5300 private: 5301 const uint32_t field_index_; 5302 5303 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 5304}; 5305 5306class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> { 5307 public: 5308 HUnresolvedStaticFieldSet(HInstruction* value, 5309 Primitive::Type field_type, 5310 uint32_t field_index, 5311 uint32_t dex_pc) 5312 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 5313 field_type_(field_type), 5314 field_index_(field_index) { 5315 DCHECK_EQ(field_type, value->GetType()); 5316 SetRawInputAt(0, value); 5317 } 5318 5319 bool NeedsEnvironment() const OVERRIDE { return true; } 5320 bool CanThrow() const OVERRIDE { return true; } 5321 5322 Primitive::Type GetFieldType() const { return field_type_; } 5323 uint32_t GetFieldIndex() const { return field_index_; } 5324 5325 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 5326 5327 private: 5328 const Primitive::Type field_type_; 5329 const uint32_t field_index_; 5330 5331 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 5332}; 5333 5334// Implement the move-exception DEX instruction. 5335class HLoadException : public HExpression<0> { 5336 public: 5337 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 5338 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 5339 5340 bool CanBeNull() const OVERRIDE { return false; } 5341 5342 DECLARE_INSTRUCTION(LoadException); 5343 5344 private: 5345 DISALLOW_COPY_AND_ASSIGN(HLoadException); 5346}; 5347 5348// Implicit part of move-exception which clears thread-local exception storage. 5349// Must not be removed because the runtime expects the TLS to get cleared. 5350class HClearException : public HTemplateInstruction<0> { 5351 public: 5352 explicit HClearException(uint32_t dex_pc = kNoDexPc) 5353 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 5354 5355 DECLARE_INSTRUCTION(ClearException); 5356 5357 private: 5358 DISALLOW_COPY_AND_ASSIGN(HClearException); 5359}; 5360 5361class HThrow : public HTemplateInstruction<1> { 5362 public: 5363 HThrow(HInstruction* exception, uint32_t dex_pc) 5364 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 5365 SetRawInputAt(0, exception); 5366 } 5367 5368 bool IsControlFlow() const OVERRIDE { return true; } 5369 5370 bool NeedsEnvironment() const OVERRIDE { return true; } 5371 5372 bool CanThrow() const OVERRIDE { return true; } 5373 5374 5375 DECLARE_INSTRUCTION(Throw); 5376 5377 private: 5378 DISALLOW_COPY_AND_ASSIGN(HThrow); 5379}; 5380 5381/** 5382 * Implementation strategies for the code generator of a HInstanceOf 5383 * or `HCheckCast`. 5384 */ 5385enum class TypeCheckKind { 5386 kUnresolvedCheck, // Check against an unresolved type. 5387 kExactCheck, // Can do a single class compare. 5388 kClassHierarchyCheck, // Can just walk the super class chain. 5389 kAbstractClassCheck, // Can just walk the super class chain, starting one up. 5390 kInterfaceCheck, // No optimization yet when checking against an interface. 5391 kArrayObjectCheck, // Can just check if the array is not primitive. 5392 kArrayCheck // No optimization yet when checking against a generic array. 5393}; 5394 5395class HInstanceOf : public HExpression<2> { 5396 public: 5397 HInstanceOf(HInstruction* object, 5398 HLoadClass* constant, 5399 TypeCheckKind check_kind, 5400 uint32_t dex_pc) 5401 : HExpression(Primitive::kPrimBoolean, 5402 SideEffectsForArchRuntimeCalls(check_kind), 5403 dex_pc), 5404 check_kind_(check_kind), 5405 must_do_null_check_(true) { 5406 SetRawInputAt(0, object); 5407 SetRawInputAt(1, constant); 5408 } 5409 5410 bool CanBeMoved() const OVERRIDE { return true; } 5411 5412 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5413 return true; 5414 } 5415 5416 bool NeedsEnvironment() const OVERRIDE { 5417 return false; 5418 } 5419 5420 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5421 5422 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5423 5424 // Used only in code generation. 5425 bool MustDoNullCheck() const { return must_do_null_check_; } 5426 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5427 5428 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) { 5429 return (check_kind == TypeCheckKind::kExactCheck) 5430 ? SideEffects::None() 5431 // Mips currently does runtime calls for any other checks. 5432 : SideEffects::CanTriggerGC(); 5433 } 5434 5435 DECLARE_INSTRUCTION(InstanceOf); 5436 5437 private: 5438 const TypeCheckKind check_kind_; 5439 bool must_do_null_check_; 5440 5441 DISALLOW_COPY_AND_ASSIGN(HInstanceOf); 5442}; 5443 5444class HBoundType : public HExpression<1> { 5445 public: 5446 HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc) 5447 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc), 5448 upper_bound_(ReferenceTypeInfo::CreateInvalid()), 5449 upper_can_be_null_(true), 5450 can_be_null_(true) { 5451 DCHECK_EQ(input->GetType(), Primitive::kPrimNot); 5452 SetRawInputAt(0, input); 5453 } 5454 5455 // {Get,Set}Upper* should only be used in reference type propagation. 5456 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; } 5457 bool GetUpperCanBeNull() const { return upper_can_be_null_; } 5458 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null); 5459 5460 void SetCanBeNull(bool can_be_null) { 5461 DCHECK(upper_can_be_null_ || !can_be_null); 5462 can_be_null_ = can_be_null; 5463 } 5464 5465 bool CanBeNull() const OVERRIDE { return can_be_null_; } 5466 5467 DECLARE_INSTRUCTION(BoundType); 5468 5469 private: 5470 // Encodes the most upper class that this instruction can have. In other words 5471 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()). 5472 // It is used to bound the type in cases like: 5473 // if (x instanceof ClassX) { 5474 // // uper_bound_ will be ClassX 5475 // } 5476 ReferenceTypeInfo upper_bound_; 5477 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this 5478 // is false then can_be_null_ cannot be true). 5479 bool upper_can_be_null_; 5480 bool can_be_null_; 5481 5482 DISALLOW_COPY_AND_ASSIGN(HBoundType); 5483}; 5484 5485class HCheckCast : public HTemplateInstruction<2> { 5486 public: 5487 HCheckCast(HInstruction* object, 5488 HLoadClass* constant, 5489 TypeCheckKind check_kind, 5490 uint32_t dex_pc) 5491 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), 5492 check_kind_(check_kind), 5493 must_do_null_check_(true) { 5494 SetRawInputAt(0, object); 5495 SetRawInputAt(1, constant); 5496 } 5497 5498 bool CanBeMoved() const OVERRIDE { return true; } 5499 5500 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5501 return true; 5502 } 5503 5504 bool NeedsEnvironment() const OVERRIDE { 5505 // Instruction may throw a CheckCastError. 5506 return true; 5507 } 5508 5509 bool CanThrow() const OVERRIDE { return true; } 5510 5511 bool MustDoNullCheck() const { return must_do_null_check_; } 5512 void ClearMustDoNullCheck() { must_do_null_check_ = false; } 5513 TypeCheckKind GetTypeCheckKind() const { return check_kind_; } 5514 5515 bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; } 5516 5517 DECLARE_INSTRUCTION(CheckCast); 5518 5519 private: 5520 const TypeCheckKind check_kind_; 5521 bool must_do_null_check_; 5522 5523 DISALLOW_COPY_AND_ASSIGN(HCheckCast); 5524}; 5525 5526class HMemoryBarrier : public HTemplateInstruction<0> { 5527 public: 5528 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc) 5529 : HTemplateInstruction( 5530 SideEffects::AllWritesAndReads(), dex_pc), // Assume write/read on all fields/arrays. 5531 barrier_kind_(barrier_kind) {} 5532 5533 MemBarrierKind GetBarrierKind() { return barrier_kind_; } 5534 5535 DECLARE_INSTRUCTION(MemoryBarrier); 5536 5537 private: 5538 const MemBarrierKind barrier_kind_; 5539 5540 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier); 5541}; 5542 5543class HMonitorOperation : public HTemplateInstruction<1> { 5544 public: 5545 enum OperationKind { 5546 kEnter, 5547 kExit, 5548 }; 5549 5550 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc) 5551 : HTemplateInstruction( 5552 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays. 5553 kind_(kind) { 5554 SetRawInputAt(0, object); 5555 } 5556 5557 // Instruction may throw a Java exception, so we need an environment. 5558 bool NeedsEnvironment() const OVERRIDE { return CanThrow(); } 5559 5560 bool CanThrow() const OVERRIDE { 5561 // Verifier guarantees that monitor-exit cannot throw. 5562 // This is important because it allows the HGraphBuilder to remove 5563 // a dead throw-catch loop generated for `synchronized` blocks/methods. 5564 return IsEnter(); 5565 } 5566 5567 5568 bool IsEnter() const { return kind_ == kEnter; } 5569 5570 DECLARE_INSTRUCTION(MonitorOperation); 5571 5572 private: 5573 const OperationKind kind_; 5574 5575 private: 5576 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5577}; 5578 5579/** 5580 * A HInstruction used as a marker for the replacement of new + <init> 5581 * of a String to a call to a StringFactory. Only baseline will see 5582 * the node at code generation, where it will be be treated as null. 5583 * When compiling non-baseline, `HFakeString` instructions are being removed 5584 * in the instruction simplifier. 5585 */ 5586class HFakeString : public HTemplateInstruction<0> { 5587 public: 5588 explicit HFakeString(uint32_t dex_pc = kNoDexPc) 5589 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 5590 5591 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimNot; } 5592 5593 DECLARE_INSTRUCTION(FakeString); 5594 5595 private: 5596 DISALLOW_COPY_AND_ASSIGN(HFakeString); 5597}; 5598 5599class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5600 public: 5601 MoveOperands(Location source, 5602 Location destination, 5603 Primitive::Type type, 5604 HInstruction* instruction) 5605 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5606 5607 Location GetSource() const { return source_; } 5608 Location GetDestination() const { return destination_; } 5609 5610 void SetSource(Location value) { source_ = value; } 5611 void SetDestination(Location value) { destination_ = value; } 5612 5613 // The parallel move resolver marks moves as "in-progress" by clearing the 5614 // destination (but not the source). 5615 Location MarkPending() { 5616 DCHECK(!IsPending()); 5617 Location dest = destination_; 5618 destination_ = Location::NoLocation(); 5619 return dest; 5620 } 5621 5622 void ClearPending(Location dest) { 5623 DCHECK(IsPending()); 5624 destination_ = dest; 5625 } 5626 5627 bool IsPending() const { 5628 DCHECK(source_.IsValid() || destination_.IsInvalid()); 5629 return destination_.IsInvalid() && source_.IsValid(); 5630 } 5631 5632 // True if this blocks a move from the given location. 5633 bool Blocks(Location loc) const { 5634 return !IsEliminated() && source_.OverlapsWith(loc); 5635 } 5636 5637 // A move is redundant if it's been eliminated, if its source and 5638 // destination are the same, or if its destination is unneeded. 5639 bool IsRedundant() const { 5640 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5641 } 5642 5643 // We clear both operands to indicate move that's been eliminated. 5644 void Eliminate() { 5645 source_ = destination_ = Location::NoLocation(); 5646 } 5647 5648 bool IsEliminated() const { 5649 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5650 return source_.IsInvalid(); 5651 } 5652 5653 Primitive::Type GetType() const { return type_; } 5654 5655 bool Is64BitMove() const { 5656 return Primitive::Is64BitType(type_); 5657 } 5658 5659 HInstruction* GetInstruction() const { return instruction_; } 5660 5661 private: 5662 Location source_; 5663 Location destination_; 5664 // The type this move is for. 5665 Primitive::Type type_; 5666 // The instruction this move is assocatied with. Null when this move is 5667 // for moving an input in the expected locations of user (including a phi user). 5668 // This is only used in debug mode, to ensure we do not connect interval siblings 5669 // in the same parallel move. 5670 HInstruction* instruction_; 5671}; 5672 5673std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs); 5674 5675static constexpr size_t kDefaultNumberOfMoves = 4; 5676 5677class HParallelMove : public HTemplateInstruction<0> { 5678 public: 5679 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5680 : HTemplateInstruction(SideEffects::None(), dex_pc), 5681 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5682 moves_.reserve(kDefaultNumberOfMoves); 5683 } 5684 5685 void AddMove(Location source, 5686 Location destination, 5687 Primitive::Type type, 5688 HInstruction* instruction) { 5689 DCHECK(source.IsValid()); 5690 DCHECK(destination.IsValid()); 5691 if (kIsDebugBuild) { 5692 if (instruction != nullptr) { 5693 for (const MoveOperands& move : moves_) { 5694 if (move.GetInstruction() == instruction) { 5695 // Special case the situation where the move is for the spill slot 5696 // of the instruction. 5697 if ((GetPrevious() == instruction) 5698 || ((GetPrevious() == nullptr) 5699 && instruction->IsPhi() 5700 && instruction->GetBlock() == GetBlock())) { 5701 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind()) 5702 << "Doing parallel moves for the same instruction."; 5703 } else { 5704 DCHECK(false) << "Doing parallel moves for the same instruction."; 5705 } 5706 } 5707 } 5708 } 5709 for (const MoveOperands& move : moves_) { 5710 DCHECK(!destination.OverlapsWith(move.GetDestination())) 5711 << "Overlapped destination for two moves in a parallel move: " 5712 << move.GetSource() << " ==> " << move.GetDestination() << " and " 5713 << source << " ==> " << destination; 5714 } 5715 } 5716 moves_.emplace_back(source, destination, type, instruction); 5717 } 5718 5719 MoveOperands* MoveOperandsAt(size_t index) { 5720 return &moves_[index]; 5721 } 5722 5723 size_t NumMoves() const { return moves_.size(); } 5724 5725 DECLARE_INSTRUCTION(ParallelMove); 5726 5727 private: 5728 ArenaVector<MoveOperands> moves_; 5729 5730 DISALLOW_COPY_AND_ASSIGN(HParallelMove); 5731}; 5732 5733} // namespace art 5734 5735#ifdef ART_ENABLE_CODEGEN_arm 5736#include "nodes_arm.h" 5737#endif 5738#ifdef ART_ENABLE_CODEGEN_arm64 5739#include "nodes_arm64.h" 5740#endif 5741#ifdef ART_ENABLE_CODEGEN_x86 5742#include "nodes_x86.h" 5743#endif 5744 5745namespace art { 5746 5747class HGraphVisitor : public ValueObject { 5748 public: 5749 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {} 5750 virtual ~HGraphVisitor() {} 5751 5752 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} 5753 virtual void VisitBasicBlock(HBasicBlock* block); 5754 5755 // Visit the graph following basic block insertion order. 5756 void VisitInsertionOrder(); 5757 5758 // Visit the graph following dominator tree reverse post-order. 5759 void VisitReversePostOrder(); 5760 5761 HGraph* GetGraph() const { return graph_; } 5762 5763 // Visit functions for instruction classes. 5764#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5765 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); } 5766 5767 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5768 5769#undef DECLARE_VISIT_INSTRUCTION 5770 5771 private: 5772 HGraph* const graph_; 5773 5774 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor); 5775}; 5776 5777class HGraphDelegateVisitor : public HGraphVisitor { 5778 public: 5779 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {} 5780 virtual ~HGraphDelegateVisitor() {} 5781 5782 // Visit functions that delegate to to super class. 5783#define DECLARE_VISIT_INSTRUCTION(name, super) \ 5784 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 5785 5786 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 5787 5788#undef DECLARE_VISIT_INSTRUCTION 5789 5790 private: 5791 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 5792}; 5793 5794class HInsertionOrderIterator : public ValueObject { 5795 public: 5796 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {} 5797 5798 bool Done() const { return index_ == graph_.GetBlocks().size(); } 5799 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; } 5800 void Advance() { ++index_; } 5801 5802 private: 5803 const HGraph& graph_; 5804 size_t index_; 5805 5806 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator); 5807}; 5808 5809class HReversePostOrderIterator : public ValueObject { 5810 public: 5811 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) { 5812 // Check that reverse post order of the graph has been built. 5813 DCHECK(!graph.GetReversePostOrder().empty()); 5814 } 5815 5816 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5817 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5818 void Advance() { ++index_; } 5819 5820 private: 5821 const HGraph& graph_; 5822 size_t index_; 5823 5824 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5825}; 5826 5827class HPostOrderIterator : public ValueObject { 5828 public: 5829 explicit HPostOrderIterator(const HGraph& graph) 5830 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5831 // Check that reverse post order of the graph has been built. 5832 DCHECK(!graph.GetReversePostOrder().empty()); 5833 } 5834 5835 bool Done() const { return index_ == 0; } 5836 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5837 void Advance() { --index_; } 5838 5839 private: 5840 const HGraph& graph_; 5841 size_t index_; 5842 5843 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5844}; 5845 5846class HLinearPostOrderIterator : public ValueObject { 5847 public: 5848 explicit HLinearPostOrderIterator(const HGraph& graph) 5849 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5850 5851 bool Done() const { return index_ == 0; } 5852 5853 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5854 5855 void Advance() { 5856 --index_; 5857 DCHECK_GE(index_, 0U); 5858 } 5859 5860 private: 5861 const ArenaVector<HBasicBlock*>& order_; 5862 size_t index_; 5863 5864 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5865}; 5866 5867class HLinearOrderIterator : public ValueObject { 5868 public: 5869 explicit HLinearOrderIterator(const HGraph& graph) 5870 : order_(graph.GetLinearOrder()), index_(0) {} 5871 5872 bool Done() const { return index_ == order_.size(); } 5873 HBasicBlock* Current() const { return order_[index_]; } 5874 void Advance() { ++index_; } 5875 5876 private: 5877 const ArenaVector<HBasicBlock*>& order_; 5878 size_t index_; 5879 5880 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5881}; 5882 5883// Iterator over the blocks that art part of the loop. Includes blocks part 5884// of an inner loop. The order in which the blocks are iterated is on their 5885// block id. 5886class HBlocksInLoopIterator : public ValueObject { 5887 public: 5888 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5889 : blocks_in_loop_(info.GetBlocks()), 5890 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5891 index_(0) { 5892 if (!blocks_in_loop_.IsBitSet(index_)) { 5893 Advance(); 5894 } 5895 } 5896 5897 bool Done() const { return index_ == blocks_.size(); } 5898 HBasicBlock* Current() const { return blocks_[index_]; } 5899 void Advance() { 5900 ++index_; 5901 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5902 if (blocks_in_loop_.IsBitSet(index_)) { 5903 break; 5904 } 5905 } 5906 } 5907 5908 private: 5909 const BitVector& blocks_in_loop_; 5910 const ArenaVector<HBasicBlock*>& blocks_; 5911 size_t index_; 5912 5913 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5914}; 5915 5916// Iterator over the blocks that art part of the loop. Includes blocks part 5917// of an inner loop. The order in which the blocks are iterated is reverse 5918// post order. 5919class HBlocksInLoopReversePostOrderIterator : public ValueObject { 5920 public: 5921 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info) 5922 : blocks_in_loop_(info.GetBlocks()), 5923 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()), 5924 index_(0) { 5925 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5926 Advance(); 5927 } 5928 } 5929 5930 bool Done() const { return index_ == blocks_.size(); } 5931 HBasicBlock* Current() const { return blocks_[index_]; } 5932 void Advance() { 5933 ++index_; 5934 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5935 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) { 5936 break; 5937 } 5938 } 5939 } 5940 5941 private: 5942 const BitVector& blocks_in_loop_; 5943 const ArenaVector<HBasicBlock*>& blocks_; 5944 size_t index_; 5945 5946 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator); 5947}; 5948 5949inline int64_t Int64FromConstant(HConstant* constant) { 5950 DCHECK(constant->IsIntConstant() || constant->IsLongConstant()); 5951 return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 5952 : constant->AsLongConstant()->GetValue(); 5953} 5954 5955inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) { 5956 // For the purposes of the compiler, the dex files must actually be the same object 5957 // if we want to safely treat them as the same. This is especially important for JIT 5958 // as custom class loaders can open the same underlying file (or memory) multiple 5959 // times and provide different class resolution but no two class loaders should ever 5960 // use the same DexFile object - doing so is an unsupported hack that can lead to 5961 // all sorts of weird failures. 5962 return &lhs == &rhs; 5963} 5964 5965#define INSTRUCTION_TYPE_CHECK(type, super) \ 5966 inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \ 5967 inline const H##type* HInstruction::As##type() const { \ 5968 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \ 5969 } \ 5970 inline H##type* HInstruction::As##type() { \ 5971 return Is##type() ? static_cast<H##type*>(this) : nullptr; \ 5972 } 5973 5974 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 5975#undef INSTRUCTION_TYPE_CHECK 5976 5977} // namespace art 5978 5979#endif // ART_COMPILER_OPTIMIZING_NODES_H_ 5980