nodes.h revision debeb98aaa8950caf1a19df490f2ac9bf563075b
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/stl_util.h"
#include "dex/compiler_enums.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
#include "utils/array_ref.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

// Default initial capacities reserved in the arena-backed containers below,
// sized for a typical small method to avoid early reallocations.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// Maximum meaningful shift distances: shifts are masked to the operand
// width (31 for 32-bit values, 63 for 64-bit values).
static constexpr uint32_t kMaxIntShiftValue = 0x1f;
static constexpr uint64_t kMaxLongShiftValue = 0x3f;

// Sentinels for field accesses whose resolution failed.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel invoke type, used as the default for graphs that are not inlined.
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions that do not map back to a dex instruction.
static constexpr uint32_t kNoDexPc = -1;

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};

// Outcome of graph analysis passes such as SSA building and loop recognition.
enum GraphAnalysisResult {
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};

// Intrusive doubly-linked list of HInstructions, as stored in a basic block.
// Does not own the instructions; they are arena-allocated.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }

  // Drops the list's pointers without touching the instructions themselves.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice `instruction_list` into this list after `cursor` / at the end.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

// Static type information attached to reference-typed values: a class handle
// plus an "exact" bit. An invalid instance (null handle) means "no type
// information available".
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
    // The constructor will check that the type_handle is valid.
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return IsValidHandle(type_handle_);
  }

  // Whether the class is the exact runtime type, as opposed to a supertype
  // of the actual type.
  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this (exact) type can store a value of type `rti`.
  // Conservatively false when this type is not exact or not an array.
  bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this (exact) type can store every element of an
  // array of type `rti`. Conservatively false unless both are arrays.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo();
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Control-flow graph of a method. Contains a list of basic blocks.
268class HGraph : public ArenaObject<kArenaAllocGraph> { 269 public: 270 HGraph(ArenaAllocator* arena, 271 const DexFile& dex_file, 272 uint32_t method_idx, 273 bool should_generate_constructor_barrier, 274 InstructionSet instruction_set, 275 InvokeType invoke_type = kInvalidInvokeType, 276 bool debuggable = false, 277 int start_instruction_id = 0) 278 : arena_(arena), 279 blocks_(arena->Adapter(kArenaAllocBlockList)), 280 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)), 281 linear_order_(arena->Adapter(kArenaAllocLinearOrder)), 282 entry_block_(nullptr), 283 exit_block_(nullptr), 284 maximum_number_of_out_vregs_(0), 285 number_of_vregs_(0), 286 number_of_in_vregs_(0), 287 temporaries_vreg_slots_(0), 288 has_bounds_checks_(false), 289 has_try_catch_(false), 290 has_irreducible_loops_(false), 291 debuggable_(debuggable), 292 current_instruction_id_(start_instruction_id), 293 dex_file_(dex_file), 294 method_idx_(method_idx), 295 invoke_type_(invoke_type), 296 in_ssa_form_(false), 297 should_generate_constructor_barrier_(should_generate_constructor_barrier), 298 instruction_set_(instruction_set), 299 cached_null_constant_(nullptr), 300 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 301 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)), 302 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 303 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)), 304 cached_current_method_(nullptr), 305 inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()) { 306 blocks_.reserve(kDefaultNumberOfBlocks); 307 } 308 309 ArenaAllocator* GetArena() const { return arena_; } 310 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; } 311 312 bool IsInSsaForm() const { return in_ssa_form_; } 313 314 HBasicBlock* GetEntryBlock() const { return entry_block_; } 315 HBasicBlock* GetExitBlock() const { return 
exit_block_; } 316 bool HasExitBlock() const { return exit_block_ != nullptr; } 317 318 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; } 319 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; } 320 321 void AddBlock(HBasicBlock* block); 322 323 // Try building the SSA form of this graph, with dominance computation and 324 // loop recognition. Returns a code specifying that it was successful or the 325 // reason for failure. 326 GraphAnalysisResult TryBuildingSsa(StackHandleScopeCollection* handles); 327 328 void ComputeDominanceInformation(); 329 void ClearDominanceInformation(); 330 void ClearLoopInformation(); 331 void FindBackEdges(ArenaBitVector* visited); 332 GraphAnalysisResult BuildDominatorTree(); 333 void SimplifyCFG(); 334 void SimplifyCatchBlocks(); 335 336 // Analyze all natural loops in this graph. Returns a code specifying that it 337 // was successful or the reason for failure. The method will fail if a loop 338 // is a throw-catch loop, i.e. the header is a catch block. 339 GraphAnalysisResult AnalyzeLoops() const; 340 341 // Iterate over blocks to compute try block membership. Needs reverse post 342 // order and loop information. 343 void ComputeTryBlockInformation(); 344 345 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction. 346 // Returns the instruction used to replace the invoke expression or null if the 347 // invoke is for a void method. 348 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke); 349 350 // Need to add a couple of blocks to test if the loop body is entered and 351 // put deoptimization instructions, etc. 352 void TransformLoopHeaderForBCE(HBasicBlock* header); 353 354 // Removes `block` from the graph. Assumes `block` has been disconnected from 355 // other blocks and has no instructions or phis. 
356 void DeleteDeadEmptyBlock(HBasicBlock* block); 357 358 // Splits the edge between `block` and `successor` while preserving the 359 // indices in the predecessor/successor lists. If there are multiple edges 360 // between the blocks, the lowest indices are used. 361 // Returns the new block which is empty and has the same dex pc as `successor`. 362 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor); 363 364 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor); 365 void SimplifyLoop(HBasicBlock* header); 366 367 int32_t GetNextInstructionId() { 368 DCHECK_NE(current_instruction_id_, INT32_MAX); 369 return current_instruction_id_++; 370 } 371 372 int32_t GetCurrentInstructionId() const { 373 return current_instruction_id_; 374 } 375 376 void SetCurrentInstructionId(int32_t id) { 377 current_instruction_id_ = id; 378 } 379 380 uint16_t GetMaximumNumberOfOutVRegs() const { 381 return maximum_number_of_out_vregs_; 382 } 383 384 void SetMaximumNumberOfOutVRegs(uint16_t new_value) { 385 maximum_number_of_out_vregs_ = new_value; 386 } 387 388 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) { 389 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value); 390 } 391 392 void UpdateTemporariesVRegSlots(size_t slots) { 393 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_); 394 } 395 396 size_t GetTemporariesVRegSlots() const { 397 DCHECK(!in_ssa_form_); 398 return temporaries_vreg_slots_; 399 } 400 401 void SetNumberOfVRegs(uint16_t number_of_vregs) { 402 number_of_vregs_ = number_of_vregs; 403 } 404 405 uint16_t GetNumberOfVRegs() const { 406 return number_of_vregs_; 407 } 408 409 void SetNumberOfInVRegs(uint16_t value) { 410 number_of_in_vregs_ = value; 411 } 412 413 uint16_t GetNumberOfLocalVRegs() const { 414 DCHECK(!in_ssa_form_); 415 return number_of_vregs_ - number_of_in_vregs_; 416 } 417 418 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const { 419 return reverse_post_order_; 
420 } 421 422 const ArenaVector<HBasicBlock*>& GetLinearOrder() const { 423 return linear_order_; 424 } 425 426 bool HasBoundsChecks() const { 427 return has_bounds_checks_; 428 } 429 430 void SetHasBoundsChecks(bool value) { 431 has_bounds_checks_ = value; 432 } 433 434 bool ShouldGenerateConstructorBarrier() const { 435 return should_generate_constructor_barrier_; 436 } 437 438 bool IsDebuggable() const { return debuggable_; } 439 440 // Returns a constant of the given type and value. If it does not exist 441 // already, it is created and inserted into the graph. This method is only for 442 // integral types. 443 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc); 444 445 // TODO: This is problematic for the consistency of reference type propagation 446 // because it can be created anytime after the pass and thus it will be left 447 // with an invalid type. 448 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc); 449 450 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) { 451 return CreateConstant(value, &cached_int_constants_, dex_pc); 452 } 453 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) { 454 return CreateConstant(value, &cached_long_constants_, dex_pc); 455 } 456 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) { 457 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc); 458 } 459 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) { 460 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc); 461 } 462 463 HCurrentMethod* GetCurrentMethod(); 464 465 const DexFile& GetDexFile() const { 466 return dex_file_; 467 } 468 469 uint32_t GetMethodIdx() const { 470 return method_idx_; 471 } 472 473 InvokeType GetInvokeType() const { 474 return invoke_type_; 475 } 476 477 InstructionSet GetInstructionSet() const { 478 return instruction_set_; 479 } 480 481 
bool HasTryCatch() const { return has_try_catch_; } 482 void SetHasTryCatch(bool value) { has_try_catch_ = value; } 483 484 bool HasIrreducibleLoops() const { return has_irreducible_loops_; } 485 void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; } 486 487 ArtMethod* GetArtMethod() const { return art_method_; } 488 void SetArtMethod(ArtMethod* method) { art_method_ = method; } 489 490 // Returns an instruction with the opposite boolean value from 'cond'. 491 // The instruction has been inserted into the graph, either as a constant, or 492 // before cursor. 493 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor); 494 495 private: 496 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const; 497 void RemoveDeadBlocks(const ArenaBitVector& visited); 498 499 template <class InstructionType, typename ValueType> 500 InstructionType* CreateConstant(ValueType value, 501 ArenaSafeMap<ValueType, InstructionType*>* cache, 502 uint32_t dex_pc = kNoDexPc) { 503 // Try to find an existing constant of the given value. 504 InstructionType* constant = nullptr; 505 auto cached_constant = cache->find(value); 506 if (cached_constant != cache->end()) { 507 constant = cached_constant->second; 508 } 509 510 // If not found or previously deleted, create and cache a new instruction. 511 // Don't bother reviving a previously deleted instruction, for simplicity. 512 if (constant == nullptr || constant->GetBlock() == nullptr) { 513 constant = new (arena_) InstructionType(value, dex_pc); 514 cache->Overwrite(value, constant); 515 InsertConstant(constant); 516 } 517 return constant; 518 } 519 520 void InsertConstant(HConstant* instruction); 521 522 // Cache a float constant into the graph. This method should only be 523 // called by the SsaBuilder when creating "equivalent" instructions. 524 void CacheFloatConstant(HFloatConstant* constant); 525 526 // See CacheFloatConstant comment. 
527 void CacheDoubleConstant(HDoubleConstant* constant); 528 529 ArenaAllocator* const arena_; 530 531 // List of blocks in insertion order. 532 ArenaVector<HBasicBlock*> blocks_; 533 534 // List of blocks to perform a reverse post order tree traversal. 535 ArenaVector<HBasicBlock*> reverse_post_order_; 536 537 // List of blocks to perform a linear order tree traversal. 538 ArenaVector<HBasicBlock*> linear_order_; 539 540 HBasicBlock* entry_block_; 541 HBasicBlock* exit_block_; 542 543 // The maximum number of virtual registers arguments passed to a HInvoke in this graph. 544 uint16_t maximum_number_of_out_vregs_; 545 546 // The number of virtual registers in this method. Contains the parameters. 547 uint16_t number_of_vregs_; 548 549 // The number of virtual registers used by parameters of this method. 550 uint16_t number_of_in_vregs_; 551 552 // Number of vreg size slots that the temporaries use (used in baseline compiler). 553 size_t temporaries_vreg_slots_; 554 555 // Has bounds checks. We can totally skip BCE if it's false. 556 bool has_bounds_checks_; 557 558 // Flag whether there are any try/catch blocks in the graph. We will skip 559 // try/catch-related passes if false. 560 bool has_try_catch_; 561 562 // Flag whether there are any irreducible loops in the graph. 563 bool has_irreducible_loops_; 564 565 // Indicates whether the graph should be compiled in a way that 566 // ensures full debuggability. If false, we can apply more 567 // aggressive optimizations that may limit the level of debugging. 568 const bool debuggable_; 569 570 // The current id to assign to a newly added instruction. See HInstruction.id_. 571 int32_t current_instruction_id_; 572 573 // The dex file from which the method is from. 574 const DexFile& dex_file_; 575 576 // The method index in the dex file. 577 const uint32_t method_idx_; 578 579 // If inlined, this encodes how the callee is being invoked. 
580 const InvokeType invoke_type_; 581 582 // Whether the graph has been transformed to SSA form. Only used 583 // in debug mode to ensure we are not using properties only valid 584 // for non-SSA form (like the number of temporaries). 585 bool in_ssa_form_; 586 587 const bool should_generate_constructor_barrier_; 588 589 const InstructionSet instruction_set_; 590 591 // Cached constants. 592 HNullConstant* cached_null_constant_; 593 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_; 594 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_; 595 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_; 596 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_; 597 598 HCurrentMethod* cached_current_method_; 599 600 // The ArtMethod this graph is for. Note that for AOT, it may be null, 601 // for example for methods whose declaring class could not be resolved 602 // (such as when the superclass could not be found). 603 ArtMethod* art_method_; 604 605 // Keep the RTI of inexact Object to avoid having to pass stack handle 606 // collection pointer to passes which may create NullConstant. 607 ReferenceTypeInfo inexact_object_rti_; 608 609 friend class SsaBuilder; // For caching constants. 610 friend class SsaLivenessAnalysis; // For the linear order. 611 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); 612 DISALLOW_COPY_AND_ASSIGN(HGraph); 613}; 614 615class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> { 616 public: 617 HLoopInformation(HBasicBlock* header, HGraph* graph) 618 : header_(header), 619 suspend_check_(nullptr), 620 irreducible_(false), 621 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)), 622 // Make bit vector growable, as the number of blocks may change. 
623 blocks_(graph->GetArena(), graph->GetBlocks().size(), true) { 624 back_edges_.reserve(kDefaultNumberOfBackEdges); 625 } 626 627 bool IsIrreducible() const { return irreducible_; } 628 629 void Dump(std::ostream& os); 630 631 HBasicBlock* GetHeader() const { 632 return header_; 633 } 634 635 void SetHeader(HBasicBlock* block) { 636 header_ = block; 637 } 638 639 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; } 640 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; } 641 bool HasSuspendCheck() const { return suspend_check_ != nullptr; } 642 643 void AddBackEdge(HBasicBlock* back_edge) { 644 back_edges_.push_back(back_edge); 645 } 646 647 void RemoveBackEdge(HBasicBlock* back_edge) { 648 RemoveElement(back_edges_, back_edge); 649 } 650 651 bool IsBackEdge(const HBasicBlock& block) const { 652 return ContainsElement(back_edges_, &block); 653 } 654 655 size_t NumberOfBackEdges() const { 656 return back_edges_.size(); 657 } 658 659 HBasicBlock* GetPreHeader() const; 660 661 const ArenaVector<HBasicBlock*>& GetBackEdges() const { 662 return back_edges_; 663 } 664 665 // Returns the lifetime position of the back edge that has the 666 // greatest lifetime position. 667 size_t GetLifetimeEnd() const; 668 669 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) { 670 ReplaceElement(back_edges_, existing, new_back_edge); 671 } 672 673 // Finds blocks that are part of this loop. 674 void Populate(); 675 676 // Returns whether this loop information contains `block`. 677 // Note that this loop information *must* be populated before entering this function. 678 bool Contains(const HBasicBlock& block) const; 679 680 // Returns whether this loop information is an inner loop of `other`. 681 // Note that `other` *must* be populated before entering this function. 682 bool IsIn(const HLoopInformation& other) const; 683 684 // Returns true if instruction is not defined within this loop. 
685 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const; 686 687 const ArenaBitVector& GetBlocks() const { return blocks_; } 688 689 void Add(HBasicBlock* block); 690 void Remove(HBasicBlock* block); 691 692 private: 693 // Internal recursive implementation of `Populate`. 694 void PopulateRecursive(HBasicBlock* block); 695 void PopulateIrreducibleRecursive(HBasicBlock* block); 696 697 HBasicBlock* header_; 698 HSuspendCheck* suspend_check_; 699 bool irreducible_; 700 ArenaVector<HBasicBlock*> back_edges_; 701 ArenaBitVector blocks_; 702 703 DISALLOW_COPY_AND_ASSIGN(HLoopInformation); 704}; 705 706// Stores try/catch information for basic blocks. 707// Note that HGraph is constructed so that catch blocks cannot simultaneously 708// be try blocks. 709class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> { 710 public: 711 // Try block information constructor. 712 explicit TryCatchInformation(const HTryBoundary& try_entry) 713 : try_entry_(&try_entry), 714 catch_dex_file_(nullptr), 715 catch_type_index_(DexFile::kDexNoIndex16) { 716 DCHECK(try_entry_ != nullptr); 717 } 718 719 // Catch block information constructor. 
720 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file) 721 : try_entry_(nullptr), 722 catch_dex_file_(&dex_file), 723 catch_type_index_(catch_type_index) {} 724 725 bool IsTryBlock() const { return try_entry_ != nullptr; } 726 727 const HTryBoundary& GetTryEntry() const { 728 DCHECK(IsTryBlock()); 729 return *try_entry_; 730 } 731 732 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; } 733 734 bool IsCatchAllTypeIndex() const { 735 DCHECK(IsCatchBlock()); 736 return catch_type_index_ == DexFile::kDexNoIndex16; 737 } 738 739 uint16_t GetCatchTypeIndex() const { 740 DCHECK(IsCatchBlock()); 741 return catch_type_index_; 742 } 743 744 const DexFile& GetCatchDexFile() const { 745 DCHECK(IsCatchBlock()); 746 return *catch_dex_file_; 747 } 748 749 private: 750 // One of possibly several TryBoundary instructions entering the block's try. 751 // Only set for try blocks. 752 const HTryBoundary* try_entry_; 753 754 // Exception type information. Only set for catch blocks. 755 const DexFile* catch_dex_file_; 756 const uint16_t catch_type_index_; 757}; 758 759static constexpr size_t kNoLifetime = -1; 760static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1); 761 762// A block in a method. Contains the list of instructions represented 763// as a double linked list. Each block knows its predecessors and 764// successors. 
765 766class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> { 767 public: 768 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc) 769 : graph_(graph), 770 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)), 771 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)), 772 loop_information_(nullptr), 773 dominator_(nullptr), 774 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)), 775 block_id_(kInvalidBlockId), 776 dex_pc_(dex_pc), 777 lifetime_start_(kNoLifetime), 778 lifetime_end_(kNoLifetime), 779 try_catch_information_(nullptr) { 780 predecessors_.reserve(kDefaultNumberOfPredecessors); 781 successors_.reserve(kDefaultNumberOfSuccessors); 782 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks); 783 } 784 785 const ArenaVector<HBasicBlock*>& GetPredecessors() const { 786 return predecessors_; 787 } 788 789 const ArenaVector<HBasicBlock*>& GetSuccessors() const { 790 return successors_; 791 } 792 793 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const; 794 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const; 795 796 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) { 797 return ContainsElement(successors_, block, start_from); 798 } 799 800 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const { 801 return dominated_blocks_; 802 } 803 804 bool IsEntryBlock() const { 805 return graph_->GetEntryBlock() == this; 806 } 807 808 bool IsExitBlock() const { 809 return graph_->GetExitBlock() == this; 810 } 811 812 bool IsSingleGoto() const; 813 bool IsSingleTryBoundary() const; 814 815 // Returns true if this block emits nothing but a jump. 816 bool IsSingleJump() const { 817 HLoopInformation* loop_info = GetLoopInformation(); 818 return (IsSingleGoto() || IsSingleTryBoundary()) 819 // Back edges generate a suspend check. 
820 && (loop_info == nullptr || !loop_info->IsBackEdge(*this)); 821 } 822 823 void AddBackEdge(HBasicBlock* back_edge) { 824 if (loop_information_ == nullptr) { 825 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_); 826 } 827 DCHECK_EQ(loop_information_->GetHeader(), this); 828 loop_information_->AddBackEdge(back_edge); 829 } 830 831 HGraph* GetGraph() const { return graph_; } 832 void SetGraph(HGraph* graph) { graph_ = graph; } 833 834 uint32_t GetBlockId() const { return block_id_; } 835 void SetBlockId(int id) { block_id_ = id; } 836 uint32_t GetDexPc() const { return dex_pc_; } 837 838 HBasicBlock* GetDominator() const { return dominator_; } 839 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; } 840 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); } 841 842 void RemoveDominatedBlock(HBasicBlock* block) { 843 RemoveElement(dominated_blocks_, block); 844 } 845 846 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) { 847 ReplaceElement(dominated_blocks_, existing, new_block); 848 } 849 850 void ClearDominanceInformation(); 851 852 int NumberOfBackEdges() const { 853 return IsLoopHeader() ? 
loop_information_->NumberOfBackEdges() : 0;
  }

  // Accessors for the two intrusive instruction lists: regular instructions
  // and phis are kept in separate HInstructionLists.
  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends `block` to this block's successor list and symmetrically records
  // `this` as a predecessor of `block`. The two lists are always kept in sync.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Replaces the edge this->existing with this->new_block, keeping the
  // successor slot index. Note: `existing` loses `this` as a predecessor
  // before `new_block` gains it.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Mirror of ReplaceSuccessor: replaces the edge existing->this with
  // new_block->this, keeping the predecessor slot index.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // One-sided removals: these do NOT update the other block's list, so the
  // caller is responsible for keeping predecessor/successor lists symmetric.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Only valid for blocks with exactly two predecessors/successors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  // Index of the first occurrence of the given block in the respective list.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just after `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitAfter(HInstruction* cursor);

  // Split catch block into two blocks after the original move-exception bytecode
  // instruction, or at the beginning if not present. Returns the newly created,
  // latter block, or nullptr if such block could not be created (must be dead
  // in that case). Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitCatchBlockAfterMoveException();

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions used by the register allocator / liveness analysis.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  // Move to the enclosing loop by following the current loop's pre-header.
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// X-macro listing every concrete instruction shared by all architectures,
// with its superclass. NOTE: the textual order of these entries defines the
// order of the InstructionKind enum (see DECLARE_KIND in HInstruction), so
// entries must not be reordered casually.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  M(Above, Condition) \
  M(AboveOrEqual, Condition) \
  M(Add, BinaryOperation) \
  M(And, BinaryOperation) \
  M(ArrayGet, Instruction) \
  M(ArrayLength, Instruction) \
  M(ArraySet, Instruction) \
  M(Below, Condition) \
  M(BelowOrEqual, Condition) \
  M(BooleanNot, UnaryOperation) \
  M(BoundsCheck, Instruction) \
  M(BoundType, Instruction) \
  M(CheckCast, Instruction) \
  M(ClearException, Instruction) \
  M(ClinitCheck, Instruction) \
  M(Compare, BinaryOperation) \
  M(CurrentMethod, Instruction) \
  M(Deoptimize, Instruction) \
  M(Div, BinaryOperation) \
  M(DivZeroCheck, Instruction) \
  M(DoubleConstant, Constant) \
  M(Equal, Condition) \
  M(Exit, Instruction) \
  M(FloatConstant, Constant) \
  M(Goto, Instruction) \
  M(GreaterThan, Condition) \
  M(GreaterThanOrEqual, Condition) \
  M(If, Instruction) \
  M(InstanceFieldGet, Instruction) \
  M(InstanceFieldSet, Instruction) \
  M(InstanceOf, Instruction) \
  M(IntConstant, Constant) \
  M(InvokeUnresolved, Invoke) \
  M(InvokeInterface, Invoke) \
  M(InvokeStaticOrDirect, Invoke) \
  M(InvokeVirtual, Invoke) \
  M(LessThan, Condition) \
  M(LessThanOrEqual, Condition) \
  M(LoadClass, Instruction) \
  M(LoadException, Instruction) \
  M(LoadLocal, Instruction) \
  M(LoadString, Instruction) \
  M(Local, Instruction) \
  M(LongConstant, Constant) \
  M(MemoryBarrier, Instruction) \
  M(MonitorOperation, Instruction) \
  M(Mul, BinaryOperation) \
  M(NativeDebugInfo, Instruction) \
  M(Neg, UnaryOperation) \
  M(NewArray, Instruction) \
  M(NewInstance, Instruction) \
  M(Not, UnaryOperation) \
  M(NotEqual, Condition) \
  M(NullConstant, Instruction) \
  M(NullCheck, Instruction) \
  M(Or, BinaryOperation) \
  M(PackedSwitch, Instruction) \
  M(ParallelMove, Instruction) \
  M(ParameterValue, Instruction) \
  M(Phi, Instruction) \
  M(Rem, BinaryOperation) \
  M(Return, Instruction) \
  M(ReturnVoid, Instruction) \
  M(Ror, BinaryOperation) \
  M(Shl, BinaryOperation) \
  M(Shr, BinaryOperation) \
  M(StaticFieldGet, Instruction) \
  M(StaticFieldSet, Instruction) \
  M(UnresolvedInstanceFieldGet, Instruction) \
  M(UnresolvedInstanceFieldSet, Instruction) \
  M(UnresolvedStaticFieldGet, Instruction) \
  M(UnresolvedStaticFieldSet, Instruction) \
  M(StoreLocal, Instruction) \
  M(Sub, BinaryOperation) \
  M(SuspendCheck, Instruction) \
  M(Temporary, Instruction) \
  M(Throw, Instruction) \
  M(TryBoundary, Instruction) \
  M(TypeConversion, Instruction) \
  M(UShr, BinaryOperation) \
  M(Xor, BinaryOperation) \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
  M(MultiplyAccumulate, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
  M(Arm64DataProcWithShifterOp, Instruction) \
  M(Arm64IntermediateAddress, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
  M(X86ComputeBaseMethodAddress, Instruction) \
  M(X86LoadFromConstantTable, Instruction) \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
  M(Condition, BinaryOperation) \
  M(Constant, Instruction) \
  M(UnaryOperation, Instruction) \
  M(BinaryOperation, Instruction) \
  M(Invoke, Instruction)

#define FOR_EACH_INSTRUCTION(M) \
  FOR_EACH_CONCRETE_INSTRUCTION(M) \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare H<Name> for every instruction in the list above.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides stamped into each concrete instruction class.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }  \
  const char* DebugName() const OVERRIDE { return #type; }              \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// For abstract classes: Is##type()/As##type() resolve statically to `this`.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }

template <typename T> class HUseList;

// One node of the doubly-linked, arena-allocated use list. T is the user
// type: HInstruction* for input uses, HEnvironment* for environment uses.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Only HUseList may create nodes (see friend declaration below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;
  size_t index_;
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Intrusive, arena-backed list of uses. Nodes are never freed individually
// (arena allocation); Clear() simply drops the head pointer.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` in O(1). The node's own prev_/next_ pointers are left
  // untouched; the node must not be reused after removal.
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear scan; used only in debug checks above.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n) count — named "Slow" deliberately; prefer IsEmpty()/HasOnlyOneUse().
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};

// Forward iterator over an HUseList. Invalidated by removals of the current
// node during iteration.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};
// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Upgrades a record that has no use node yet with its freshly created node.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
      : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses act as full read/write barriers, hence the widening to
  // AllWritesAndReads() below.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if `other`'s flags are a subset of this object's flags.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  // Implemented by shifting our 'depend on' bits down into the 'change' bit
  // positions and intersecting with `other`'s change bits.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Nine bits per field/array access group: one bit per Primitive::Type
  // (excluding void), matching the table in the class comment.
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Work around the fact that HIR aliases I/F and J/D.
  // TODO: remove this interceptor once HIR types are clean
  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
    switch (type) {
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        return TypeFlag(Primitive::kPrimInt, offset) |
               TypeFlag(Primitive::kPrimFloat, offset);
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        return TypeFlag(Primitive::kPrimLong, offset) |
               TypeFlag(Primitive::kPrimDouble, offset);
      default:
        return TypeFlag(type, offset);
    }
  }

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
1699class HEnvironment : public ArenaObject<kArenaAllocEnvironment> { 1700 public: 1701 HEnvironment(ArenaAllocator* arena, 1702 size_t number_of_vregs, 1703 const DexFile& dex_file, 1704 uint32_t method_idx, 1705 uint32_t dex_pc, 1706 InvokeType invoke_type, 1707 HInstruction* holder) 1708 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)), 1709 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)), 1710 parent_(nullptr), 1711 dex_file_(dex_file), 1712 method_idx_(method_idx), 1713 dex_pc_(dex_pc), 1714 invoke_type_(invoke_type), 1715 holder_(holder) { 1716 } 1717 1718 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder) 1719 : HEnvironment(arena, 1720 to_copy.Size(), 1721 to_copy.GetDexFile(), 1722 to_copy.GetMethodIdx(), 1723 to_copy.GetDexPc(), 1724 to_copy.GetInvokeType(), 1725 holder) {} 1726 1727 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) { 1728 if (parent_ != nullptr) { 1729 parent_->SetAndCopyParentChain(allocator, parent); 1730 } else { 1731 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_); 1732 parent_->CopyFrom(parent); 1733 if (parent->GetParent() != nullptr) { 1734 parent_->SetAndCopyParentChain(allocator, parent->GetParent()); 1735 } 1736 } 1737 } 1738 1739 void CopyFrom(const ArenaVector<HInstruction*>& locals); 1740 void CopyFrom(HEnvironment* environment); 1741 1742 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first 1743 // input to the loop phi instead. This is for inserting instructions that 1744 // require an environment (like HDeoptimization) in the loop pre-header. 
1745 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header); 1746 1747 void SetRawEnvAt(size_t index, HInstruction* instruction) { 1748 vregs_[index] = HUserRecord<HEnvironment*>(instruction); 1749 } 1750 1751 HInstruction* GetInstructionAt(size_t index) const { 1752 return vregs_[index].GetInstruction(); 1753 } 1754 1755 void RemoveAsUserOfInput(size_t index) const; 1756 1757 size_t Size() const { return vregs_.size(); } 1758 1759 HEnvironment* GetParent() const { return parent_; } 1760 1761 void SetLocationAt(size_t index, Location location) { 1762 locations_[index] = location; 1763 } 1764 1765 Location GetLocationAt(size_t index) const { 1766 return locations_[index]; 1767 } 1768 1769 uint32_t GetDexPc() const { 1770 return dex_pc_; 1771 } 1772 1773 uint32_t GetMethodIdx() const { 1774 return method_idx_; 1775 } 1776 1777 InvokeType GetInvokeType() const { 1778 return invoke_type_; 1779 } 1780 1781 const DexFile& GetDexFile() const { 1782 return dex_file_; 1783 } 1784 1785 HInstruction* GetHolder() const { 1786 return holder_; 1787 } 1788 1789 1790 bool IsFromInlinedInvoke() const { 1791 return GetParent() != nullptr; 1792 } 1793 1794 private: 1795 // Record instructions' use entries of this environment for constant-time removal. 1796 // It should only be called by HInstruction when a new environment use is added. 1797 void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) { 1798 DCHECK(env_use->GetUser() == this); 1799 size_t index = env_use->GetIndex(); 1800 vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use); 1801 } 1802 1803 ArenaVector<HUserRecord<HEnvironment*>> vregs_; 1804 ArenaVector<Location> locations_; 1805 HEnvironment* parent_; 1806 const DexFile& dex_file_; 1807 const uint32_t method_idx_; 1808 const uint32_t dex_pc_; 1809 const InvokeType invoke_type_; 1810 1811 // The instruction that holds this environment. 
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Abstract base class for all nodes of the HIR graph. Instructions are
// arena-allocated, doubly linked into per-block instruction lists, and
// track their users both as direct inputs (uses_) and through
// environments (env_uses_).
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}

  virtual ~HInstruction() {}

// One enumerator per concrete instruction, generated from the
// FOR_EACH_INSTRUCTION x-macro.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  // Raw because this does not update the input's use list; see AddUseAt.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return reference_type_info_;
  }

  // Registers `user` (whose input `index` is this instruction) in this
  // instruction's use list, and caches the use node in the user's input
  // record for constant-time removal later.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HInstruction*>* use =
        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
  }

  // Same as AddUseAt, for an environment user.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    HUseListNode<HEnvironment*>* env_use =
        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
    user->RecordEnvUse(env_use);
  }

  // Removes this instruction from the use list of its input at `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom, but loop phis of `block`'s loop header are
  // substituted by their input (see CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declares Is/As casts for every concrete instruction; defined out of line.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// Abstract instructions get default (null) As casts, overridden by subclasses.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash over the kind and the ids of the inputs; used by GVN-style passes.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (size_t i = 0, e = InputCount(); i < e; ++i) {
      result = (result * 31) + InputAt(i)->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

 protected:
  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;
  void SetSideEffects(SideEffects other) { side_effects_ = other; }

 private:
  // Called by HEnvironment when an environment use goes away.
  void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo reference_type_info_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);

// Iterator over the inputs of a single instruction.
class HInputIterator : public ValueObject {
 public:
  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}

  bool Done() const { return index_ == instruction_->InputCount(); }
  HInstruction* Current() const { return instruction_->InputAt(index_); }
  void Advance() { index_++; }

 private:
  HInstruction* instruction_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
};

// Forward iterator over an instruction list. The next instruction is cached
// so the current one may be removed/moved while iterating.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
};

// Backward counterpart of HInstructionIterator, with the same caching scheme.
class HBackwardInstructionIterator : public ValueObject {
 public:
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// Instruction with a statically known number of inputs, stored inline.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0.
2230template<> 2231class HTemplateInstruction<0>: public HInstruction { 2232 public: 2233 explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc) 2234 : HInstruction(side_effects, dex_pc) {} 2235 2236 virtual ~HTemplateInstruction() {} 2237 2238 size_t InputCount() const OVERRIDE { return 0; } 2239 2240 protected: 2241 const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE { 2242 LOG(FATAL) << "Unreachable"; 2243 UNREACHABLE(); 2244 } 2245 2246 void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED, 2247 const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE { 2248 LOG(FATAL) << "Unreachable"; 2249 UNREACHABLE(); 2250 } 2251 2252 private: 2253 friend class SsaBuilder; 2254}; 2255 2256template<intptr_t N> 2257class HExpression : public HTemplateInstruction<N> { 2258 public: 2259 HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc) 2260 : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {} 2261 virtual ~HExpression() {} 2262 2263 Primitive::Type GetType() const OVERRIDE { return type_; } 2264 2265 protected: 2266 Primitive::Type type_; 2267}; 2268 2269// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow 2270// instruction that branches to the exit block. 2271class HReturnVoid : public HTemplateInstruction<0> { 2272 public: 2273 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc) 2274 : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2275 2276 bool IsControlFlow() const OVERRIDE { return true; } 2277 2278 DECLARE_INSTRUCTION(ReturnVoid); 2279 2280 private: 2281 DISALLOW_COPY_AND_ASSIGN(HReturnVoid); 2282}; 2283 2284// Represents dex's RETURN opcodes. A HReturn is a control flow 2285// instruction that branches to the exit block. 
class HReturn : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};

// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};

// Jumps from one block to another.
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A goto's block has a single successor: the jump target.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

// Base class for compile-time constants. Constants are side-effect free and
// always movable. Concrete subclasses have private constructors and are
// created (and interned) through HGraph, hence the friend declarations below.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  virtual bool IsMinusOne() const { return false; }
  virtual bool IsZero() const { return false; }
  virtual bool IsOne() const { return false; }

  // Raw 64-bit representation of the constant's value.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};

// The reference constant `null`. Only HGraph may construct it.
class HNullConstant : public HConstant {
 public:
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }

  size_t ComputeHashCode() const OVERRIDE { return 0; }

  DECLARE_INSTRUCTION(NullConstant);

 private:
  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
};

// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant());
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  // Booleans are represented as int 0/1.
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};

// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant());
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsZero() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};

// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition input is true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition input is false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};


// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum BoundaryKind {
    kEntry,
    kExit,
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns the exception-handler successors (all successors but index zero).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }

  // Returns whether `other` has exactly the same set of handler successors.
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  const BoundaryKind kind_;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};

// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};

// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};

// PackedSwitch (jump table).
// A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        start_value_(start_value),
        num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Base class for single-input operations that produce a value.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Base class for two-input operations that produce a value.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `operation` and return a HConstant
  // containing the result of this evaluation. If `operation` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // Mixed-type and null cases default to "not evaluatable".
  virtual HConstant* Evaluate(HIntConstant* x ATTRIBUTE_UNUSED,
                              HLongConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (int, long) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
    return nullptr;
  }
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    VLOG(compiler) << DebugName() << " is not defined for the (null, null) case.";
    return nullptr;
  }

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
};

// Base class for comparisons producing a boolean value.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
        needs_materialization_(true),
        bias_(ComparisonBias::kNoBias) {}

  bool NeedsMaterialization() const { return needs_materialization_; }
  void ClearNeedsMaterialization() { needs_materialization_ = false; }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }

  void SetBias(ComparisonBias bias) { bias_ = bias; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return bias_ == other->AsCondition()->bias_;
  }

  // Returns whether a NaN operand makes this FP condition evaluate to true.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType()));
    IfCondition if_cond = GetCondition();
    return IsGtBias() ?
((if_cond == kCondGT) || (if_cond == kCondGE)) : (if_cond == kCondNE); 2748 } 2749 2750 bool IsFPConditionFalseIfNaN() const { 2751 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())); 2752 IfCondition if_cond = GetCondition(); 2753 return IsGtBias() ? ((if_cond == kCondLT) || (if_cond == kCondLE)) : (if_cond == kCondEQ); 2754 } 2755 2756 private: 2757 // For register allocation purposes, returns whether this instruction needs to be 2758 // materialized (that is, not just be in the processor flags). 2759 bool needs_materialization_; 2760 2761 // Needed if we merge a HCompare into a HCondition. 2762 ComparisonBias bias_; 2763 2764 DISALLOW_COPY_AND_ASSIGN(HCondition); 2765}; 2766 2767// Instruction to check if two inputs are equal to each other. 2768class HEqual : public HCondition { 2769 public: 2770 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2771 : HCondition(first, second, dex_pc) {} 2772 2773 bool IsCommutative() const OVERRIDE { return true; } 2774 2775 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2776 return GetBlock()->GetGraph()->GetIntConstant( 2777 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2778 } 2779 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2780 return GetBlock()->GetGraph()->GetIntConstant( 2781 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2782 } 2783 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2784 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2785 return GetBlock()->GetGraph()->GetIntConstant(1); 2786 } 2787 2788 DECLARE_INSTRUCTION(Equal); 2789 2790 IfCondition GetCondition() const OVERRIDE { 2791 return kCondEQ; 2792 } 2793 2794 IfCondition GetOppositeCondition() const OVERRIDE { 2795 return kCondNE; 2796 } 2797 2798 private: 2799 template <typename T> bool Compute(T x, T y) const { return x == y; } 2800 2801 DISALLOW_COPY_AND_ASSIGN(HEqual); 2802}; 2803 2804class HNotEqual : public HCondition { 2805 public: 2806 
HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2807 : HCondition(first, second, dex_pc) {} 2808 2809 bool IsCommutative() const OVERRIDE { return true; } 2810 2811 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2812 return GetBlock()->GetGraph()->GetIntConstant( 2813 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2814 } 2815 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2816 return GetBlock()->GetGraph()->GetIntConstant( 2817 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2818 } 2819 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, 2820 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 2821 return GetBlock()->GetGraph()->GetIntConstant(0); 2822 } 2823 2824 DECLARE_INSTRUCTION(NotEqual); 2825 2826 IfCondition GetCondition() const OVERRIDE { 2827 return kCondNE; 2828 } 2829 2830 IfCondition GetOppositeCondition() const OVERRIDE { 2831 return kCondEQ; 2832 } 2833 2834 private: 2835 template <typename T> bool Compute(T x, T y) const { return x != y; } 2836 2837 DISALLOW_COPY_AND_ASSIGN(HNotEqual); 2838}; 2839 2840class HLessThan : public HCondition { 2841 public: 2842 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2843 : HCondition(first, second, dex_pc) {} 2844 2845 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2846 return GetBlock()->GetGraph()->GetIntConstant( 2847 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2848 } 2849 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 2850 return GetBlock()->GetGraph()->GetIntConstant( 2851 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 2852 } 2853 2854 DECLARE_INSTRUCTION(LessThan); 2855 2856 IfCondition GetCondition() const OVERRIDE { 2857 return kCondLT; 2858 } 2859 2860 IfCondition GetOppositeCondition() const OVERRIDE { 2861 return kCondGE; 2862 } 2863 2864 private: 2865 template <typename T> bool Compute(T x, T y) const { return x 
< y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

// Instruction implementing the signed less-than-or-equal (<=) comparison.
class HLessThanOrEqual : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

// Instruction implementing the signed greater-than (>) comparison.
class HGreaterThan : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};

// Instruction implementing the signed greater-than-or-equal (>=) comparison.
class HGreaterThanOrEqual : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};

// Instruction implementing the unsigned below (<) comparison.
// Int/long inputs are reinterpreted as unsigned before comparing.
class HBelow : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint32_t>(x->GetValue()),
                static_cast<uint32_t>(y->GetValue())), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(static_cast<uint64_t>(x->GetValue()),
                static_cast<uint64_t>(y->GetValue())), GetDexPc());
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const OVERRIDE {
    return kCondB;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondAE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HBelow);
};
2991 2992class HBelowOrEqual : public HCondition { 2993 public: 2994 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 2995 : HCondition(first, second, dex_pc) {} 2996 2997 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 2998 return GetBlock()->GetGraph()->GetIntConstant( 2999 Compute(static_cast<uint32_t>(x->GetValue()), 3000 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3001 } 3002 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3003 return GetBlock()->GetGraph()->GetIntConstant( 3004 Compute(static_cast<uint64_t>(x->GetValue()), 3005 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3006 } 3007 3008 DECLARE_INSTRUCTION(BelowOrEqual); 3009 3010 IfCondition GetCondition() const OVERRIDE { 3011 return kCondBE; 3012 } 3013 3014 IfCondition GetOppositeCondition() const OVERRIDE { 3015 return kCondA; 3016 } 3017 3018 private: 3019 template <typename T> bool Compute(T x, T y) const { return x <= y; } 3020 3021 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 3022}; 3023 3024class HAbove : public HCondition { 3025 public: 3026 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3027 : HCondition(first, second, dex_pc) {} 3028 3029 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3030 return GetBlock()->GetGraph()->GetIntConstant( 3031 Compute(static_cast<uint32_t>(x->GetValue()), 3032 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3033 } 3034 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3035 return GetBlock()->GetGraph()->GetIntConstant( 3036 Compute(static_cast<uint64_t>(x->GetValue()), 3037 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3038 } 3039 3040 DECLARE_INSTRUCTION(Above); 3041 3042 IfCondition GetCondition() const OVERRIDE { 3043 return kCondA; 3044 } 3045 3046 IfCondition GetOppositeCondition() const OVERRIDE { 3047 return kCondBE; 3048 } 3049 3050 private: 3051 template <typename T> bool 
Compute(T x, T y) const { return x > y; } 3052 3053 DISALLOW_COPY_AND_ASSIGN(HAbove); 3054}; 3055 3056class HAboveOrEqual : public HCondition { 3057 public: 3058 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3059 : HCondition(first, second, dex_pc) {} 3060 3061 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3062 return GetBlock()->GetGraph()->GetIntConstant( 3063 Compute(static_cast<uint32_t>(x->GetValue()), 3064 static_cast<uint32_t>(y->GetValue())), GetDexPc()); 3065 } 3066 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3067 return GetBlock()->GetGraph()->GetIntConstant( 3068 Compute(static_cast<uint64_t>(x->GetValue()), 3069 static_cast<uint64_t>(y->GetValue())), GetDexPc()); 3070 } 3071 3072 DECLARE_INSTRUCTION(AboveOrEqual); 3073 3074 IfCondition GetCondition() const OVERRIDE { 3075 return kCondAE; 3076 } 3077 3078 IfCondition GetOppositeCondition() const OVERRIDE { 3079 return kCondB; 3080 } 3081 3082 private: 3083 template <typename T> bool Compute(T x, T y) const { return x >= y; } 3084 3085 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3086}; 3087 3088// Instruction to check how two inputs compare to each other. 3089// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3090class HCompare : public HBinaryOperation { 3091 public: 3092 HCompare(Primitive::Type type, 3093 HInstruction* first, 3094 HInstruction* second, 3095 ComparisonBias bias, 3096 uint32_t dex_pc) 3097 : HBinaryOperation(Primitive::kPrimInt, 3098 first, 3099 second, 3100 SideEffectsForArchRuntimeCalls(type), 3101 dex_pc), 3102 bias_(bias) { 3103 DCHECK_EQ(type, first->GetType()); 3104 DCHECK_EQ(type, second->GetType()); 3105 } 3106 3107 template <typename T> 3108 int32_t Compute(T x, T y) const { return x == y ? 0 : x > y ? 
1 : -1; } 3109 3110 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3111 return GetBlock()->GetGraph()->GetIntConstant( 3112 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3113 } 3114 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3115 return GetBlock()->GetGraph()->GetIntConstant( 3116 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3117 } 3118 3119 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3120 return bias_ == other->AsCompare()->bias_; 3121 } 3122 3123 ComparisonBias GetBias() const { return bias_; } 3124 3125 bool IsGtBias() { return bias_ == ComparisonBias::kGtBias; } 3126 3127 3128 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) { 3129 // MIPS64 uses a runtime call for FP comparisons. 3130 return Primitive::IsFloatingPointType(type) ? SideEffects::CanTriggerGC() : SideEffects::None(); 3131 } 3132 3133 DECLARE_INSTRUCTION(Compare); 3134 3135 private: 3136 const ComparisonBias bias_; 3137 3138 DISALLOW_COPY_AND_ASSIGN(HCompare); 3139}; 3140 3141// A local in the graph. Corresponds to a Dex register. 3142class HLocal : public HTemplateInstruction<0> { 3143 public: 3144 explicit HLocal(uint16_t reg_number) 3145 : HTemplateInstruction(SideEffects::None(), kNoDexPc), reg_number_(reg_number) {} 3146 3147 DECLARE_INSTRUCTION(Local); 3148 3149 uint16_t GetRegNumber() const { return reg_number_; } 3150 3151 private: 3152 // The Dex register number. 3153 const uint16_t reg_number_; 3154 3155 DISALLOW_COPY_AND_ASSIGN(HLocal); 3156}; 3157 3158// Load a given local. The local is an input of this instruction. 
3159class HLoadLocal : public HExpression<1> { 3160 public: 3161 HLoadLocal(HLocal* local, Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3162 : HExpression(type, SideEffects::None(), dex_pc) { 3163 SetRawInputAt(0, local); 3164 } 3165 3166 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3167 3168 DECLARE_INSTRUCTION(LoadLocal); 3169 3170 private: 3171 DISALLOW_COPY_AND_ASSIGN(HLoadLocal); 3172}; 3173 3174// Store a value in a given local. This instruction has two inputs: the value 3175// and the local. 3176class HStoreLocal : public HTemplateInstruction<2> { 3177 public: 3178 HStoreLocal(HLocal* local, HInstruction* value, uint32_t dex_pc = kNoDexPc) 3179 : HTemplateInstruction(SideEffects::None(), dex_pc) { 3180 SetRawInputAt(0, local); 3181 SetRawInputAt(1, value); 3182 } 3183 3184 HLocal* GetLocal() const { return reinterpret_cast<HLocal*>(InputAt(0)); } 3185 3186 DECLARE_INSTRUCTION(StoreLocal); 3187 3188 private: 3189 DISALLOW_COPY_AND_ASSIGN(HStoreLocal); 3190}; 3191 3192class HFloatConstant : public HConstant { 3193 public: 3194 float GetValue() const { return value_; } 3195 3196 uint64_t GetValueAsUint64() const OVERRIDE { 3197 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 3198 } 3199 3200 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3201 DCHECK(other->IsFloatConstant()); 3202 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 3203 } 3204 3205 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3206 3207 bool IsMinusOne() const OVERRIDE { 3208 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f)); 3209 } 3210 bool IsZero() const OVERRIDE { 3211 return value_ == 0.0f; 3212 } 3213 bool IsOne() const OVERRIDE { 3214 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 3215 } 3216 bool IsNaN() const { 3217 return std::isnan(value_); 3218 } 3219 3220 DECLARE_INSTRUCTION(FloatConstant); 3221 
3222 private: 3223 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 3224 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 3225 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 3226 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 3227 3228 const float value_; 3229 3230 // Only the SsaBuilder and HGraph can create floating-point constants. 3231 friend class SsaBuilder; 3232 friend class HGraph; 3233 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 3234}; 3235 3236class HDoubleConstant : public HConstant { 3237 public: 3238 double GetValue() const { return value_; } 3239 3240 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 3241 3242 bool InstructionDataEquals(HInstruction* other) const OVERRIDE { 3243 DCHECK(other->IsDoubleConstant()); 3244 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 3245 } 3246 3247 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 3248 3249 bool IsMinusOne() const OVERRIDE { 3250 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 3251 } 3252 bool IsZero() const OVERRIDE { 3253 return value_ == 0.0; 3254 } 3255 bool IsOne() const OVERRIDE { 3256 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 3257 } 3258 bool IsNaN() const { 3259 return std::isnan(value_); 3260 } 3261 3262 DECLARE_INSTRUCTION(DoubleConstant); 3263 3264 private: 3265 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 3266 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 3267 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 3268 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 3269 3270 const double value_; 3271 3272 // Only the SsaBuilder and HGraph can create floating-point constants. 
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};

// Allocates a new instance of the class identified by `type_index` in
// `dex_file`. Input 0 is the class (`cls`), input 1 is the current method.
class HNewInstance : public HExpression<2> {
 public:
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool can_throw,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        can_throw_(can_throw),
        finalizable_(finalizable),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // It may throw when called on type that's not instantiable/accessible.
  // It can throw OOME.
  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
  // Note: the `|| true` makes this unconditionally true until the TODO above is addressed.
  bool CanThrow() const OVERRIDE { return can_throw_ || true; }

  bool IsFinalizable() const { return finalizable_; }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool can_throw_;
  const bool finalizable_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};

// All recognized intrinsics, expanded from INTRINSICS_LIST in
// intrinsics_list.h, plus kNone for non-intrinsic invokes.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);

enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,    // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache  // Intrinsic requires an environment or requires a dex cache.
};

enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};

enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};

// Abstract base class for all invoke instructions.
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments. This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return return_type_; }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return can_throw_; }

  // Only invokes recognized as intrinsics are safe to move.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes only compare equal when both are the same (non-kNone) intrinsic.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      return_type_(return_type),
      dex_method_index_(dex_method_index),
      original_invoke_type_(original_invoke_type),
      can_throw_(true),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  void SetCanThrow(bool can_throw) { can_throw_ = can_throw; }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const Primitive::Type return_type_;
  const uint32_t dex_method_index_;
  const InvokeType original_invoke_type_;
  bool can_throw_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};

// Invoke used when the target method is unresolved at compile time.
class HInvokeUnresolved : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};

class HInvokeStaticOrDirect : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Use ArtMethod* at an address that will be known at link time, embed the direct
    // address in the code. If the image is relocatable, emit .patch_oat entry.
    // Used for app->boot calls with relocatable image and boot->boot calls, whether
    // the image relocatable or not.
    kDirectAddressWithFixup,

    // Load from resolved methods array in the dex cache using a PC-relative load.
3515 // Used when we need to use the dex cache, for example for invoke-static that 3516 // may cause class initialization (the entry may point to a resolution method), 3517 // and we know that we can access the dex cache arrays using a PC-relative load. 3518 kDexCachePcRelative, 3519 3520 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*. 3521 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind 3522 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative) 3523 // or unimplemented or impractical (i.e. slow) on a particular architecture. 3524 kDexCacheViaMethod, 3525 }; 3526 3527 // Determines the location of the code pointer. 3528 enum class CodePtrLocation { 3529 // Recursive call, use local PC-relative call instruction. 3530 kCallSelf, 3531 3532 // Use PC-relative call instruction patched at link time. 3533 // Used for calls within an oat file, boot->boot or app->app. 3534 kCallPCRelative, 3535 3536 // Call to a known target address, embed the direct address in code. 3537 // Used for app->boot call with non-relocatable image and for JIT-compiled calls. 3538 kCallDirect, 3539 3540 // Call to a target address that will be known at link time, embed the direct 3541 // address in code. If the image is relocatable, emit .patch_oat entry. 3542 // Used for app->boot calls with relocatable image and boot->boot calls, whether 3543 // the image relocatable or not. 3544 kCallDirectWithFixup, 3545 3546 // Use code pointer from the ArtMethod*. 3547 // Used when we don't know the target code. This is also the last-resort-kind used when 3548 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 
3549 kCallArtMethod, 3550 }; 3551 3552 struct DispatchInfo { 3553 MethodLoadKind method_load_kind; 3554 CodePtrLocation code_ptr_location; 3555 // The method load data holds 3556 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 3557 // Note that there are multiple string init methods, each having its own offset. 3558 // - the method address for kDirectAddress 3559 // - the dex cache arrays offset for kDexCachePcRel. 3560 uint64_t method_load_data; 3561 uint64_t direct_code_ptr; 3562 }; 3563 3564 HInvokeStaticOrDirect(ArenaAllocator* arena, 3565 uint32_t number_of_arguments, 3566 Primitive::Type return_type, 3567 uint32_t dex_pc, 3568 uint32_t method_index, 3569 MethodReference target_method, 3570 DispatchInfo dispatch_info, 3571 InvokeType original_invoke_type, 3572 InvokeType optimized_invoke_type, 3573 ClinitCheckRequirement clinit_check_requirement) 3574 : HInvoke(arena, 3575 number_of_arguments, 3576 // There is potentially one extra argument for the HCurrentMethod node, and 3577 // potentially one other if the clinit check is explicit, and potentially 3578 // one other if the method is a string factory. 3579 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) + 3580 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) + 3581 (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 
1u : 0u), 3582 return_type, 3583 dex_pc, 3584 method_index, 3585 original_invoke_type), 3586 optimized_invoke_type_(optimized_invoke_type), 3587 clinit_check_requirement_(clinit_check_requirement), 3588 target_method_(target_method), 3589 dispatch_info_(dispatch_info) { } 3590 3591 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 3592 bool had_current_method_input = HasCurrentMethodInput(); 3593 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind); 3594 3595 // Using the current method is the default and once we find a better 3596 // method load kind, we should not go back to using the current method. 3597 DCHECK(had_current_method_input || !needs_current_method_input); 3598 3599 if (had_current_method_input && !needs_current_method_input) { 3600 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod()); 3601 RemoveInputAt(GetSpecialInputIndex()); 3602 } 3603 dispatch_info_ = dispatch_info; 3604 } 3605 3606 void AddSpecialInput(HInstruction* input) { 3607 // We allow only one special input. 3608 DCHECK(!IsStringInit() && !HasCurrentMethodInput()); 3609 DCHECK(InputCount() == GetSpecialInputIndex() || 3610 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck())); 3611 InsertInputAt(GetSpecialInputIndex(), input); 3612 } 3613 3614 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 3615 // We access the method via the dex cache so we can't do an implicit null check. 3616 // TODO: for intrinsics we can generate implicit null checks. 3617 return false; 3618 } 3619 3620 bool CanBeNull() const OVERRIDE { 3621 return return_type_ == Primitive::kPrimNot && !IsStringInit(); 3622 } 3623 3624 // Get the index of the special input, if any. 
3625 // 3626 // If the invoke HasCurrentMethodInput(), the "special input" is the current 3627 // method pointer; otherwise there may be one platform-specific special input, 3628 // such as PC-relative addressing base. 3629 uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); } 3630 3631 InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; } 3632 void SetOptimizedInvokeType(InvokeType invoke_type) { 3633 optimized_invoke_type_ = invoke_type; 3634 } 3635 3636 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; } 3637 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; } 3638 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; } 3639 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE; 3640 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; } 3641 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; } 3642 bool HasPcRelativeDexCache() const { 3643 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative; 3644 } 3645 bool HasCurrentMethodInput() const { 3646 // This function can be called only after the invoke has been fully initialized by the builder. 
3647 if (NeedsCurrentMethodInput(GetMethodLoadKind())) { 3648 DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod()); 3649 return true; 3650 } else { 3651 DCHECK(InputCount() == GetSpecialInputIndex() || 3652 !InputAt(GetSpecialInputIndex())->IsCurrentMethod()); 3653 return false; 3654 } 3655 } 3656 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; } 3657 MethodReference GetTargetMethod() const { return target_method_; } 3658 void SetTargetMethod(MethodReference method) { target_method_ = method; } 3659 3660 int32_t GetStringInitOffset() const { 3661 DCHECK(IsStringInit()); 3662 return dispatch_info_.method_load_data; 3663 } 3664 3665 uint64_t GetMethodAddress() const { 3666 DCHECK(HasMethodAddress()); 3667 return dispatch_info_.method_load_data; 3668 } 3669 3670 uint32_t GetDexCacheArrayOffset() const { 3671 DCHECK(HasPcRelativeDexCache()); 3672 return dispatch_info_.method_load_data; 3673 } 3674 3675 uint64_t GetDirectCodePtr() const { 3676 DCHECK(HasDirectCodePtr()); 3677 return dispatch_info_.direct_code_ptr; 3678 } 3679 3680 ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; } 3681 3682 // Is this instruction a call to a static method? 3683 bool IsStatic() const { 3684 return GetOriginalInvokeType() == kStatic; 3685 } 3686 3687 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by 3688 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck) 3689 // instruction; only relevant for static calls with explicit clinit check. 
3690 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) { 3691 DCHECK(IsStaticWithExplicitClinitCheck()); 3692 size_t last_input_index = InputCount() - 1; 3693 HInstruction* last_input = InputAt(last_input_index); 3694 DCHECK(last_input != nullptr); 3695 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName(); 3696 RemoveAsUserOfInput(last_input_index); 3697 inputs_.pop_back(); 3698 clinit_check_requirement_ = new_requirement; 3699 DCHECK(!IsStaticWithExplicitClinitCheck()); 3700 } 3701 3702 HInstruction* GetAndRemoveThisArgumentOfStringInit() { 3703 DCHECK(IsStringInit()); 3704 size_t index = InputCount() - 1; 3705 HInstruction* input = InputAt(index); 3706 RemoveAsUserOfInput(index); 3707 inputs_.pop_back(); 3708 return input; 3709 } 3710 3711 // Is this a call to a static method whose declaring class has an 3712 // explicit initialization check in the graph? 3713 bool IsStaticWithExplicitClinitCheck() const { 3714 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit); 3715 } 3716 3717 // Is this a call to a static method whose declaring class has an 3718 // implicit intialization check requirement? 3719 bool IsStaticWithImplicitClinitCheck() const { 3720 return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit); 3721 } 3722 3723 // Does this method load kind need the current method as an input? 
3724 static bool NeedsCurrentMethodInput(MethodLoadKind kind) { 3725 return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod; 3726 } 3727 3728 DECLARE_INSTRUCTION(InvokeStaticOrDirect); 3729 3730 protected: 3731 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE { 3732 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i); 3733 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) { 3734 HInstruction* input = input_record.GetInstruction(); 3735 // `input` is the last input of a static invoke marked as having 3736 // an explicit clinit check. It must either be: 3737 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 3738 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 3739 DCHECK(input != nullptr); 3740 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName(); 3741 } 3742 return input_record; 3743 } 3744 3745 void InsertInputAt(size_t index, HInstruction* input); 3746 void RemoveInputAt(size_t index); 3747 3748 private: 3749 InvokeType optimized_invoke_type_; 3750 ClinitCheckRequirement clinit_check_requirement_; 3751 // The target method may refer to different dex file or method index than the original 3752 // invoke. This happens for sharpened calls and for calls where a method was redeclared 3753 // in derived class to increase visibility. 
3754 MethodReference target_method_; 3755 DispatchInfo dispatch_info_; 3756 3757 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect); 3758}; 3759std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs); 3760std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs); 3761 3762class HInvokeVirtual : public HInvoke { 3763 public: 3764 HInvokeVirtual(ArenaAllocator* arena, 3765 uint32_t number_of_arguments, 3766 Primitive::Type return_type, 3767 uint32_t dex_pc, 3768 uint32_t dex_method_index, 3769 uint32_t vtable_index) 3770 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual), 3771 vtable_index_(vtable_index) {} 3772 3773 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3774 // TODO: Add implicit null checks in intrinsics. 3775 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3776 } 3777 3778 uint32_t GetVTableIndex() const { return vtable_index_; } 3779 3780 DECLARE_INSTRUCTION(InvokeVirtual); 3781 3782 private: 3783 const uint32_t vtable_index_; 3784 3785 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual); 3786}; 3787 3788class HInvokeInterface : public HInvoke { 3789 public: 3790 HInvokeInterface(ArenaAllocator* arena, 3791 uint32_t number_of_arguments, 3792 Primitive::Type return_type, 3793 uint32_t dex_pc, 3794 uint32_t dex_method_index, 3795 uint32_t imt_index) 3796 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface), 3797 imt_index_(imt_index) {} 3798 3799 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { 3800 // TODO: Add implicit null checks in intrinsics. 
3801 return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); 3802 } 3803 3804 uint32_t GetImtIndex() const { return imt_index_; } 3805 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 3806 3807 DECLARE_INSTRUCTION(InvokeInterface); 3808 3809 private: 3810 const uint32_t imt_index_; 3811 3812 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface); 3813}; 3814 3815class HNeg : public HUnaryOperation { 3816 public: 3817 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 3818 : HUnaryOperation(result_type, input, dex_pc) {} 3819 3820 template <typename T> T Compute(T x) const { return -x; } 3821 3822 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 3823 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 3824 } 3825 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 3826 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 3827 } 3828 3829 DECLARE_INSTRUCTION(Neg); 3830 3831 private: 3832 DISALLOW_COPY_AND_ASSIGN(HNeg); 3833}; 3834 3835class HNewArray : public HExpression<2> { 3836 public: 3837 HNewArray(HInstruction* length, 3838 HCurrentMethod* current_method, 3839 uint32_t dex_pc, 3840 uint16_t type_index, 3841 const DexFile& dex_file, 3842 QuickEntrypointEnum entrypoint) 3843 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3844 type_index_(type_index), 3845 dex_file_(dex_file), 3846 entrypoint_(entrypoint) { 3847 SetRawInputAt(0, length); 3848 SetRawInputAt(1, current_method); 3849 } 3850 3851 uint16_t GetTypeIndex() const { return type_index_; } 3852 const DexFile& GetDexFile() const { return dex_file_; } 3853 3854 // Calls runtime so needs an environment. 3855 bool NeedsEnvironment() const OVERRIDE { return true; } 3856 3857 // May throw NegativeArraySizeException, OutOfMemoryError, etc. 
3858 bool CanThrow() const OVERRIDE { return true; } 3859 3860 bool CanBeNull() const OVERRIDE { return false; } 3861 3862 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3863 3864 DECLARE_INSTRUCTION(NewArray); 3865 3866 private: 3867 const uint16_t type_index_; 3868 const DexFile& dex_file_; 3869 const QuickEntrypointEnum entrypoint_; 3870 3871 DISALLOW_COPY_AND_ASSIGN(HNewArray); 3872}; 3873 3874class HAdd : public HBinaryOperation { 3875 public: 3876 HAdd(Primitive::Type result_type, 3877 HInstruction* left, 3878 HInstruction* right, 3879 uint32_t dex_pc = kNoDexPc) 3880 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3881 3882 bool IsCommutative() const OVERRIDE { return true; } 3883 3884 template <typename T> T Compute(T x, T y) const { return x + y; } 3885 3886 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3887 return GetBlock()->GetGraph()->GetIntConstant( 3888 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3889 } 3890 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3891 return GetBlock()->GetGraph()->GetLongConstant( 3892 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3893 } 3894 3895 DECLARE_INSTRUCTION(Add); 3896 3897 private: 3898 DISALLOW_COPY_AND_ASSIGN(HAdd); 3899}; 3900 3901class HSub : public HBinaryOperation { 3902 public: 3903 HSub(Primitive::Type result_type, 3904 HInstruction* left, 3905 HInstruction* right, 3906 uint32_t dex_pc = kNoDexPc) 3907 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3908 3909 template <typename T> T Compute(T x, T y) const { return x - y; } 3910 3911 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3912 return GetBlock()->GetGraph()->GetIntConstant( 3913 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3914 } 3915 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3916 return GetBlock()->GetGraph()->GetLongConstant( 3917 
Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3918 } 3919 3920 DECLARE_INSTRUCTION(Sub); 3921 3922 private: 3923 DISALLOW_COPY_AND_ASSIGN(HSub); 3924}; 3925 3926class HMul : public HBinaryOperation { 3927 public: 3928 HMul(Primitive::Type result_type, 3929 HInstruction* left, 3930 HInstruction* right, 3931 uint32_t dex_pc = kNoDexPc) 3932 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 3933 3934 bool IsCommutative() const OVERRIDE { return true; } 3935 3936 template <typename T> T Compute(T x, T y) const { return x * y; } 3937 3938 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3939 return GetBlock()->GetGraph()->GetIntConstant( 3940 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3941 } 3942 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3943 return GetBlock()->GetGraph()->GetLongConstant( 3944 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3945 } 3946 3947 DECLARE_INSTRUCTION(Mul); 3948 3949 private: 3950 DISALLOW_COPY_AND_ASSIGN(HMul); 3951}; 3952 3953class HDiv : public HBinaryOperation { 3954 public: 3955 HDiv(Primitive::Type result_type, 3956 HInstruction* left, 3957 HInstruction* right, 3958 uint32_t dex_pc) 3959 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3960 3961 template <typename T> 3962 T Compute(T x, T y) const { 3963 // Our graph structure ensures we never have 0 for `y` during 3964 // constant folding. 3965 DCHECK_NE(y, 0); 3966 // Special case -1 to avoid getting a SIGFPE on x86(_64). 3967 return (y == -1) ? 
-x : x / y; 3968 } 3969 3970 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3971 return GetBlock()->GetGraph()->GetIntConstant( 3972 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3973 } 3974 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3975 return GetBlock()->GetGraph()->GetLongConstant( 3976 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3977 } 3978 3979 static SideEffects SideEffectsForArchRuntimeCalls() { 3980 // The generated code can use a runtime call. 3981 return SideEffects::CanTriggerGC(); 3982 } 3983 3984 DECLARE_INSTRUCTION(Div); 3985 3986 private: 3987 DISALLOW_COPY_AND_ASSIGN(HDiv); 3988}; 3989 3990class HRem : public HBinaryOperation { 3991 public: 3992 HRem(Primitive::Type result_type, 3993 HInstruction* left, 3994 HInstruction* right, 3995 uint32_t dex_pc) 3996 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {} 3997 3998 template <typename T> 3999 T Compute(T x, T y) const { 4000 // Our graph structure ensures we never have 0 for `y` during 4001 // constant folding. 4002 DCHECK_NE(y, 0); 4003 // Special case -1 to avoid getting a SIGFPE on x86(_64). 4004 return (y == -1) ? 
0 : x % y; 4005 } 4006 4007 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4008 return GetBlock()->GetGraph()->GetIntConstant( 4009 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4010 } 4011 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4012 return GetBlock()->GetGraph()->GetLongConstant( 4013 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4014 } 4015 4016 4017 static SideEffects SideEffectsForArchRuntimeCalls() { 4018 return SideEffects::CanTriggerGC(); 4019 } 4020 4021 DECLARE_INSTRUCTION(Rem); 4022 4023 private: 4024 DISALLOW_COPY_AND_ASSIGN(HRem); 4025}; 4026 4027class HDivZeroCheck : public HExpression<1> { 4028 public: 4029 // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException` 4030 // constructor. 4031 HDivZeroCheck(HInstruction* value, uint32_t dex_pc) 4032 : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) { 4033 SetRawInputAt(0, value); 4034 } 4035 4036 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); } 4037 4038 bool CanBeMoved() const OVERRIDE { return true; } 4039 4040 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4041 return true; 4042 } 4043 4044 bool NeedsEnvironment() const OVERRIDE { return true; } 4045 bool CanThrow() const OVERRIDE { return true; } 4046 4047 DECLARE_INSTRUCTION(DivZeroCheck); 4048 4049 private: 4050 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck); 4051}; 4052 4053class HShl : public HBinaryOperation { 4054 public: 4055 HShl(Primitive::Type result_type, 4056 HInstruction* left, 4057 HInstruction* right, 4058 uint32_t dex_pc = kNoDexPc) 4059 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4060 4061 template <typename T, typename U, typename V> 4062 T Compute(T x, U y, V max_shift_value) const { 4063 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4064 "V is not the unsigned integer type corresponding to T"); 4065 return x << 
(y & max_shift_value); 4066 } 4067 4068 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4069 return GetBlock()->GetGraph()->GetIntConstant( 4070 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4071 } 4072 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4073 // case is handled as `x << static_cast<int>(y)`. 4074 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4075 return GetBlock()->GetGraph()->GetLongConstant( 4076 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4077 } 4078 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4079 return GetBlock()->GetGraph()->GetLongConstant( 4080 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4081 } 4082 4083 DECLARE_INSTRUCTION(Shl); 4084 4085 private: 4086 DISALLOW_COPY_AND_ASSIGN(HShl); 4087}; 4088 4089class HShr : public HBinaryOperation { 4090 public: 4091 HShr(Primitive::Type result_type, 4092 HInstruction* left, 4093 HInstruction* right, 4094 uint32_t dex_pc = kNoDexPc) 4095 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4096 4097 template <typename T, typename U, typename V> 4098 T Compute(T x, U y, V max_shift_value) const { 4099 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4100 "V is not the unsigned integer type corresponding to T"); 4101 return x >> (y & max_shift_value); 4102 } 4103 4104 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4105 return GetBlock()->GetGraph()->GetIntConstant( 4106 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4107 } 4108 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4109 // case is handled as `x >> static_cast<int>(y)`. 
4110 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4111 return GetBlock()->GetGraph()->GetLongConstant( 4112 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4113 } 4114 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4115 return GetBlock()->GetGraph()->GetLongConstant( 4116 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4117 } 4118 4119 DECLARE_INSTRUCTION(Shr); 4120 4121 private: 4122 DISALLOW_COPY_AND_ASSIGN(HShr); 4123}; 4124 4125class HUShr : public HBinaryOperation { 4126 public: 4127 HUShr(Primitive::Type result_type, 4128 HInstruction* left, 4129 HInstruction* right, 4130 uint32_t dex_pc = kNoDexPc) 4131 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4132 4133 template <typename T, typename U, typename V> 4134 T Compute(T x, U y, V max_shift_value) const { 4135 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4136 "V is not the unsigned integer type corresponding to T"); 4137 V ux = static_cast<V>(x); 4138 return static_cast<T>(ux >> (y & max_shift_value)); 4139 } 4140 4141 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4142 return GetBlock()->GetGraph()->GetIntConstant( 4143 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4144 } 4145 // There is no `Evaluate(HIntConstant* x, HLongConstant* y)`, as this 4146 // case is handled as `x >>> static_cast<int>(y)`. 
4147 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4148 return GetBlock()->GetGraph()->GetLongConstant( 4149 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4150 } 4151 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4152 return GetBlock()->GetGraph()->GetLongConstant( 4153 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4154 } 4155 4156 DECLARE_INSTRUCTION(UShr); 4157 4158 private: 4159 DISALLOW_COPY_AND_ASSIGN(HUShr); 4160}; 4161 4162class HAnd : public HBinaryOperation { 4163 public: 4164 HAnd(Primitive::Type result_type, 4165 HInstruction* left, 4166 HInstruction* right, 4167 uint32_t dex_pc = kNoDexPc) 4168 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4169 4170 bool IsCommutative() const OVERRIDE { return true; } 4171 4172 template <typename T, typename U> 4173 auto Compute(T x, U y) const -> decltype(x & y) { return x & y; } 4174 4175 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4176 return GetBlock()->GetGraph()->GetIntConstant( 4177 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4178 } 4179 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4180 return GetBlock()->GetGraph()->GetLongConstant( 4181 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4182 } 4183 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4184 return GetBlock()->GetGraph()->GetLongConstant( 4185 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4186 } 4187 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4188 return GetBlock()->GetGraph()->GetLongConstant( 4189 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4190 } 4191 4192 DECLARE_INSTRUCTION(And); 4193 4194 private: 4195 DISALLOW_COPY_AND_ASSIGN(HAnd); 4196}; 4197 4198class HOr : public HBinaryOperation { 4199 public: 4200 HOr(Primitive::Type result_type, 4201 HInstruction* left, 4202 HInstruction* 
right, 4203 uint32_t dex_pc = kNoDexPc) 4204 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4205 4206 bool IsCommutative() const OVERRIDE { return true; } 4207 4208 template <typename T, typename U> 4209 auto Compute(T x, U y) const -> decltype(x | y) { return x | y; } 4210 4211 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4212 return GetBlock()->GetGraph()->GetIntConstant( 4213 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4214 } 4215 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4216 return GetBlock()->GetGraph()->GetLongConstant( 4217 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4218 } 4219 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4220 return GetBlock()->GetGraph()->GetLongConstant( 4221 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4222 } 4223 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4224 return GetBlock()->GetGraph()->GetLongConstant( 4225 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4226 } 4227 4228 DECLARE_INSTRUCTION(Or); 4229 4230 private: 4231 DISALLOW_COPY_AND_ASSIGN(HOr); 4232}; 4233 4234class HXor : public HBinaryOperation { 4235 public: 4236 HXor(Primitive::Type result_type, 4237 HInstruction* left, 4238 HInstruction* right, 4239 uint32_t dex_pc = kNoDexPc) 4240 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4241 4242 bool IsCommutative() const OVERRIDE { return true; } 4243 4244 template <typename T, typename U> 4245 auto Compute(T x, U y) const -> decltype(x ^ y) { return x ^ y; } 4246 4247 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4248 return GetBlock()->GetGraph()->GetIntConstant( 4249 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4250 } 4251 HConstant* Evaluate(HIntConstant* x, HLongConstant* y) const OVERRIDE { 4252 return GetBlock()->GetGraph()->GetLongConstant( 4253 Compute(x->GetValue(), y->GetValue()), 
GetDexPc()); 4254 } 4255 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4256 return GetBlock()->GetGraph()->GetLongConstant( 4257 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4258 } 4259 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4260 return GetBlock()->GetGraph()->GetLongConstant( 4261 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4262 } 4263 4264 DECLARE_INSTRUCTION(Xor); 4265 4266 private: 4267 DISALLOW_COPY_AND_ASSIGN(HXor); 4268}; 4269 4270class HRor : public HBinaryOperation { 4271 public: 4272 HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance) 4273 : HBinaryOperation(result_type, value, distance) {} 4274 4275 template <typename T, typename U, typename V> 4276 T Compute(T x, U y, V max_shift_value) const { 4277 static_assert(std::is_same<V, typename std::make_unsigned<T>::type>::value, 4278 "V is not the unsigned integer type corresponding to T"); 4279 V ux = static_cast<V>(x); 4280 if ((y & max_shift_value) == 0) { 4281 return static_cast<T>(ux); 4282 } else { 4283 const V reg_bits = sizeof(T) * 8; 4284 return static_cast<T>(ux >> (y & max_shift_value)) | 4285 (x << (reg_bits - (y & max_shift_value))); 4286 } 4287 } 4288 4289 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4290 return GetBlock()->GetGraph()->GetIntConstant( 4291 Compute(x->GetValue(), y->GetValue(), kMaxIntShiftValue), GetDexPc()); 4292 } 4293 HConstant* Evaluate(HLongConstant* x, HIntConstant* y) const OVERRIDE { 4294 return GetBlock()->GetGraph()->GetLongConstant( 4295 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4296 } 4297 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4298 return GetBlock()->GetGraph()->GetLongConstant( 4299 Compute(x->GetValue(), y->GetValue(), kMaxLongShiftValue), GetDexPc()); 4300 } 4301 4302 DECLARE_INSTRUCTION(Ror); 4303 4304 private: 4305 DISALLOW_COPY_AND_ASSIGN(HRor); 4306}; 4307 4308// The 
value of a parameter in this method. Its location depends on 4309// the calling convention. 4310class HParameterValue : public HExpression<0> { 4311 public: 4312 HParameterValue(const DexFile& dex_file, 4313 uint16_t type_index, 4314 uint8_t index, 4315 Primitive::Type parameter_type, 4316 bool is_this = false) 4317 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 4318 dex_file_(dex_file), 4319 type_index_(type_index), 4320 index_(index), 4321 is_this_(is_this), 4322 can_be_null_(!is_this) {} 4323 4324 const DexFile& GetDexFile() const { return dex_file_; } 4325 uint16_t GetTypeIndex() const { return type_index_; } 4326 uint8_t GetIndex() const { return index_; } 4327 bool IsThis() const { return is_this_; } 4328 4329 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4330 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4331 4332 DECLARE_INSTRUCTION(ParameterValue); 4333 4334 private: 4335 const DexFile& dex_file_; 4336 const uint16_t type_index_; 4337 // The index of this parameter in the parameters list. Must be less 4338 // than HGraph::number_of_in_vregs_. 4339 const uint8_t index_; 4340 4341 // Whether or not the parameter value corresponds to 'this' argument. 
4342 const bool is_this_; 4343 4344 bool can_be_null_; 4345 4346 DISALLOW_COPY_AND_ASSIGN(HParameterValue); 4347}; 4348 4349class HNot : public HUnaryOperation { 4350 public: 4351 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc) 4352 : HUnaryOperation(result_type, input, dex_pc) {} 4353 4354 bool CanBeMoved() const OVERRIDE { return true; } 4355 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4356 return true; 4357 } 4358 4359 template <typename T> T Compute(T x) const { return ~x; } 4360 4361 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4362 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4363 } 4364 HConstant* Evaluate(HLongConstant* x) const OVERRIDE { 4365 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); 4366 } 4367 4368 DECLARE_INSTRUCTION(Not); 4369 4370 private: 4371 DISALLOW_COPY_AND_ASSIGN(HNot); 4372}; 4373 4374class HBooleanNot : public HUnaryOperation { 4375 public: 4376 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc) 4377 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {} 4378 4379 bool CanBeMoved() const OVERRIDE { return true; } 4380 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4381 return true; 4382 } 4383 4384 template <typename T> bool Compute(T x) const { 4385 DCHECK(IsUint<1>(x)); 4386 return !x; 4387 } 4388 4389 HConstant* Evaluate(HIntConstant* x) const OVERRIDE { 4390 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); 4391 } 4392 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE { 4393 LOG(FATAL) << DebugName() << " is not defined for long values"; 4394 UNREACHABLE(); 4395 } 4396 4397 DECLARE_INSTRUCTION(BooleanNot); 4398 4399 private: 4400 DISALLOW_COPY_AND_ASSIGN(HBooleanNot); 4401}; 4402 4403class HTypeConversion : public HExpression<1> { 4404 public: 4405 // Instantiate 
a type conversion of `input` to `result_type`. 4406 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc) 4407 : HExpression(result_type, 4408 SideEffectsForArchRuntimeCalls(input->GetType(), result_type), 4409 dex_pc) { 4410 SetRawInputAt(0, input); 4411 DCHECK_NE(input->GetType(), result_type); 4412 } 4413 4414 HInstruction* GetInput() const { return InputAt(0); } 4415 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 4416 Primitive::Type GetResultType() const { return GetType(); } 4417 4418 // Required by the x86, ARM, MIPS and MIPS64 code generators when producing calls 4419 // to the runtime. 4420 4421 bool CanBeMoved() const OVERRIDE { return true; } 4422 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; } 4423 4424 // Try to statically evaluate the conversion and return a HConstant 4425 // containing the result. If the input cannot be converted, return nullptr. 4426 HConstant* TryStaticEvaluation() const; 4427 4428 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type, 4429 Primitive::Type result_type) { 4430 // Some architectures may not require the 'GC' side effects, but at this point 4431 // in the compilation process we do not know what architecture we will 4432 // generate code for, so we must be conservative. 
4433 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type)) 4434 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) { 4435 return SideEffects::CanTriggerGC(); 4436 } 4437 return SideEffects::None(); 4438 } 4439 4440 DECLARE_INSTRUCTION(TypeConversion); 4441 4442 private: 4443 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 4444}; 4445 4446static constexpr uint32_t kNoRegNumber = -1; 4447 4448class HPhi : public HInstruction { 4449 public: 4450 HPhi(ArenaAllocator* arena, 4451 uint32_t reg_number, 4452 size_t number_of_inputs, 4453 Primitive::Type type, 4454 uint32_t dex_pc = kNoDexPc) 4455 : HInstruction(SideEffects::None(), dex_pc), 4456 inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)), 4457 reg_number_(reg_number), 4458 type_(ToPhiType(type)), 4459 // Phis are constructed live and marked dead if conflicting or unused. 4460 // Individual steps of SsaBuilder should assume that if a phi has been 4461 // marked dead, it can be ignored and will be removed by SsaPhiElimination. 4462 is_live_(true), 4463 can_be_null_(true) { 4464 DCHECK_NE(type_, Primitive::kPrimVoid); 4465 } 4466 4467 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold. 4468 static Primitive::Type ToPhiType(Primitive::Type type) { 4469 switch (type) { 4470 case Primitive::kPrimBoolean: 4471 case Primitive::kPrimByte: 4472 case Primitive::kPrimShort: 4473 case Primitive::kPrimChar: 4474 return Primitive::kPrimInt; 4475 default: 4476 return type; 4477 } 4478 } 4479 4480 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); } 4481 4482 size_t InputCount() const OVERRIDE { return inputs_.size(); } 4483 4484 void AddInput(HInstruction* input); 4485 void RemoveInputAt(size_t index); 4486 4487 Primitive::Type GetType() const OVERRIDE { return type_; } 4488 void SetType(Primitive::Type new_type) { 4489 // Make sure that only valid type changes occur. 
The following are allowed: 4490 // (1) int -> float/ref (primitive type propagation), 4491 // (2) long -> double (primitive type propagation). 4492 DCHECK(type_ == new_type || 4493 (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) || 4494 (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimNot) || 4495 (type_ == Primitive::kPrimLong && new_type == Primitive::kPrimDouble)); 4496 type_ = new_type; 4497 } 4498 4499 bool CanBeNull() const OVERRIDE { return can_be_null_; } 4500 void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; } 4501 4502 uint32_t GetRegNumber() const { return reg_number_; } 4503 4504 void SetDead() { is_live_ = false; } 4505 void SetLive() { is_live_ = true; } 4506 bool IsDead() const { return !is_live_; } 4507 bool IsLive() const { return is_live_; } 4508 4509 bool IsVRegEquivalentOf(HInstruction* other) const { 4510 return other != nullptr 4511 && other->IsPhi() 4512 && other->AsPhi()->GetBlock() == GetBlock() 4513 && other->AsPhi()->GetRegNumber() == GetRegNumber(); 4514 } 4515 4516 // Returns the next equivalent phi (starting from the current one) or null if there is none. 4517 // An equivalent phi is a phi having the same dex register and type. 4518 // It assumes that phis with the same dex register are adjacent. 
4519 HPhi* GetNextEquivalentPhiWithSameType() { 4520 HInstruction* next = GetNext(); 4521 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 4522 if (next->GetType() == GetType()) { 4523 return next->AsPhi(); 4524 } 4525 next = next->GetNext(); 4526 } 4527 return nullptr; 4528 } 4529 4530 DECLARE_INSTRUCTION(Phi); 4531 4532 protected: 4533 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE { 4534 return inputs_[index]; 4535 } 4536 4537 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE { 4538 inputs_[index] = input; 4539 } 4540 4541 private: 4542 ArenaVector<HUserRecord<HInstruction*> > inputs_; 4543 const uint32_t reg_number_; 4544 Primitive::Type type_; 4545 bool is_live_; 4546 bool can_be_null_; 4547 4548 DISALLOW_COPY_AND_ASSIGN(HPhi); 4549}; 4550 4551class HNullCheck : public HExpression<1> { 4552 public: 4553 // `HNullCheck` can trigger GC, as it may call the `NullPointerException` 4554 // constructor. 
4555 HNullCheck(HInstruction* value, uint32_t dex_pc) 4556 : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) { 4557 SetRawInputAt(0, value); 4558 } 4559 4560 bool CanBeMoved() const OVERRIDE { return true; } 4561 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4562 return true; 4563 } 4564 4565 bool NeedsEnvironment() const OVERRIDE { return true; } 4566 4567 bool CanThrow() const OVERRIDE { return true; } 4568 4569 bool CanBeNull() const OVERRIDE { return false; } 4570 4571 4572 DECLARE_INSTRUCTION(NullCheck); 4573 4574 private: 4575 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 4576}; 4577 4578class FieldInfo : public ValueObject { 4579 public: 4580 FieldInfo(MemberOffset field_offset, 4581 Primitive::Type field_type, 4582 bool is_volatile, 4583 uint32_t index, 4584 uint16_t declaring_class_def_index, 4585 const DexFile& dex_file, 4586 Handle<mirror::DexCache> dex_cache) 4587 : field_offset_(field_offset), 4588 field_type_(field_type), 4589 is_volatile_(is_volatile), 4590 index_(index), 4591 declaring_class_def_index_(declaring_class_def_index), 4592 dex_file_(dex_file), 4593 dex_cache_(dex_cache) {} 4594 4595 MemberOffset GetFieldOffset() const { return field_offset_; } 4596 Primitive::Type GetFieldType() const { return field_type_; } 4597 uint32_t GetFieldIndex() const { return index_; } 4598 uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;} 4599 const DexFile& GetDexFile() const { return dex_file_; } 4600 bool IsVolatile() const { return is_volatile_; } 4601 Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; } 4602 4603 private: 4604 const MemberOffset field_offset_; 4605 const Primitive::Type field_type_; 4606 const bool is_volatile_; 4607 const uint32_t index_; 4608 const uint16_t declaring_class_def_index_; 4609 const DexFile& dex_file_; 4610 const Handle<mirror::DexCache> dex_cache_; 4611}; 4612 4613class HInstanceFieldGet : public HExpression<1> { 4614 public: 4615 
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints, so they must stay in place.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // Only offsets within the first page can piggy-back on the fault-based
  // null check of the object register.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // Only offsets within the first page can piggy-back on the fault-based
  // null check of the object register.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  const FieldInfo field_info_;
  // Refined by analyses; cleared when the stored value is proven non-null.
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HExpression(type,
                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
                    dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // True if `other` is the int/long (resp. float/double) counterpart of this
  // float/double (resp. int/long) access at the same dex pc; the debug checks
  // verify the two accesses really target the same array element.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType()));
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType()));
        DCHECK(Primitive::IsIntOrLongType(other->GetType()));
      }
    }
    return result;
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc),
        expected_component_type_(expected_component_type),
        // Reference stores may need an ArrayStoreException type check.
        needs_type_check_(value->GetType() == Primitive::kPrimNot),
        value_can_be_null_(true),
        static_type_of_array_is_object_array_(false) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We currently always call a runtime method to catch array store
    // exceptions.
    return needs_type_check_;
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return needs_type_check_; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    needs_type_check_ = false;
  }

  void ClearValueCanBeNull() {
    value_can_be_null_ = false;
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    static_type_of_array_is_object_array_ = true;
  }

  bool GetValueCanBeNull() const { return value_can_be_null_; }
  bool NeedsTypeCheck() const { return needs_type_check_; }
  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : expected_component_type_;
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return expected_component_type_;
  }

  // Reference stores may call into the runtime, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC()
                                               : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  const Primitive::Type expected_component_type_;
  bool needs_type_check_;
  bool value_can_be_null_;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  bool static_type_of_array_is_object_array_;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};

class HBoundsCheck : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
4879 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc) 4880 : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) { 4881 DCHECK(index->GetType() == Primitive::kPrimInt); 4882 SetRawInputAt(0, index); 4883 SetRawInputAt(1, length); 4884 } 4885 4886 bool CanBeMoved() const OVERRIDE { return true; } 4887 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 4888 return true; 4889 } 4890 4891 bool NeedsEnvironment() const OVERRIDE { return true; } 4892 4893 bool CanThrow() const OVERRIDE { return true; } 4894 4895 HInstruction* GetIndex() const { return InputAt(0); } 4896 4897 DECLARE_INSTRUCTION(BoundsCheck); 4898 4899 private: 4900 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck); 4901}; 4902 4903/** 4904 * Some DEX instructions are folded into multiple HInstructions that need 4905 * to stay live until the last HInstruction. This class 4906 * is used as a marker for the baseline compiler to ensure its preceding 4907 * HInstruction stays live. `index` represents the stack location index of the 4908 * instruction (the actual offset is computed as index * vreg_size). 4909 */ 4910class HTemporary : public HTemplateInstruction<0> { 4911 public: 4912 explicit HTemporary(size_t index, uint32_t dex_pc = kNoDexPc) 4913 : HTemplateInstruction(SideEffects::None(), dex_pc), index_(index) {} 4914 4915 size_t GetIndex() const { return index_; } 4916 4917 Primitive::Type GetType() const OVERRIDE { 4918 // The previous instruction is the one that will be stored in the temporary location. 
    DCHECK(GetPrevious() != nullptr);
    return GetPrevious()->GetType();
  }

  DECLARE_INSTRUCTION(Temporary);

 private:
  const size_t index_;
  DISALLOW_COPY_AND_ASSIGN(HTemporary);
};

// Safepoint at which the runtime may suspend the executing thread; can
// trigger GC, hence the side effect and the environment requirement.
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass : public HExpression<1> {
 public:
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check,
             bool is_in_dex_cache)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        is_referrers_class_(is_referrers_class),
        generate_clinit_check_(false),
        needs_access_check_(needs_access_check),
        is_in_dex_cache_(is_in_dex_cache),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class_ || !needs_access_check_);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  bool IsReferrersClass() const { return is_referrers_class_; }
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime();
  }

  bool MustGenerateClinitCheck() const {
    return generate_clinit_check_;
  }

  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    generate_clinit_check_ = generate_clinit_check;
  }

  // A runtime call is needed when the class must be initialized, resolved,
  // or access-checked.
  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() ||
           (!is_referrers_class_ && !is_in_dex_cache_) ||
           needs_access_check_;
  }

  bool NeedsAccessCheck() const {
    return needs_access_check_;
  }

  bool CanThrow() const OVERRIDE {
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsInDexCache() const { return is_in_dex_cache_; }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const bool is_referrers_class_;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  bool generate_clinit_check_;
  const bool needs_access_check_;
  const bool is_in_dex_cache_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};

// Instruction to load a String object from the dex cache.
class HLoadString : public HExpression<1> {
 public:
  HLoadString(HCurrentMethod* current_method,
              uint32_t string_index,
              uint32_t dex_pc,
              bool is_in_dex_cache)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        string_index_(string_index),
        is_in_dex_cache_(is_in_dex_cache) {
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return other->AsLoadString()->string_index_ == string_index_;
  }

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  uint32_t GetStringIndex() const { return string_index_; }

  // TODO: Can we deopt or debug when we resolve a string?
  bool NeedsEnvironment() const OVERRIDE { return false; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; }
  bool CanBeNull() const OVERRIDE { return false; }
  bool IsInDexCache() const { return is_in_dex_cache_; }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  const uint32_t string_index_;
  const bool is_in_dex_cache_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Read of a static field; input 0 is the Class object holding the field.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }

  // Volatile reads have ordering constraints, so they must stay in place.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};

// Write of a static field; input 0 is the Class object, input 1 the value.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache),
        value_can_be_null_(true) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return value_can_be_null_; }
  void ClearValueCanBeNull() { value_can_be_null_ = false; }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  const FieldInfo field_info_;
  // Refined by analyses; cleared when the stored value is proven non-null.
  bool value_can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};

// Read of an instance field that could not be resolved at compile time;
// needs an environment and can throw.
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;
  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};

// Write of an instance field that could not be resolved at compile time;
// needs an environment and can throw.
class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
};

// Read of a static field that could not be resolved at compile time;
// needs an environment and can throw.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};

// Write of a static field that could not be resolved at compile time;
// needs an environment and can throw.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_type_(field_type),
        field_index_(field_index) {
    DCHECK_EQ(field_type, value->GetType());
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  const Primitive::Type field_type_;
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};

// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};

// Implement the throw DEX instruction: ends the block by raising `exception`.
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};

/**
 * Implementation strategies for the code generator of a HInstanceOf
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck             // No optimization yet when checking against a generic array.
};

// Implements the instance-of DEX instruction: returns whether input 0 is an
// instance of the class loaded by input 1.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return false;
  }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return (check_kind == TypeCheckKind::kExactCheck)
        ? SideEffects::None()
        // Mips currently does runtime calls for any other checks.
        : SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

// Narrows the reference type of its input; inserted by reference type
// propagation (e.g. after an instanceof guard).
class HBoundType : public HExpression<1> {
 public:
  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()),
        upper_can_be_null_(true),
        can_be_null_(true) {
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return upper_can_be_null_; }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Can only tighten within the `upper_can_be_null_` constraint.
    DCHECK(upper_can_be_null_ || !can_be_null);
    can_be_null_ = can_be_null;
  }

  bool CanBeNull() const OVERRIDE { return can_be_null_; }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  // if (x instanceof ClassX) {
  //   // upper_bound_ will be ClassX
  // }
  ReferenceTypeInfo upper_bound_;
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then can_be_null_ cannot be true).
  bool upper_can_be_null_;
  bool can_be_null_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// Implements the check-cast DEX instruction: throws if input 0 is not an
// instance of the class loaded by input 1.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
        check_kind_(check_kind),
        must_do_null_check_(true) {
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return must_do_null_check_; }
  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }

  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  const TypeCheckKind check_kind_;
  bool must_do_null_check_;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

// Explicit memory barrier of the given kind, emitted by the code generator.
class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
        barrier_kind_(barrier_kind) {}

  MemBarrierKind GetBarrierKind() { return barrier_kind_; }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  const MemBarrierKind barrier_kind_;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};

// Implements monitor-enter / monitor-exit on the object input.
class HMonitorOperation : public HTemplateInstruction<1> {
 public:
  enum OperationKind {
    kEnter,
    kExit,
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
      : HTemplateInstruction(
            SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
        kind_(kind) {
    SetRawInputAt(0, object);
  }

  // Instruction may throw a Java exception, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return CanThrow(); }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
5580 return IsEnter(); 5581 } 5582 5583 5584 bool IsEnter() const { return kind_ == kEnter; } 5585 5586 DECLARE_INSTRUCTION(MonitorOperation); 5587 5588 private: 5589 const OperationKind kind_; 5590 5591 private: 5592 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 5593}; 5594 5595class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 5596 public: 5597 MoveOperands(Location source, 5598 Location destination, 5599 Primitive::Type type, 5600 HInstruction* instruction) 5601 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 5602 5603 Location GetSource() const { return source_; } 5604 Location GetDestination() const { return destination_; } 5605 5606 void SetSource(Location value) { source_ = value; } 5607 void SetDestination(Location value) { destination_ = value; } 5608 5609 // The parallel move resolver marks moves as "in-progress" by clearing the 5610 // destination (but not the source). 5611 Location MarkPending() { 5612 DCHECK(!IsPending()); 5613 Location dest = destination_; 5614 destination_ = Location::NoLocation(); 5615 return dest; 5616 } 5617 5618 void ClearPending(Location dest) { 5619 DCHECK(IsPending()); 5620 destination_ = dest; 5621 } 5622 5623 bool IsPending() const { 5624 DCHECK(source_.IsValid() || destination_.IsInvalid()); 5625 return destination_.IsInvalid() && source_.IsValid(); 5626 } 5627 5628 // True if this blocks a move from the given location. 5629 bool Blocks(Location loc) const { 5630 return !IsEliminated() && source_.OverlapsWith(loc); 5631 } 5632 5633 // A move is redundant if it's been eliminated, if its source and 5634 // destination are the same, or if its destination is unneeded. 5635 bool IsRedundant() const { 5636 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 5637 } 5638 5639 // We clear both operands to indicate move that's been eliminated. 
5640 void Eliminate() { 5641 source_ = destination_ = Location::NoLocation(); 5642 } 5643 5644 bool IsEliminated() const { 5645 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 5646 return source_.IsInvalid(); 5647 } 5648 5649 Primitive::Type GetType() const { return type_; } 5650 5651 bool Is64BitMove() const { 5652 return Primitive::Is64BitType(type_); 5653 } 5654 5655 HInstruction* GetInstruction() const { return instruction_; } 5656 5657 private: 5658 Location source_; 5659 Location destination_; 5660 // The type this move is for. 5661 Primitive::Type type_; 5662 // The instruction this move is assocatied with. Null when this move is 5663 // for moving an input in the expected locations of user (including a phi user). 5664 // This is only used in debug mode, to ensure we do not connect interval siblings 5665 // in the same parallel move. 5666 HInstruction* instruction_; 5667}; 5668 5669std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs); 5670 5671static constexpr size_t kDefaultNumberOfMoves = 4; 5672 5673class HParallelMove : public HTemplateInstruction<0> { 5674 public: 5675 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc) 5676 : HTemplateInstruction(SideEffects::None(), dex_pc), 5677 moves_(arena->Adapter(kArenaAllocMoveOperands)) { 5678 moves_.reserve(kDefaultNumberOfMoves); 5679 } 5680 5681 void AddMove(Location source, 5682 Location destination, 5683 Primitive::Type type, 5684 HInstruction* instruction) { 5685 DCHECK(source.IsValid()); 5686 DCHECK(destination.IsValid()); 5687 if (kIsDebugBuild) { 5688 if (instruction != nullptr) { 5689 for (const MoveOperands& move : moves_) { 5690 if (move.GetInstruction() == instruction) { 5691 // Special case the situation where the move is for the spill slot 5692 // of the instruction. 
          if ((GetPrevious() == instruction)
              || ((GetPrevious() == nullptr)
                  && instruction->IsPhi()
                  && instruction->GetBlock() == GetBlock())) {
            DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                << "Doing parallel moves for the same instruction.";
          } else {
            DCHECK(false) << "Doing parallel moves for the same instruction.";
          }
        }
      }
      // Destinations within one parallel move must never overlap.
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
#include "nodes_shared.h"
#endif
#ifdef ART_ENABLE_CODEGEN_arm
#include "nodes_arm.h"
#endif
#ifdef ART_ENABLE_CODEGEN_arm64
#include "nodes_arm64.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base visitor over the instructions of a graph. VisitXxx methods default to
// VisitInstruction; subclasses override the cases they care about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super) \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};

// Visitor whose VisitXxx methods delegate to the visit method of the
// instruction's super class, so overriding a base case handles all subclasses.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};

// Iterates over the blocks of a graph in insertion order.
class HInsertionOrderIterator : public ValueObject {
 public:
  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}

  bool Done() const { return index_ == graph_.GetBlocks().size(); }
  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
};

// Iterates over the blocks of a graph in reverse post-order.
class HReversePostOrderIterator : public ValueObject {
 public:
  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
    // Check that reverse post order of the graph has been built.
5812 DCHECK(!graph.GetReversePostOrder().empty()); 5813 } 5814 5815 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); } 5816 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; } 5817 void Advance() { ++index_; } 5818 5819 private: 5820 const HGraph& graph_; 5821 size_t index_; 5822 5823 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator); 5824}; 5825 5826class HPostOrderIterator : public ValueObject { 5827 public: 5828 explicit HPostOrderIterator(const HGraph& graph) 5829 : graph_(graph), index_(graph_.GetReversePostOrder().size()) { 5830 // Check that reverse post order of the graph has been built. 5831 DCHECK(!graph.GetReversePostOrder().empty()); 5832 } 5833 5834 bool Done() const { return index_ == 0; } 5835 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; } 5836 void Advance() { --index_; } 5837 5838 private: 5839 const HGraph& graph_; 5840 size_t index_; 5841 5842 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator); 5843}; 5844 5845class HLinearPostOrderIterator : public ValueObject { 5846 public: 5847 explicit HLinearPostOrderIterator(const HGraph& graph) 5848 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {} 5849 5850 bool Done() const { return index_ == 0; } 5851 5852 HBasicBlock* Current() const { return order_[index_ - 1u]; } 5853 5854 void Advance() { 5855 --index_; 5856 DCHECK_GE(index_, 0U); 5857 } 5858 5859 private: 5860 const ArenaVector<HBasicBlock*>& order_; 5861 size_t index_; 5862 5863 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator); 5864}; 5865 5866class HLinearOrderIterator : public ValueObject { 5867 public: 5868 explicit HLinearOrderIterator(const HGraph& graph) 5869 : order_(graph.GetLinearOrder()), index_(0) {} 5870 5871 bool Done() const { return index_ == order_.size(); } 5872 HBasicBlock* Current() const { return order_[index_]; } 5873 void Advance() { ++index_; } 5874 5875 private: 5876 const ArenaVector<HBasicBlock*>& order_; 5877 
size_t index_; 5878 5879 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator); 5880}; 5881 5882// Iterator over the blocks that art part of the loop. Includes blocks part 5883// of an inner loop. The order in which the blocks are iterated is on their 5884// block id. 5885class HBlocksInLoopIterator : public ValueObject { 5886 public: 5887 explicit HBlocksInLoopIterator(const HLoopInformation& info) 5888 : blocks_in_loop_(info.GetBlocks()), 5889 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 5890 index_(0) { 5891 if (!blocks_in_loop_.IsBitSet(index_)) { 5892 Advance(); 5893 } 5894 } 5895 5896 bool Done() const { return index_ == blocks_.size(); } 5897 HBasicBlock* Current() const { return blocks_[index_]; } 5898 void Advance() { 5899 ++index_; 5900 for (size_t e = blocks_.size(); index_ < e; ++index_) { 5901 if (blocks_in_loop_.IsBitSet(index_)) { 5902 break; 5903 } 5904 } 5905 } 5906 5907 private: 5908 const BitVector& blocks_in_loop_; 5909 const ArenaVector<HBasicBlock*>& blocks_; 5910 size_t index_; 5911 5912 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 5913}; 5914 5915// Iterator over the blocks that art part of the loop. Includes blocks part 5916// of an inner loop. The order in which the blocks are iterated is reverse 5917// post order. 
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead if the first block in reverse post order is not in the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Move to the next block in reverse post order that belongs to the loop.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Returns the value of an int or long constant as an int64_t.
inline int64_t Int64FromConstant(HConstant* constant) {
  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
  return constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                   : constant->AsLongConstant()->GetValue();
}

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

// Defines HInstruction::IsXxx() / AsXxx() for every concrete instruction.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
  inline const H##type* HInstruction::As##type() const {                       \
    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
  }                                                                            \
  inline H##type* HInstruction::As##type() {                                   \
    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
  }

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_NODES_H_