nodes.h revision 06a46c44bf1a5cba6c78c3faffc4e7ec1442b210
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/stl_util.h"
28#include "dex/compiler_enums.h"
29#include "dex_file.h"
30#include "entrypoints/quick/quick_entrypoints_enum.h"
31#include "handle.h"
32#include "handle_scope.h"
33#include "invoke_type.h"
34#include "locations.h"
35#include "method_reference.h"
36#include "mirror/class.h"
37#include "offsets.h"
38#include "primitive.h"
39#include "utils/array_ref.h"
40#include "utils/intrusive_forward_list.h"
41#include "utils/transform_array_ref.h"
42
43namespace art {
44
45class GraphChecker;
46class HBasicBlock;
47class HCurrentMethod;
48class HDoubleConstant;
49class HEnvironment;
50class HFloatConstant;
51class HGraphBuilder;
52class HGraphVisitor;
53class HInstruction;
54class HIntConstant;
55class HInvoke;
56class HLongConstant;
57class HNullConstant;
58class HPhi;
59class HSuspendCheck;
60class HTryBoundary;
61class LiveInterval;
62class LocationSummary;
63class SlowPathCode;
64class SsaBuilder;
65
66namespace mirror {
67class DexCache;
68}  // namespace mirror
69
// Initial capacities used when reserving space in the various HGraph/HBasicBlock
// containers. These are heuristics only; the containers grow as needed.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinels for "no field/class-def index available" (all bits set).
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel invoke type used by HGraph when the invoke kind is not known/applicable.
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for synthetic instructions that have no bytecode location.
static constexpr uint32_t kNoDexPc = -1;
88
89inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
90  // For the purposes of the compiler, the dex files must actually be the same object
91  // if we want to safely treat them as the same. This is especially important for JIT
92  // as custom class loaders can open the same underlying file (or memory) multiple
93  // times and provide different class resolution but no two class loaders should ever
94  // use the same DexFile object - doing so is an unsupported hack that can lead to
95  // all sorts of weird failures.
96  return &lhs == &rhs;
97}
98
// Kinds of comparisons used by conditional instructions. The mnemonic suffixes
// for the unsigned variants follow the x86 convention (B = below, A = above).
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};
114
// Outcome of graph construction/analysis passes (e.g. BuildDominatorTree,
// AnalyzeLoops). Anything other than kAnalysisSuccess aborts compilation of
// the method.
enum GraphAnalysisResult {
  kAnalysisSkipped,                    // Analysis was not run.
  kAnalysisInvalidBytecode,            // Input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,         // A loop header is a catch block (see AnalyzeLoops).
  kAnalysisFailAmbiguousArrayOp,       // Array operation type could not be disambiguated.
  kAnalysisSuccess,                    // Analysis completed successfully.
};
122
// Doubly-linked list of HInstructions owned by a basic block. Only stores the
// first/last nodes; the links themselves live inside the instructions
// (maintained by the friend classes below).
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` at the end / remove it from the list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops the head/tail pointers only; does not touch the instructions' links.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice the contents of `instruction_list` into this list after/before
  // `cursor`, or at the end.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  // These classes manipulate the intrusive next/previous links directly.
  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
168
// Reference type information attached to reference-typed HInstructions:
// a handle to the known class plus an exactness bit. A default-constructed
// (invalid) instance represents "no type information".
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  // Creates a valid instance; DCHECKs handle validity (see .cc).
  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  // Convenience overload: a type that cannot be assigned from other types is
  // necessarily exact.
  static ReferenceTypeInfo Create(TypeHandle type_handle) SHARED_REQUIRES(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  // Like Create() but skips validity checking of the handle.
  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  // The queries below require a valid handle (DCHECKed) and access the mirror
  // class, hence the mutator lock annotation.
  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Returns true only when `this` is known exactly to be an array whose
  // component type can hold `rti`. Conservatively false otherwise.
  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true only when `this` is known exactly to be an array whose
  // component type can hold the elements of the array `rti`. Conservatively
  // false otherwise.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Supertype, excluding the case where both are the same class.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};
288
289std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
290
291// Control-flow graph of a method. Contains a list of basic blocks.
292class HGraph : public ArenaObject<kArenaAllocGraph> {
293 public:
294  HGraph(ArenaAllocator* arena,
295         const DexFile& dex_file,
296         uint32_t method_idx,
297         bool should_generate_constructor_barrier,
298         InstructionSet instruction_set,
299         InvokeType invoke_type = kInvalidInvokeType,
300         bool debuggable = false,
301         bool osr = false,
302         int start_instruction_id = 0)
303      : arena_(arena),
304        blocks_(arena->Adapter(kArenaAllocBlockList)),
305        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
306        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
307        entry_block_(nullptr),
308        exit_block_(nullptr),
309        maximum_number_of_out_vregs_(0),
310        number_of_vregs_(0),
311        number_of_in_vregs_(0),
312        temporaries_vreg_slots_(0),
313        has_bounds_checks_(false),
314        has_try_catch_(false),
315        has_irreducible_loops_(false),
316        debuggable_(debuggable),
317        current_instruction_id_(start_instruction_id),
318        dex_file_(dex_file),
319        method_idx_(method_idx),
320        invoke_type_(invoke_type),
321        in_ssa_form_(false),
322        should_generate_constructor_barrier_(should_generate_constructor_barrier),
323        instruction_set_(instruction_set),
324        cached_null_constant_(nullptr),
325        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
326        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
327        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
328        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
329        cached_current_method_(nullptr),
330        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
331        osr_(osr) {
332    blocks_.reserve(kDefaultNumberOfBlocks);
333  }
334
335  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
336  void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);
337
338  ArenaAllocator* GetArena() const { return arena_; }
339  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
340
341  bool IsInSsaForm() const { return in_ssa_form_; }
342  void SetInSsaForm() { in_ssa_form_ = true; }
343
344  HBasicBlock* GetEntryBlock() const { return entry_block_; }
345  HBasicBlock* GetExitBlock() const { return exit_block_; }
346  bool HasExitBlock() const { return exit_block_ != nullptr; }
347
348  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
349  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
350
351  void AddBlock(HBasicBlock* block);
352
353  void ComputeDominanceInformation();
354  void ClearDominanceInformation();
355  void ClearLoopInformation();
356  void FindBackEdges(ArenaBitVector* visited);
357  GraphAnalysisResult BuildDominatorTree();
358  void SimplifyCFG();
359  void SimplifyCatchBlocks();
360
361  // Analyze all natural loops in this graph. Returns a code specifying that it
362  // was successful or the reason for failure. The method will fail if a loop
363  // is a throw-catch loop, i.e. the header is a catch block.
364  GraphAnalysisResult AnalyzeLoops() const;
365
366  // Iterate over blocks to compute try block membership. Needs reverse post
367  // order and loop information.
368  void ComputeTryBlockInformation();
369
370  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
371  // Returns the instruction to replace the invoke expression or null if the
372  // invoke is for a void method. Note that the caller is responsible for replacing
373  // and removing the invoke instruction.
374  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
375
376  // Update the loop and try membership of `block`, which was spawned from `reference`.
377  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
378  // should be the new back edge.
379  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
380                                             HBasicBlock* reference,
381                                             bool replace_if_back_edge);
382
383  // Need to add a couple of blocks to test if the loop body is entered and
384  // put deoptimization instructions, etc.
385  void TransformLoopHeaderForBCE(HBasicBlock* header);
386
387  // Removes `block` from the graph. Assumes `block` has been disconnected from
388  // other blocks and has no instructions or phis.
389  void DeleteDeadEmptyBlock(HBasicBlock* block);
390
391  // Splits the edge between `block` and `successor` while preserving the
392  // indices in the predecessor/successor lists. If there are multiple edges
393  // between the blocks, the lowest indices are used.
394  // Returns the new block which is empty and has the same dex pc as `successor`.
395  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
396
397  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
398  void SimplifyLoop(HBasicBlock* header);
399
400  int32_t GetNextInstructionId() {
401    DCHECK_NE(current_instruction_id_, INT32_MAX);
402    return current_instruction_id_++;
403  }
404
405  int32_t GetCurrentInstructionId() const {
406    return current_instruction_id_;
407  }
408
409  void SetCurrentInstructionId(int32_t id) {
410    DCHECK_GE(id, current_instruction_id_);
411    current_instruction_id_ = id;
412  }
413
414  uint16_t GetMaximumNumberOfOutVRegs() const {
415    return maximum_number_of_out_vregs_;
416  }
417
418  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
419    maximum_number_of_out_vregs_ = new_value;
420  }
421
422  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
423    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
424  }
425
426  void UpdateTemporariesVRegSlots(size_t slots) {
427    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
428  }
429
430  size_t GetTemporariesVRegSlots() const {
431    DCHECK(!in_ssa_form_);
432    return temporaries_vreg_slots_;
433  }
434
435  void SetNumberOfVRegs(uint16_t number_of_vregs) {
436    number_of_vregs_ = number_of_vregs;
437  }
438
439  uint16_t GetNumberOfVRegs() const {
440    return number_of_vregs_;
441  }
442
443  void SetNumberOfInVRegs(uint16_t value) {
444    number_of_in_vregs_ = value;
445  }
446
447  uint16_t GetNumberOfInVRegs() const {
448    return number_of_in_vregs_;
449  }
450
451  uint16_t GetNumberOfLocalVRegs() const {
452    DCHECK(!in_ssa_form_);
453    return number_of_vregs_ - number_of_in_vregs_;
454  }
455
456  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
457    return reverse_post_order_;
458  }
459
460  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
461    return linear_order_;
462  }
463
464  bool HasBoundsChecks() const {
465    return has_bounds_checks_;
466  }
467
468  void SetHasBoundsChecks(bool value) {
469    has_bounds_checks_ = value;
470  }
471
472  bool ShouldGenerateConstructorBarrier() const {
473    return should_generate_constructor_barrier_;
474  }
475
476  bool IsDebuggable() const { return debuggable_; }
477
478  // Returns a constant of the given type and value. If it does not exist
479  // already, it is created and inserted into the graph. This method is only for
480  // integral types.
481  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
482
483  // TODO: This is problematic for the consistency of reference type propagation
484  // because it can be created anytime after the pass and thus it will be left
485  // with an invalid type.
486  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
487
488  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
489    return CreateConstant(value, &cached_int_constants_, dex_pc);
490  }
491  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
492    return CreateConstant(value, &cached_long_constants_, dex_pc);
493  }
494  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
495    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
496  }
497  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
498    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
499  }
500
501  HCurrentMethod* GetCurrentMethod();
502
503  const DexFile& GetDexFile() const {
504    return dex_file_;
505  }
506
507  uint32_t GetMethodIdx() const {
508    return method_idx_;
509  }
510
511  InvokeType GetInvokeType() const {
512    return invoke_type_;
513  }
514
515  InstructionSet GetInstructionSet() const {
516    return instruction_set_;
517  }
518
519  bool IsCompilingOsr() const { return osr_; }
520
521  bool HasTryCatch() const { return has_try_catch_; }
522  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
523
524  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
525  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
526
527  ArtMethod* GetArtMethod() const { return art_method_; }
528  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
529
530  // Returns an instruction with the opposite boolean value from 'cond'.
531  // The instruction has been inserted into the graph, either as a constant, or
532  // before cursor.
533  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
534
535  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
536
537 private:
538  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
539  void RemoveDeadBlocks(const ArenaBitVector& visited);
540
541  template <class InstructionType, typename ValueType>
542  InstructionType* CreateConstant(ValueType value,
543                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
544                                  uint32_t dex_pc = kNoDexPc) {
545    // Try to find an existing constant of the given value.
546    InstructionType* constant = nullptr;
547    auto cached_constant = cache->find(value);
548    if (cached_constant != cache->end()) {
549      constant = cached_constant->second;
550    }
551
552    // If not found or previously deleted, create and cache a new instruction.
553    // Don't bother reviving a previously deleted instruction, for simplicity.
554    if (constant == nullptr || constant->GetBlock() == nullptr) {
555      constant = new (arena_) InstructionType(value, dex_pc);
556      cache->Overwrite(value, constant);
557      InsertConstant(constant);
558    }
559    return constant;
560  }
561
562  void InsertConstant(HConstant* instruction);
563
564  // Cache a float constant into the graph. This method should only be
565  // called by the SsaBuilder when creating "equivalent" instructions.
566  void CacheFloatConstant(HFloatConstant* constant);
567
568  // See CacheFloatConstant comment.
569  void CacheDoubleConstant(HDoubleConstant* constant);
570
571  ArenaAllocator* const arena_;
572
573  // List of blocks in insertion order.
574  ArenaVector<HBasicBlock*> blocks_;
575
576  // List of blocks to perform a reverse post order tree traversal.
577  ArenaVector<HBasicBlock*> reverse_post_order_;
578
579  // List of blocks to perform a linear order tree traversal.
580  ArenaVector<HBasicBlock*> linear_order_;
581
582  HBasicBlock* entry_block_;
583  HBasicBlock* exit_block_;
584
585  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
586  uint16_t maximum_number_of_out_vregs_;
587
588  // The number of virtual registers in this method. Contains the parameters.
589  uint16_t number_of_vregs_;
590
591  // The number of virtual registers used by parameters of this method.
592  uint16_t number_of_in_vregs_;
593
594  // Number of vreg size slots that the temporaries use (used in baseline compiler).
595  size_t temporaries_vreg_slots_;
596
597  // Has bounds checks. We can totally skip BCE if it's false.
598  bool has_bounds_checks_;
599
600  // Flag whether there are any try/catch blocks in the graph. We will skip
601  // try/catch-related passes if false.
602  bool has_try_catch_;
603
604  // Flag whether there are any irreducible loops in the graph.
605  bool has_irreducible_loops_;
606
607  // Indicates whether the graph should be compiled in a way that
608  // ensures full debuggability. If false, we can apply more
609  // aggressive optimizations that may limit the level of debugging.
610  const bool debuggable_;
611
612  // The current id to assign to a newly added instruction. See HInstruction.id_.
613  int32_t current_instruction_id_;
614
615  // The dex file from which the method is from.
616  const DexFile& dex_file_;
617
618  // The method index in the dex file.
619  const uint32_t method_idx_;
620
621  // If inlined, this encodes how the callee is being invoked.
622  const InvokeType invoke_type_;
623
624  // Whether the graph has been transformed to SSA form. Only used
625  // in debug mode to ensure we are not using properties only valid
626  // for non-SSA form (like the number of temporaries).
627  bool in_ssa_form_;
628
629  const bool should_generate_constructor_barrier_;
630
631  const InstructionSet instruction_set_;
632
633  // Cached constants.
634  HNullConstant* cached_null_constant_;
635  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
636  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
637  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
638  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
639
640  HCurrentMethod* cached_current_method_;
641
642  // The ArtMethod this graph is for. Note that for AOT, it may be null,
643  // for example for methods whose declaring class could not be resolved
644  // (such as when the superclass could not be found).
645  ArtMethod* art_method_;
646
647  // Keep the RTI of inexact Object to avoid having to pass stack handle
648  // collection pointer to passes which may create NullConstant.
649  ReferenceTypeInfo inexact_object_rti_;
650
651  // Whether we are compiling this graph for on stack replacement: this will
652  // make all loops seen as irreducible and emit special stack maps to mark
653  // compiled code entries which the interpreter can directly jump to.
654  const bool osr_;
655
656  friend class SsaBuilder;           // For caching constants.
657  friend class SsaLivenessAnalysis;  // For the linear order.
658  friend class HInliner;             // For the reverse post order.
659  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
660  DISALLOW_COPY_AND_ASSIGN(HGraph);
661};
662
663class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
664 public:
665  HLoopInformation(HBasicBlock* header, HGraph* graph)
666      : header_(header),
667        suspend_check_(nullptr),
668        irreducible_(false),
669        contains_irreducible_loop_(false),
670        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
671        // Make bit vector growable, as the number of blocks may change.
672        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
673    back_edges_.reserve(kDefaultNumberOfBackEdges);
674  }
675
676  bool IsIrreducible() const { return irreducible_; }
677  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }
678
679  void Dump(std::ostream& os);
680
681  HBasicBlock* GetHeader() const {
682    return header_;
683  }
684
685  void SetHeader(HBasicBlock* block) {
686    header_ = block;
687  }
688
689  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
690  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
691  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
692
693  void AddBackEdge(HBasicBlock* back_edge) {
694    back_edges_.push_back(back_edge);
695  }
696
697  void RemoveBackEdge(HBasicBlock* back_edge) {
698    RemoveElement(back_edges_, back_edge);
699  }
700
701  bool IsBackEdge(const HBasicBlock& block) const {
702    return ContainsElement(back_edges_, &block);
703  }
704
705  size_t NumberOfBackEdges() const {
706    return back_edges_.size();
707  }
708
709  HBasicBlock* GetPreHeader() const;
710
711  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
712    return back_edges_;
713  }
714
715  // Returns the lifetime position of the back edge that has the
716  // greatest lifetime position.
717  size_t GetLifetimeEnd() const;
718
719  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
720    ReplaceElement(back_edges_, existing, new_back_edge);
721  }
722
723  // Finds blocks that are part of this loop.
724  void Populate();
725
726  // Returns whether this loop information contains `block`.
727  // Note that this loop information *must* be populated before entering this function.
728  bool Contains(const HBasicBlock& block) const;
729
730  // Returns whether this loop information is an inner loop of `other`.
731  // Note that `other` *must* be populated before entering this function.
732  bool IsIn(const HLoopInformation& other) const;
733
734  // Returns true if instruction is not defined within this loop.
735  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;
736
737  const ArenaBitVector& GetBlocks() const { return blocks_; }
738
739  void Add(HBasicBlock* block);
740  void Remove(HBasicBlock* block);
741
742  void ClearAllBlocks() {
743    blocks_.ClearAllBits();
744  }
745
746  bool HasBackEdgeNotDominatedByHeader() const;
747
748  bool IsPopulated() const {
749    return blocks_.GetHighestBitSet() != -1;
750  }
751
752  bool DominatesAllBackEdges(HBasicBlock* block);
753
754 private:
755  // Internal recursive implementation of `Populate`.
756  void PopulateRecursive(HBasicBlock* block);
757  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);
758
759  HBasicBlock* header_;
760  HSuspendCheck* suspend_check_;
761  bool irreducible_;
762  bool contains_irreducible_loop_;
763  ArenaVector<HBasicBlock*> back_edges_;
764  ArenaBitVector blocks_;
765
766  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
767};
768
769// Stores try/catch information for basic blocks.
770// Note that HGraph is constructed so that catch blocks cannot simultaneously
771// be try blocks.
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks. An instance therefore represents exactly one of the two,
// distinguished by which constructor was used (try_entry_ vs catch_dex_file_).
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor. `kDexNoIndex16` as the type index
  // denotes a catch-all handler (see IsCatchAllTypeIndex below).
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  // Only valid for try blocks (DCHECKed).
  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // Only valid for catch blocks (DCHECKed).
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  // Only valid for catch blocks (DCHECKed).
  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  // Only valid for catch blocks (DCHECKed).
  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};
821
822static constexpr size_t kNoLifetime = -1;
823static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
824
825// A block in a method. Contains the list of instructions represented
826// as a double linked list. Each block knows its predecessors and
827// successors.
828
829class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
830 public:
831  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
832      : graph_(graph),
833        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
834        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
835        loop_information_(nullptr),
836        dominator_(nullptr),
837        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
838        block_id_(kInvalidBlockId),
839        dex_pc_(dex_pc),
840        lifetime_start_(kNoLifetime),
841        lifetime_end_(kNoLifetime),
842        try_catch_information_(nullptr) {
843    predecessors_.reserve(kDefaultNumberOfPredecessors);
844    successors_.reserve(kDefaultNumberOfSuccessors);
845    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
846  }
847
848  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
849    return predecessors_;
850  }
851
852  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
853    return successors_;
854  }
855
856  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
857  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
858
859  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
860    return ContainsElement(successors_, block, start_from);
861  }
862
863  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
864    return dominated_blocks_;
865  }
866
867  bool IsEntryBlock() const {
868    return graph_->GetEntryBlock() == this;
869  }
870
871  bool IsExitBlock() const {
872    return graph_->GetExitBlock() == this;
873  }
874
875  bool IsSingleGoto() const;
876  bool IsSingleTryBoundary() const;
877
878  // Returns true if this block emits nothing but a jump.
879  bool IsSingleJump() const {
880    HLoopInformation* loop_info = GetLoopInformation();
881    return (IsSingleGoto() || IsSingleTryBoundary())
882           // Back edges generate a suspend check.
883           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
884  }
885
886  void AddBackEdge(HBasicBlock* back_edge) {
887    if (loop_information_ == nullptr) {
888      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
889    }
890    DCHECK_EQ(loop_information_->GetHeader(), this);
891    loop_information_->AddBackEdge(back_edge);
892  }
893
894  HGraph* GetGraph() const { return graph_; }
895  void SetGraph(HGraph* graph) { graph_ = graph; }
896
897  uint32_t GetBlockId() const { return block_id_; }
898  void SetBlockId(int id) { block_id_ = id; }
899  uint32_t GetDexPc() const { return dex_pc_; }
900
901  HBasicBlock* GetDominator() const { return dominator_; }
902  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
903  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
904
905  void RemoveDominatedBlock(HBasicBlock* block) {
906    RemoveElement(dominated_blocks_, block);
907  }
908
909  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
910    ReplaceElement(dominated_blocks_, existing, new_block);
911  }
912
913  void ClearDominanceInformation();
914
915  int NumberOfBackEdges() const {
916    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
917  }
918
919  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
920  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
921  const HInstructionList& GetInstructions() const { return instructions_; }
922  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
923  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
924  const HInstructionList& GetPhis() const { return phis_; }
925
926  HInstruction* GetFirstInstructionDisregardMoves() const;
927
928  void AddSuccessor(HBasicBlock* block) {
929    successors_.push_back(block);
930    block->predecessors_.push_back(this);
931  }
932
933  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
934    size_t successor_index = GetSuccessorIndexOf(existing);
935    existing->RemovePredecessor(this);
936    new_block->predecessors_.push_back(this);
937    successors_[successor_index] = new_block;
938  }
939
940  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
941    size_t predecessor_index = GetPredecessorIndexOf(existing);
942    existing->RemoveSuccessor(this);
943    new_block->successors_.push_back(this);
944    predecessors_[predecessor_index] = new_block;
945  }
946
947  // Insert `this` between `predecessor` and `successor. This method
948  // preserves the indicies, and will update the first edge found between
949  // `predecessor` and `successor`.
950  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
951    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
952    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
953    successor->predecessors_[predecessor_index] = this;
954    predecessor->successors_[successor_index] = this;
955    successors_.push_back(successor);
956    predecessors_.push_back(predecessor);
957  }
958
959  void RemovePredecessor(HBasicBlock* block) {
960    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
961  }
962
963  void RemoveSuccessor(HBasicBlock* block) {
964    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
965  }
966
967  void ClearAllPredecessors() {
968    predecessors_.clear();
969  }
970
971  void AddPredecessor(HBasicBlock* block) {
972    predecessors_.push_back(block);
973    block->successors_.push_back(this);
974  }
975
976  void SwapPredecessors() {
977    DCHECK_EQ(predecessors_.size(), 2u);
978    std::swap(predecessors_[0], predecessors_[1]);
979  }
980
981  void SwapSuccessors() {
982    DCHECK_EQ(successors_.size(), 2u);
983    std::swap(successors_[0], successors_[1]);
984  }
985
986  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
987    return IndexOfElement(predecessors_, predecessor);
988  }
989
990  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
991    return IndexOfElement(successors_, successor);
992  }
993
994  HBasicBlock* GetSinglePredecessor() const {
995    DCHECK_EQ(GetPredecessors().size(), 1u);
996    return GetPredecessors()[0];
997  }
998
999  HBasicBlock* GetSingleSuccessor() const {
1000    DCHECK_EQ(GetSuccessors().size(), 1u);
1001    return GetSuccessors()[0];
1002  }
1003
1004  // Returns whether the first occurrence of `predecessor` in the list of
1005  // predecessors is at index `idx`.
1006  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
1007    DCHECK_EQ(GetPredecessors()[idx], predecessor);
1008    return GetPredecessorIndexOf(predecessor) == idx;
1009  }
1010
1011  // Create a new block between this block and its predecessors. The new block
1012  // is added to the graph, all predecessor edges are relinked to it and an edge
1013  // is created to `this`. Returns the new empty block. Reverse post order or
1014  // loop and try/catch information are not updated.
1015  HBasicBlock* CreateImmediateDominator();
1016
1017  // Split the block into two blocks just before `cursor`. Returns the newly
1018  // created, latter block. Note that this method will add the block to the
1019  // graph, create a Goto at the end of the former block and will create an edge
1020  // between the blocks. It will not, however, update the reverse post order or
1021  // loop and try/catch information.
1022  HBasicBlock* SplitBefore(HInstruction* cursor);
1023
1024  // Split the block into two blocks just before `cursor`. Returns the newly
1025  // created block. Note that this method just updates raw block information,
1026  // like predecessors, successors, dominators, and instruction list. It does not
1027  // update the graph, reverse post order, loop information, nor make sure the
1028  // blocks are consistent (for example ending with a control flow instruction).
1029  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1030
1031  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1032  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1033
1034  // Merge `other` at the end of `this`. Successors and dominated blocks of
1035  // `other` are changed to be successors and dominated blocks of `this`. Note
1036  // that this method does not update the graph, reverse post order, loop
1037  // information, nor make sure the blocks are consistent (for example ending
1038  // with a control flow instruction).
1039  void MergeWithInlined(HBasicBlock* other);
1040
1041  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1042  // of `this` are moved to `other`.
1043  // Note that this method does not update the graph, reverse post order, loop
1044  // information, nor make sure the blocks are consistent (for example ending
1045  // with a control flow instruction).
1046  void ReplaceWith(HBasicBlock* other);
1047
1048  // Merge `other` at the end of `this`. This method updates loops, reverse post
1049  // order, links to predecessors, successors, dominators and deletes the block
1050  // from the graph. The two blocks must be successive, i.e. `this` the only
1051  // predecessor of `other` and vice versa.
1052  void MergeWith(HBasicBlock* other);
1053
1054  // Disconnects `this` from all its predecessors, successors and dominator,
1055  // removes it from all loops it is included in and eventually from the graph.
1056  // The block must not dominate any other block. Predecessors and successors
1057  // are safely updated.
1058  void DisconnectAndDelete();
1059
1060  void AddInstruction(HInstruction* instruction);
1061  // Insert `instruction` before/after an existing instruction `cursor`.
1062  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1063  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1064  // Replace instruction `initial` with `replacement` within this block.
1065  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1066                                       HInstruction* replacement);
1067  void AddPhi(HPhi* phi);
1068  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1069  // RemoveInstruction and RemovePhi delete a given instruction from the respective
1070  // instruction list. With 'ensure_safety' set to true, it verifies that the
1071  // instruction is not in use and removes it from the use lists of its inputs.
1072  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1073  void RemovePhi(HPhi* phi, bool ensure_safety = true);
1074  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1075
1076  bool IsLoopHeader() const {
1077    return IsInLoop() && (loop_information_->GetHeader() == this);
1078  }
1079
1080  bool IsLoopPreHeaderFirstPredecessor() const {
1081    DCHECK(IsLoopHeader());
1082    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
1083  }
1084
1085  bool IsFirstPredecessorBackEdge() const {
1086    DCHECK(IsLoopHeader());
1087    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
1088  }
1089
1090  HLoopInformation* GetLoopInformation() const {
1091    return loop_information_;
1092  }
1093
1094  // Set the loop_information_ on this block. Overrides the current
1095  // loop_information if it is an outer loop of the passed loop information.
1096  // Note that this method is called while creating the loop information.
1097  void SetInLoop(HLoopInformation* info) {
1098    if (IsLoopHeader()) {
1099      // Nothing to do. This just means `info` is an outer loop.
1100    } else if (!IsInLoop()) {
1101      loop_information_ = info;
1102    } else if (loop_information_->Contains(*info->GetHeader())) {
1103      // Block is currently part of an outer loop. Make it part of this inner loop.
1104      // Note that a non loop header having a loop information means this loop information
1105      // has already been populated
1106      loop_information_ = info;
1107    } else {
1108      // Block is part of an inner loop. Do not update the loop information.
1109      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1110      // at this point, because this method is being called while populating `info`.
1111    }
1112  }
1113
1114  // Raw update of the loop information.
1115  void SetLoopInformation(HLoopInformation* info) {
1116    loop_information_ = info;
1117  }
1118
1119  bool IsInLoop() const { return loop_information_ != nullptr; }
1120
1121  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }
1122
1123  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
1124    try_catch_information_ = try_catch_information;
1125  }
1126
1127  bool IsTryBlock() const {
1128    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
1129  }
1130
1131  bool IsCatchBlock() const {
1132    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
1133  }
1134
1135  // Returns the try entry that this block's successors should have. They will
1136  // be in the same try, unless the block ends in a try boundary. In that case,
1137  // the appropriate try entry will be returned.
1138  const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1139
1140  bool HasThrowingInstructions() const;
1141
1142  // Returns whether this block dominates the blocked passed as parameter.
1143  bool Dominates(HBasicBlock* block) const;
1144
1145  size_t GetLifetimeStart() const { return lifetime_start_; }
1146  size_t GetLifetimeEnd() const { return lifetime_end_; }
1147
1148  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
1149  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1150
1151  bool EndsWithControlFlowInstruction() const;
1152  bool EndsWithIf() const;
1153  bool EndsWithTryBoundary() const;
1154  bool HasSinglePhi() const;
1155
1156 private:
1157  HGraph* graph_;
1158  ArenaVector<HBasicBlock*> predecessors_;
1159  ArenaVector<HBasicBlock*> successors_;
1160  HInstructionList instructions_;
1161  HInstructionList phis_;
1162  HLoopInformation* loop_information_;
1163  HBasicBlock* dominator_;
1164  ArenaVector<HBasicBlock*> dominated_blocks_;
1165  uint32_t block_id_;
1166  // The dex program counter of the first instruction of this block.
1167  const uint32_t dex_pc_;
1168  size_t lifetime_start_;
1169  size_t lifetime_end_;
1170  TryCatchInformation* try_catch_information_;
1171
1172  friend class HGraph;
1173  friend class HInstruction;
1174
1175  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1176};
1177
1178// Iterates over the LoopInformation of all loops which contain 'block'
1179// from the innermost to the outermost.
1180class HLoopInformationOutwardIterator : public ValueObject {
1181 public:
1182  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1183      : current_(block.GetLoopInformation()) {}
1184
1185  bool Done() const { return current_ == nullptr; }
1186
1187  void Advance() {
1188    DCHECK(!Done());
1189    current_ = current_->GetPreHeader()->GetLoopInformation();
1190  }
1191
1192  HLoopInformation* Current() const {
1193    DCHECK(!Done());
1194    return current_;
1195  }
1196
1197 private:
1198  HLoopInformation* current_;
1199
1200  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1201};
1202
// X-macro listing every concrete HInstruction subclass shared by all
// architectures, paired with its direct superclass. Expanded below to generate
// forward declarations, and by DECLARE_INSTRUCTION users to generate kind
// enums, DebugName() and type checks. NOTE: the order of entries is
// significant for generated enumerations — do not reorder.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
1283
1284/*
1285 * Instructions, shared across several (not all) architectures.
1286 */
1287#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1288#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1289#else
1290#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1291  M(BitwiseNegatedRight, Instruction)                                   \
1292  M(MultiplyAccumulate, Instruction)                                    \
1293  M(IntermediateAddress, Instruction)
1294#endif
1295
1296#ifndef ART_ENABLE_CODEGEN_arm
1297#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1298#else
1299#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1300  M(ArmDexCacheArraysBase, Instruction)
1301#endif
1302
1303#ifndef ART_ENABLE_CODEGEN_arm64
1304#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1305#else
1306#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
1307  M(Arm64DataProcWithShifterOp, Instruction)
1308#endif
1309
1310#ifndef ART_ENABLE_CODEGEN_mips
1311#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1312#else
1313#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
1314  M(MipsComputeBaseMethodAddress, Instruction)                          \
1315  M(MipsDexCacheArraysBase, Instruction)
1316#endif
1317
1318#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1319
1320#ifndef ART_ENABLE_CODEGEN_x86
1321#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1322#else
1323#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1324  M(X86ComputeBaseMethodAddress, Instruction)                           \
1325  M(X86LoadFromConstantTable, Instruction)                              \
1326  M(X86FPNeg, Instruction)                                              \
1327  M(X86PackedSwitch, Instruction)
1328#endif
1329
1330#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1331
1332#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1333  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1334  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1335  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1336  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1337  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1338  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1339  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1340  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1341
// Abstract (non-instantiable) instruction classes, with their superclasses.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

// Every instruction class, concrete and abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Emit a forward declaration (`class HFoo;`) for every instruction class.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1356
// Placed in the body of each concrete H##type class: overrides the kind,
// debug name, type-equality check and visitor dispatch for that class.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Placed in the body of each abstract H##type class: shadows the base-class
// Is##type()/As##type() helpers with trivially-true/`this` versions.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1369
// A single entry in an instruction's use list: records the user (of type T)
// and the index of the input through which the instruction is used. Nodes are
// arena-allocated and linked via the intrusive-list hook below.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Constructor is private; nodes are created by HInstruction (friend below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  // The user is fixed for the lifetime of the node; the input index may change.
  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Singly-linked, intrusive list of use-list nodes.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1395
1396// This class is used by HEnvironment and HInstruction classes to record the
1397// instructions they use and pointers to the corresponding HUseListNodes kept
1398// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  // Default/unlinked records: no use-list node associated yet.
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Rebinds an existing record to a (new) position in the use list.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // The stored iterator points *before* the node (see member comment); the
  // actual node is therefore one increment away.
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1423
1424// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1425// This is used for HInstruction::GetInputs() to return a container wrapper providing
1426// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  // Projects a (possibly const) input record onto its instruction pointer.
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

// Views over an instruction's input records that yield HInstruction* directly.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1438
1439/**
1440 * Side-effects representation.
1441 *
1442 * For write/read dependences on fields/arrays, the dependence analysis uses
1443 * type disambiguation (e.g. a float field write cannot modify the value of an
1444 * integer field read) and the access type (e.g.  a reference array write cannot
1445 * modify the value of a reference field read [although it may modify the
1446 * reference fetch prior to reading the field, which is represented by its own
1447 * write/read dependence]). The analysis makes conservative points-to
1448 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1449 * the same, and any reference read depends on any reference read without
1450 * further regard of its type).
1451 *
 * The internal representation uses 38 bits and is described in the table below.
1453 * The first line indicates the side effect, and for field/array accesses the
1454 * second line indicates the type of the access (in the order of the
1455 * Primitive::Type enum).
1456 * The two numbered lines below indicate the bit position in the bitfield (read
1457 * vertically).
1458 *
1459 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1460 *   +-------------+---------+---------+--------------+---------+---------+
1461 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1462 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1463 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1464 *
1465 * Note that, to ease the implementation, 'changes' bits are least significant
1466 * bits, while 'dependency' bits are most significant bits.
1467 */
1468class SideEffects : public ValueObject {
1469 public:
  // Default: no side effects and no dependencies.
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  // Every change and every dependency bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // Everything except the "depends on GC" bit: all writes/reads plus the
  // "can trigger GC" change bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access acts as a full read/write barrier, hence the
  // conservative AllWritesAndReads() for the volatile case.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }
1531
  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set of side effects with those of `other` removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with `other`.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if `other`'s flags are a subset of this set's flags.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any 'change' bit is set.
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any 'dependency' bit is set.
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  // Returns true if every change and dependency bit is set (equals All()).
  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift the dependency bits down so they line up with the change bits
    // of `other`, then test for overlap.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }
1587
1588  // Returns string representation of flags (for debugging only).
1589  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1590  std::string ToString() const {
1591    std::string flags = "|";
1592    for (int s = kLastBit; s >= 0; s--) {
1593      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1594      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1595        // This is a bit for the GC side effect.
1596        if (current_bit_is_set) {
1597          flags += "GC";
1598        }
1599        flags += "|";
1600      } else {
1601        // This is a bit for the array/field analysis.
1602        // The underscore character stands for the 'can trigger GC' bit.
1603        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1604        if (current_bit_is_set) {
1605          flags += kDebug[s];
1606        }
1607        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1608            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1609          flags += "|";
1610        }
1611      }
1612    }
1613    return flags;
1614  }
1615
  // Returns true if both sets contain exactly the same flags.
  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of bits reserved per access kind, one bit per primitive type
  // (cf. the kDebug string in ToString()).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // Bit layout, low to high: field writes, array writes, can-trigger-GC,
  // then the mirrored "depend on" half: field reads, array reads, depends-on-GC.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  // Masks over the two halves and over each write/read group.
  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1647
1648  // Translates type to bit flag.
1649  static uint64_t TypeFlag(Primitive::Type type, int offset) {
1650    CHECK_NE(type, Primitive::kPrimVoid);
1651    const uint64_t one = 1;
1652    const int shift = type;  // 0-based consecutive enum
1653    DCHECK_LE(kFieldWriteOffset, shift);
1654    DCHECK_LT(shift, kArrayWriteOffset);
1655    return one << (type + offset);
1656  }
1657
  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  // Bit vector holding the change bits (low half) and depend-on bits (high half).
  uint64_t flags_;
};
1663
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Creates an environment with the same shape and metadata as `to_copy`,
  // held by `holder`. Note: the vreg values themselves are not copied here;
  // callers use CopyFrom() for that.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a copy of `parent` (and, recursively, of its own parent chain)
  // at the end of this environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Sets the value of virtual register `index`. Raw because the use lists of
  // `instruction` are not updated.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // An environment has a parent iff it belongs to an inlined invocation.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Values of the virtual registers, recorded as uses of this environment.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  // Per-vreg locations (see SetLocationAt/GetLocationAt).
  ArenaVector<Location> locations_;
  // Environment of the caller, non-null for inlined invocations.
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  // HInstruction needs direct access to vregs_ to fix up use records.
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1776
// Abstract base class for all nodes of the HGraph intermediate representation.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// One enumerator per instruction class, e.g. kAdd for HAdd.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  // Sets input `index`. Raw because the use list of `input` is not updated.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` has this instruction as its `index`-th input.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Records that environment `user` has this instruction at vreg `index`.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Removes this instruction's use node from the uses list of its `input`-th input.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Removes this instruction from the uses lists of all its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Makes `environment` the new head of this instruction's environment chain,
  // with the old head as its parent. Raw: no use-list updates.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom(), but loop phis of `block` are replaced by
  // their first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declarations for concrete instructions; definitions are provided elsewhere.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions, the base class provides the default (null) casts.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses start allocating their packed bits at this index.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After a push_front() into uses_, refresh the before-use-node iterators
  // cached in the input records of the users up to (excluding) `fixup_end`.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After erasing a use node, refresh the iterator cached in the input record
  // of the use that now follows `before_use_node`, if any.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseInsertion().
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
2233std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2234
2235class HInstructionIterator : public ValueObject {
2236 public:
2237  explicit HInstructionIterator(const HInstructionList& instructions)
2238      : instruction_(instructions.first_instruction_) {
2239    next_ = Done() ? nullptr : instruction_->GetNext();
2240  }
2241
2242  bool Done() const { return instruction_ == nullptr; }
2243  HInstruction* Current() const { return instruction_; }
2244  void Advance() {
2245    instruction_ = next_;
2246    next_ = Done() ? nullptr : instruction_->GetNext();
2247  }
2248
2249 private:
2250  HInstruction* instruction_;
2251  HInstruction* next_;
2252
2253  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2254};
2255
2256class HBackwardInstructionIterator : public ValueObject {
2257 public:
2258  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2259      : instruction_(instructions.last_instruction_) {
2260    next_ = Done() ? nullptr : instruction_->GetPrevious();
2261  }
2262
2263  bool Done() const { return instruction_ == nullptr; }
2264  HInstruction* Current() const { return instruction_; }
2265  void Advance() {
2266    instruction_ = next_;
2267    next_ = Done() ? nullptr : instruction_->GetPrevious();
2268  }
2269
2270 private:
2271  HInstruction* instruction_;
2272  HInstruction* next_;
2273
2274  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2275};
2276
// Base template for instructions with a statically-known number of inputs (N),
// stored inline in the instruction.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  // Inline storage for the N input use records.
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2294
// HTemplateInstruction specialization for N=0. There are no inputs, so
// GetInputRecords() returns an empty array and no storage is needed.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2312
// Base template for instructions with N inputs that produce a value; the
// result type is kept in the packed fields.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // The result type occupies the packed bits right after the generic
  // HInstruction bits; subclasses continue at kNumberOfExpressionPackedBits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2335
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2350
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block. Its single input is the
// value being returned.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2367
// SSA phi node: merges one value per predecessor of its block into a single
// value. Inputs are stored in an arena vector because their number varies
// (and can change via AddInput/RemoveInputAt), unlike HTemplateInstruction.
class HPhi FINAL : public HInstruction {
 public:
  // `reg_number` is the dex register this phi merges; `number_of_inputs`
  // normally matches the number of predecessors of the owning block.
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness is tracked by SsaBuilder/SsaPhiElimination; see constructor note.
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi in the same block for the same dex register,
  // i.e. a (possibly differently-typed) equivalent of this phi.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type field, then the two flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t reg_number_;  // Dex register this phi merges.

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2462
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2477
// Jumps from one block to another. Must terminate a block with exactly one
// successor.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The jump target; valid because a block ending in a HGoto has a single
  // successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2494
// Abstract base class for compile-time constants of all primitive types.
// Constants have no side effects and no inputs, and can always be moved
// (they are typically hoisted to the entry block and deduplicated by HGraph).
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw 64-bit representation of the value, zero-extended if narrower.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2518
2519class HNullConstant FINAL : public HConstant {
2520 public:
2521  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2522    return true;
2523  }
2524
2525  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2526
2527  size_t ComputeHashCode() const OVERRIDE { return 0; }
2528
2529  // The null constant representation is a 0-bit pattern.
2530  virtual bool IsZeroBitPattern() const { return true; }
2531
2532  DECLARE_INSTRUCTION(NullConstant);
2533
2534 private:
2535  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2536
2537  friend class HGraph;
2538  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2539};
2540
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction). Only HGraph may
// create them (private constructors), enabling deduplication.
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // Zero-extend: go through uint32_t to avoid sign-extension of negatives.
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2583
// Constants of the type long. Only HGraph may create them (private
// constructor), enabling deduplication.
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2613
// Constants of the type float. Equality and the Is* predicates compare bit
// patterns (via bit_cast) rather than using `==`, so that NaNs and the two
// signed zeros are handled deterministically.
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    // Bit-pattern comparison: distinguishes +0.0f/-0.0f, matches NaNs exactly.
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Only +0.0f is an all-zero bit pattern (-0.0f has the sign bit set).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2666
// Constants of the type double. Equality and the Is* predicates compare bit
// patterns (via bit_cast) rather than using `==`, so that NaNs and the two
// signed zeros are handled deterministically.
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    // Bit-pattern comparison: distinguishes +0.0/-0.0, matches NaNs exactly.
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Only +0.0 is an all-zero bit pattern (-0.0 has the sign bit set).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2717
// Conditional branch. A block ending with an HIf instruction must have
// two successors: the true successor at index 0 and the false successor
// at index 1. The single input is the condition.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2742
2743
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary marks the entry into or the exit out of a try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Exception handlers are successors 1..N, exposed as a sub-array view.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed field holding the BoundaryKind.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2805
// Deoptimize to interpreter, upon checking a condition. The single input is
// the condition; an environment is required to reconstruct interpreter state.
class HDeoptimize FINAL : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2828
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2842
2843// Fetches an ArtMethod from the virtual table or the interface method table
2844// of a class.
2845class HClassTableGet FINAL : public HExpression<1> {
2846 public:
2847  enum class TableKind {
2848    kVTable,
2849    kIMTable,
2850    kLast = kIMTable
2851  };
2852  HClassTableGet(HInstruction* cls,
2853                 Primitive::Type type,
2854                 TableKind kind,
2855                 size_t index,
2856                 uint32_t dex_pc)
2857      : HExpression(type, SideEffects::None(), dex_pc),
2858        index_(index) {
2859    SetPackedField<TableKindField>(kind);
2860    SetRawInputAt(0, cls);
2861  }
2862
2863  bool CanBeMoved() const OVERRIDE { return true; }
2864  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2865    return other->AsClassTableGet()->GetIndex() == index_ &&
2866        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2867  }
2868
2869  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2870  size_t GetIndex() const { return index_; }
2871
2872  DECLARE_INSTRUCTION(ClassTableGet);
2873
2874 private:
2875  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2876  static constexpr size_t kFieldTableKindSize =
2877      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2878  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2879  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2880                "Too many packed fields.");
2881  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
2882
2883  // The index of the ArtMethod in the table.
2884  const size_t index_;
2885
2886  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2887};
2888
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode. Case values are the
// contiguous range [start_value, start_value + num_entries).
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;   // Case value of the first table entry.
  const uint32_t num_entries_;  // Number of case entries (excludes default).

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
2922
// Abstract base class for operations with a single input and a result
// (e.g. neg, not). Subclasses implement the Evaluate() overloads used for
// constant folding.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
2954
// Abstract base class for operations with two inputs and a result. Provides
// input-canonicalization helpers for GVN and the Evaluate() overloads used
// for constant folding.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or constant already on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. The (null, null) and (long, int)
  // cases have fatal defaults; only subclasses for which they make sense
  // (e.g. equality conditions, shifts) override them.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3044
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias  // Used to size the packed field in HCondition.
};

std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3055
// Abstract base class for boolean-valued comparisons (eq, ne, lt, ...).
// Stores a ComparisonBias packed field so that a HCompare merged into the
// condition can keep its NaN semantics.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether a floating-point comparison evaluates to true when either
  // operand is NaN. Only valid for FP inputs (DCHECKed).
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Complement of the above for the false outcome on NaN.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way compare: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way FP compare applying the NaN bias for unordered operands.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3135
// Instruction to check if two inputs are equal to each other.
class HEqual FINAL : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null == null is always true.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(true, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HEqual instruction; evaluate it as
  // `Compare(x, y) == 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
                                 GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};
3180
3181class HNotEqual FINAL : public HCondition {
3182 public:
3183  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3184      : HCondition(first, second, dex_pc) {}
3185
3186  bool IsCommutative() const OVERRIDE { return true; }
3187
3188  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3189                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3190    return MakeConstantCondition(false, GetDexPc());
3191  }
3192  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3193    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3194  }
3195  // In the following Evaluate methods, a HCompare instruction has
3196  // been merged into this HNotEqual instruction; evaluate it as
3197  // `Compare(x, y) != 0`.
3198  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3199    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3200  }
3201  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3202    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3203  }
3204  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3205    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3206  }
3207
3208  DECLARE_INSTRUCTION(NotEqual);
3209
3210  IfCondition GetCondition() const OVERRIDE {
3211    return kCondNE;
3212  }
3213
3214  IfCondition GetOppositeCondition() const OVERRIDE {
3215    return kCondEQ;
3216  }
3217
3218 private:
3219  template <typename T> static bool Compute(T x, T y) { return x != y; }
3220
3221  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3222};
3223
3224class HLessThan FINAL : public HCondition {
3225 public:
3226  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3227      : HCondition(first, second, dex_pc) {}
3228
3229  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3230    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3231  }
3232  // In the following Evaluate methods, a HCompare instruction has
3233  // been merged into this HLessThan instruction; evaluate it as
3234  // `Compare(x, y) < 0`.
3235  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3236    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3237  }
3238  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3239    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3240  }
3241  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3242    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3243  }
3244
3245  DECLARE_INSTRUCTION(LessThan);
3246
3247  IfCondition GetCondition() const OVERRIDE {
3248    return kCondLT;
3249  }
3250
3251  IfCondition GetOppositeCondition() const OVERRIDE {
3252    return kCondGE;
3253  }
3254
3255 private:
3256  template <typename T> static bool Compute(T x, T y) { return x < y; }
3257
3258  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3259};
3260
3261class HLessThanOrEqual FINAL : public HCondition {
3262 public:
3263  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3264      : HCondition(first, second, dex_pc) {}
3265
3266  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3267    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3268  }
3269  // In the following Evaluate methods, a HCompare instruction has
3270  // been merged into this HLessThanOrEqual instruction; evaluate it as
3271  // `Compare(x, y) <= 0`.
3272  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3273    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3274  }
3275  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3276    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3277  }
3278  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3279    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3280  }
3281
3282  DECLARE_INSTRUCTION(LessThanOrEqual);
3283
3284  IfCondition GetCondition() const OVERRIDE {
3285    return kCondLE;
3286  }
3287
3288  IfCondition GetOppositeCondition() const OVERRIDE {
3289    return kCondGT;
3290  }
3291
3292 private:
3293  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3294
3295  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3296};
3297
3298class HGreaterThan FINAL : public HCondition {
3299 public:
3300  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3301      : HCondition(first, second, dex_pc) {}
3302
3303  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3304    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3305  }
3306  // In the following Evaluate methods, a HCompare instruction has
3307  // been merged into this HGreaterThan instruction; evaluate it as
3308  // `Compare(x, y) > 0`.
3309  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3310    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3311  }
3312  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3313    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3314  }
3315  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3316    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3317  }
3318
3319  DECLARE_INSTRUCTION(GreaterThan);
3320
3321  IfCondition GetCondition() const OVERRIDE {
3322    return kCondGT;
3323  }
3324
3325  IfCondition GetOppositeCondition() const OVERRIDE {
3326    return kCondLE;
3327  }
3328
3329 private:
3330  template <typename T> static bool Compute(T x, T y) { return x > y; }
3331
3332  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3333};
3334
3335class HGreaterThanOrEqual FINAL : public HCondition {
3336 public:
3337  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3338      : HCondition(first, second, dex_pc) {}
3339
3340  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3341    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3342  }
3343  // In the following Evaluate methods, a HCompare instruction has
3344  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3345  // `Compare(x, y) >= 0`.
3346  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3347    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3348  }
3349  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3350    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3351  }
3352  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3353    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3354  }
3355
3356  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3357
3358  IfCondition GetCondition() const OVERRIDE {
3359    return kCondGE;
3360  }
3361
3362  IfCondition GetOppositeCondition() const OVERRIDE {
3363    return kCondLT;
3364  }
3365
3366 private:
3367  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3368
3369  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3370};
3371
3372class HBelow FINAL : public HCondition {
3373 public:
3374  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3375      : HCondition(first, second, dex_pc) {}
3376
3377  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3378    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3379  }
3380  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3381    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3382  }
3383  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3384                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3385    LOG(FATAL) << DebugName() << " is not defined for float values";
3386    UNREACHABLE();
3387  }
3388  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3389                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3390    LOG(FATAL) << DebugName() << " is not defined for double values";
3391    UNREACHABLE();
3392  }
3393
3394  DECLARE_INSTRUCTION(Below);
3395
3396  IfCondition GetCondition() const OVERRIDE {
3397    return kCondB;
3398  }
3399
3400  IfCondition GetOppositeCondition() const OVERRIDE {
3401    return kCondAE;
3402  }
3403
3404 private:
3405  template <typename T> static bool Compute(T x, T y) {
3406    return MakeUnsigned(x) < MakeUnsigned(y);
3407  }
3408
3409  DISALLOW_COPY_AND_ASSIGN(HBelow);
3410};
3411
3412class HBelowOrEqual FINAL : public HCondition {
3413 public:
3414  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3415      : HCondition(first, second, dex_pc) {}
3416
3417  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3418    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3419  }
3420  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3421    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3422  }
3423  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3424                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3425    LOG(FATAL) << DebugName() << " is not defined for float values";
3426    UNREACHABLE();
3427  }
3428  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3429                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3430    LOG(FATAL) << DebugName() << " is not defined for double values";
3431    UNREACHABLE();
3432  }
3433
3434  DECLARE_INSTRUCTION(BelowOrEqual);
3435
3436  IfCondition GetCondition() const OVERRIDE {
3437    return kCondBE;
3438  }
3439
3440  IfCondition GetOppositeCondition() const OVERRIDE {
3441    return kCondA;
3442  }
3443
3444 private:
3445  template <typename T> static bool Compute(T x, T y) {
3446    return MakeUnsigned(x) <= MakeUnsigned(y);
3447  }
3448
3449  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3450};
3451
3452class HAbove FINAL : public HCondition {
3453 public:
3454  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3455      : HCondition(first, second, dex_pc) {}
3456
3457  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3458    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3459  }
3460  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3461    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3462  }
3463  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3464                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3465    LOG(FATAL) << DebugName() << " is not defined for float values";
3466    UNREACHABLE();
3467  }
3468  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3469                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3470    LOG(FATAL) << DebugName() << " is not defined for double values";
3471    UNREACHABLE();
3472  }
3473
3474  DECLARE_INSTRUCTION(Above);
3475
3476  IfCondition GetCondition() const OVERRIDE {
3477    return kCondA;
3478  }
3479
3480  IfCondition GetOppositeCondition() const OVERRIDE {
3481    return kCondBE;
3482  }
3483
3484 private:
3485  template <typename T> static bool Compute(T x, T y) {
3486    return MakeUnsigned(x) > MakeUnsigned(y);
3487  }
3488
3489  DISALLOW_COPY_AND_ASSIGN(HAbove);
3490};
3491
3492class HAboveOrEqual FINAL : public HCondition {
3493 public:
3494  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3495      : HCondition(first, second, dex_pc) {}
3496
3497  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3498    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3499  }
3500  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3501    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3502  }
3503  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3504                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3505    LOG(FATAL) << DebugName() << " is not defined for float values";
3506    UNREACHABLE();
3507  }
3508  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3509                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3510    LOG(FATAL) << DebugName() << " is not defined for double values";
3511    UNREACHABLE();
3512  }
3513
3514  DECLARE_INSTRUCTION(AboveOrEqual);
3515
3516  IfCondition GetCondition() const OVERRIDE {
3517    return kCondAE;
3518  }
3519
3520  IfCondition GetOppositeCondition() const OVERRIDE {
3521    return kCondB;
3522  }
3523
3524 private:
3525  template <typename T> static bool Compute(T x, T y) {
3526    return MakeUnsigned(x) >= MakeUnsigned(y);
3527  }
3528
3529  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3530};
3531
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must agree with the declared comparison type (int-like
    // kinds are folded together by Primitive::PrimitiveKind).
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way comparison for totally ordered (non-floating-point) values.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point values; an unordered result
  // (NaN operand) is resolved to 1 or -1 according to the instruction's bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are data-equal when all their packed fields (i.e. the
  // comparison bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // Packed-field layout: the comparison bias is stored just above the
  // bits already used by HExpression.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3622
// Allocates a new object of the class identified by `type_index` in
// `dex_file`, calling into the runtime through `entrypoint`.
class HNewInstance FINAL : public HExpression<2> {
 public:
  // Input 0 is the class to instantiate; input 1 is the current method.
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool needs_access_check,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Needs to call into runtime to make sure it's instantiable/accessible.
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  // Replaces the runtime entrypoint used for the allocation.
  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Defined out of line; true when this allocation is a String allocation.
  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // Packed flags stored on top of HExpression's packed bits.
  static constexpr size_t kFlagNeedsAccessCheck = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagFinalizable = kFlagNeedsAccessCheck + 1;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const uint16_t type_index_;       // Type index of the instantiated class.
  const DexFile& dex_file_;         // Dex file `type_index_` refers to.
  QuickEntrypointEnum entrypoint_;  // Runtime entrypoint used for allocation.

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3682
// Enumeration of the intrinsic methods recognized by the optimizing compiler.
// The k<Name> enumerators are generated by expanding INTRINSICS_LIST (defined
// by intrinsics_list.h); kNone marks an invoke that is not an intrinsic.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
// Pretty-printer for Intrinsics values (defined out of line).
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
3693
// Whether an intrinsic needs an environment and/or a dex cache
// (see HInvoke::SetIntrinsic).
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3698
// Heap side-effect categories for intrinsics (see HInvoke::SetIntrinsic).
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3705
// Whether an intrinsic can throw exceptions (see HInvoke::SetIntrinsic).
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3710
// Abstract base class for instructions that invoke a method.
class HInvoke : public HInstruction {
 public:
  // Defined out of line.
  bool NeedsEnvironment() const OVERRIDE;

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  // The invoke's type is the callee's return type (kept in a packed field).
  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const {
    return GetPackedField<OriginalInvokeTypeField>();
  }

  // Intrinsics::kNone unless SetIntrinsic() has recognized this invoke.
  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only invokes recognized as intrinsics are movable.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes are data-equal only when both are the same (non-kNone) intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout on top of the generic packed bits: original invoke
  // type, then the return type, then a can-throw flag.
  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOriginalInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using OriginalInvokeTypeField =
      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
    // Conservatively assume any call may throw; subclasses can clear
    // this via SetCanThrow().
    SetPackedFlag<kFlagCanThrow>(true);
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  uint32_t number_of_arguments_;
  // The arguments, possibly followed by extra non-argument inputs.
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3820
// An invoke targeting a method that was not resolved at compile time.
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  // Uses no extra non-argument inputs (hence 0u below).
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
3843
3844class HInvokeStaticOrDirect FINAL : public HInvoke {
3845 public:
3846  // Requirements of this method call regarding the class
3847  // initialization (clinit) check of its declaring class.
3848  enum class ClinitCheckRequirement {
3849    kNone,      // Class already initialized.
3850    kExplicit,  // Static call having explicit clinit check as last input.
3851    kImplicit,  // Static call implicitly requiring a clinit check.
3852    kLast = kImplicit
3853  };
3854
3855  // Determines how to load the target ArtMethod*.
3856  enum class MethodLoadKind {
3857    // Use a String init ArtMethod* loaded from Thread entrypoints.
3858    kStringInit,
3859
3860    // Use the method's own ArtMethod* loaded by the register allocator.
3861    kRecursive,
3862
3863    // Use ArtMethod* at a known address, embed the direct address in the code.
3864    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3865    kDirectAddress,
3866
3867    // Use ArtMethod* at an address that will be known at link time, embed the direct
3868    // address in the code. If the image is relocatable, emit .patch_oat entry.
3869    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3870    // the image relocatable or not.
3871    kDirectAddressWithFixup,
3872
3873    // Load from resolved methods array in the dex cache using a PC-relative load.
3874    // Used when we need to use the dex cache, for example for invoke-static that
3875    // may cause class initialization (the entry may point to a resolution method),
3876    // and we know that we can access the dex cache arrays using a PC-relative load.
3877    kDexCachePcRelative,
3878
3879    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3880    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3881    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3882    // or unimplemented or impractical (i.e. slow) on a particular architecture.
3883    kDexCacheViaMethod,
3884  };
3885
3886  // Determines the location of the code pointer.
3887  enum class CodePtrLocation {
3888    // Recursive call, use local PC-relative call instruction.
3889    kCallSelf,
3890
3891    // Use PC-relative call instruction patched at link time.
3892    // Used for calls within an oat file, boot->boot or app->app.
3893    kCallPCRelative,
3894
3895    // Call to a known target address, embed the direct address in code.
3896    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3897    kCallDirect,
3898
3899    // Call to a target address that will be known at link time, embed the direct
3900    // address in code. If the image is relocatable, emit .patch_oat entry.
3901    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3902    // the image relocatable or not.
3903    kCallDirectWithFixup,
3904
3905    // Use code pointer from the ArtMethod*.
3906    // Used when we don't know the target code. This is also the last-resort-kind used when
3907    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3908    kCallArtMethod,
3909  };
3910
3911  struct DispatchInfo {
3912    MethodLoadKind method_load_kind;
3913    CodePtrLocation code_ptr_location;
3914    // The method load data holds
3915    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
3916    //     Note that there are multiple string init methods, each having its own offset.
3917    //   - the method address for kDirectAddress
3918    //   - the dex cache arrays offset for kDexCachePcRel.
3919    uint64_t method_load_data;
3920    uint64_t direct_code_ptr;
3921  };
3922
  // Constructs a static or direct invoke. `target_method` may refer to a
  // different dex file / method index than the original invoke, and
  // `dispatch_info` records how the method and its code are to be located.
  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType optimized_invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod input, and
                // potentially one other if the clinit check is explicit. Only these two
                // are accounted for here; any other special input (e.g. a PC-relative
                // addressing base) is appended later via AddSpecialInput().
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }
3949
  // Replaces the dispatch info with a (better) one. If the new method load
  // kind no longer requires the HCurrentMethod special input, that input is
  // removed here.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }
3964
  // Inserts `input` at the special input position, i.e. right after the plain
  // arguments. Must not be called for string-init invokes or when the
  // HCurrentMethod special input is already present.
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }
3972
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Returns the input records, additionally verifying (debug builds only) the
  // invariant on the last input of an invoke with an explicit clinit check.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }
3991
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  // Only reference-typed results can be null; string-init invokes are treated
  // as never returning null.
  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }
4001
  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  // True when an input beyond the plain arguments is present.
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  InvokeType GetOptimizedInvokeType() const {
    return GetPackedField<OptimizedInvokeTypeField>();
  }

  void SetOptimizedInvokeType(InvokeType invoke_type) {
    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
  }

  // Accessors and predicates over the dispatch info (see DispatchInfo above).
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }
4041
  // The thread entrypoint offset for a kStringInit invoke (see DispatchInfo).
  // NOTE(review): this narrows the 64-bit method_load_data to int32_t; the
  // entrypoint offset is presumably small enough to fit -- confirm.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  // The method address for a kDirectAddress invoke.
  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  // The dex cache arrays offset for a kDexCachePcRelative invoke.
  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  // The target code address for a kCallDirect code pointer location.
  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }
4070
  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    // Unregister this invoke as a user of the check before dropping the input.
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    // Downgrade the requirement so IsStaticWithExplicitClinitCheck() no longer holds.
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }
4085
  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }
4102
  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 private:
  // Bit-field layout within the packed fields, placed right after the bits
  // used by HInvoke. The static_assert below guards against overflowing the
  // available packed-bit storage.
  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldOptimizedInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldClinitCheckRequirement =
      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using OptimizedInvokeTypeField =
      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4137
4138class HInvokeVirtual FINAL : public HInvoke {
4139 public:
4140  HInvokeVirtual(ArenaAllocator* arena,
4141                 uint32_t number_of_arguments,
4142                 Primitive::Type return_type,
4143                 uint32_t dex_pc,
4144                 uint32_t dex_method_index,
4145                 uint32_t vtable_index)
4146      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
4147        vtable_index_(vtable_index) {}
4148
4149  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4150    // TODO: Add implicit null checks in intrinsics.
4151    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4152  }
4153
4154  uint32_t GetVTableIndex() const { return vtable_index_; }
4155
4156  DECLARE_INSTRUCTION(InvokeVirtual);
4157
4158 private:
4159  const uint32_t vtable_index_;
4160
4161  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
4162};
4163
4164class HInvokeInterface FINAL : public HInvoke {
4165 public:
4166  HInvokeInterface(ArenaAllocator* arena,
4167                   uint32_t number_of_arguments,
4168                   Primitive::Type return_type,
4169                   uint32_t dex_pc,
4170                   uint32_t dex_method_index,
4171                   uint32_t imt_index)
4172      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
4173        imt_index_(imt_index) {}
4174
4175  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4176    // TODO: Add implicit null checks in intrinsics.
4177    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4178  }
4179
4180  uint32_t GetImtIndex() const { return imt_index_; }
4181  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4182
4183  DECLARE_INSTRUCTION(InvokeInterface);
4184
4185 private:
4186  const uint32_t imt_index_;
4187
4188  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4189};
4190
4191class HNeg FINAL : public HUnaryOperation {
4192 public:
4193  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4194      : HUnaryOperation(result_type, input, dex_pc) {
4195    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4196  }
4197
4198  template <typename T> static T Compute(T x) { return -x; }
4199
4200  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4201    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4202  }
4203  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4204    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4205  }
4206  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4207    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4208  }
4209  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4210    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4211  }
4212
4213  DECLARE_INSTRUCTION(Neg);
4214
4215 private:
4216  DISALLOW_COPY_AND_ASSIGN(HNeg);
4217};
4218
class HNewArray FINAL : public HExpression<2> {
 public:
  // Allocates a new array of `length` elements whose type is denoted by
  // `type_index` in `dex_file`, through runtime entrypoint `entrypoint`.
  // Input 0 is the array length, input 1 the current method.
  HNewArray(HInstruction* length,
            HCurrentMethod* current_method,
            uint32_t dex_pc,
            uint16_t type_index,
            const DexFile& dex_file,
            QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, length);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};
4257
4258class HAdd FINAL : public HBinaryOperation {
4259 public:
4260  HAdd(Primitive::Type result_type,
4261       HInstruction* left,
4262       HInstruction* right,
4263       uint32_t dex_pc = kNoDexPc)
4264      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4265
4266  bool IsCommutative() const OVERRIDE { return true; }
4267
4268  template <typename T> static T Compute(T x, T y) { return x + y; }
4269
4270  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4271    return GetBlock()->GetGraph()->GetIntConstant(
4272        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4273  }
4274  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4275    return GetBlock()->GetGraph()->GetLongConstant(
4276        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4277  }
4278  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4279    return GetBlock()->GetGraph()->GetFloatConstant(
4280        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4281  }
4282  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4283    return GetBlock()->GetGraph()->GetDoubleConstant(
4284        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4285  }
4286
4287  DECLARE_INSTRUCTION(Add);
4288
4289 private:
4290  DISALLOW_COPY_AND_ASSIGN(HAdd);
4291};
4292
4293class HSub FINAL : public HBinaryOperation {
4294 public:
4295  HSub(Primitive::Type result_type,
4296       HInstruction* left,
4297       HInstruction* right,
4298       uint32_t dex_pc = kNoDexPc)
4299      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4300
4301  template <typename T> static T Compute(T x, T y) { return x - y; }
4302
4303  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4304    return GetBlock()->GetGraph()->GetIntConstant(
4305        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4306  }
4307  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4308    return GetBlock()->GetGraph()->GetLongConstant(
4309        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4310  }
4311  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4312    return GetBlock()->GetGraph()->GetFloatConstant(
4313        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4314  }
4315  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4316    return GetBlock()->GetGraph()->GetDoubleConstant(
4317        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4318  }
4319
4320  DECLARE_INSTRUCTION(Sub);
4321
4322 private:
4323  DISALLOW_COPY_AND_ASSIGN(HSub);
4324};
4325
4326class HMul FINAL : public HBinaryOperation {
4327 public:
4328  HMul(Primitive::Type result_type,
4329       HInstruction* left,
4330       HInstruction* right,
4331       uint32_t dex_pc = kNoDexPc)
4332      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4333
4334  bool IsCommutative() const OVERRIDE { return true; }
4335
4336  template <typename T> static T Compute(T x, T y) { return x * y; }
4337
4338  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4339    return GetBlock()->GetGraph()->GetIntConstant(
4340        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4341  }
4342  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4343    return GetBlock()->GetGraph()->GetLongConstant(
4344        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4345  }
4346  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4347    return GetBlock()->GetGraph()->GetFloatConstant(
4348        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4349  }
4350  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4351    return GetBlock()->GetGraph()->GetDoubleConstant(
4352        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4353  }
4354
4355  DECLARE_INSTRUCTION(Mul);
4356
4357 private:
4358  DISALLOW_COPY_AND_ASSIGN(HMul);
4359};
4360
// Division. The generated code may fall back to a runtime call (see
// SideEffectsForArchRuntimeCalls()).
class HDiv FINAL : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  // Integral division used during constant folding. `y` is guaranteed
  // non-zero (see the DCHECK below); -1 is special-cased because hardware
  // division of the minimum value by -1 traps on x86(_64).
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  // Floating-point division used during constant folding.
  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};
4412
// Remainder. The generated code may fall back to a runtime call (see
// SideEffectsForArchRuntimeCalls()).
class HRem FINAL : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  // Integral remainder used during constant folding. `y` is guaranteed
  // non-zero (see the DCHECK below); -1 is special-cased (remainder is always
  // 0) because hardware division by -1 traps on x86(_64) for the minimum value.
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  // Floating-point remainder used during constant folding; delegates to
  // std::fmod.
  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};
4463
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check passes its value through, so it reports the input's type.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Carries no instruction-specific data, so any two checks compare equal.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // May throw, hence needs an environment for deoptimization/stack maps.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4489
// Left shift. The shift distance is always an int; only its low bits are used
// (masked by the maximum shift distance for the operand width, see Compute()).
class HShl FINAL : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Constant-folding helper; masks the distance so it stays within the
  // operand's bit width.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value << (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Shift distances are ints; the remaining overloads are invalid by design.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};
4535
// Right shift. The shift distance is always an int; only its low bits are used
// (masked by the maximum shift distance for the operand width, see Compute()).
// NOTE(review): for signed operands this relies on `>>` behaving as an
// arithmetic (sign-extending) shift -- implementation-defined in C++.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Constant-folding helper; masks the distance so it stays within the
  // operand's bit width.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Shift distances are ints; the remaining overloads are invalid by design.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4581
// Unsigned (logical) right shift: the value is reinterpreted as unsigned
// before shifting, so vacated high bits are zero-filled. The shift distance is
// always an int; only its low bits are used (see Compute()).
class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Constant-folding helper; shifts the unsigned reinterpretation of `value`
  // and casts back, with the distance masked to the operand's bit width.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Shift distances are ints; the remaining overloads are invalid by design.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};
4629
4630class HAnd FINAL : public HBinaryOperation {
4631 public:
4632  HAnd(Primitive::Type result_type,
4633       HInstruction* left,
4634       HInstruction* right,
4635       uint32_t dex_pc = kNoDexPc)
4636      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4637
4638  bool IsCommutative() const OVERRIDE { return true; }
4639
4640  template <typename T> static T Compute(T x, T y) { return x & y; }
4641
4642  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4643    return GetBlock()->GetGraph()->GetIntConstant(
4644        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4645  }
4646  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4647    return GetBlock()->GetGraph()->GetLongConstant(
4648        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4649  }
4650  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4651                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4652    LOG(FATAL) << DebugName() << " is not defined for float values";
4653    UNREACHABLE();
4654  }
4655  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4656                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4657    LOG(FATAL) << DebugName() << " is not defined for double values";
4658    UNREACHABLE();
4659  }
4660
4661  DECLARE_INSTRUCTION(And);
4662
4663 private:
4664  DISALLOW_COPY_AND_ASSIGN(HAnd);
4665};
4666
4667class HOr FINAL : public HBinaryOperation {
4668 public:
4669  HOr(Primitive::Type result_type,
4670      HInstruction* left,
4671      HInstruction* right,
4672      uint32_t dex_pc = kNoDexPc)
4673      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4674
4675  bool IsCommutative() const OVERRIDE { return true; }
4676
4677  template <typename T> static T Compute(T x, T y) { return x | y; }
4678
4679  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4680    return GetBlock()->GetGraph()->GetIntConstant(
4681        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4682  }
4683  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4684    return GetBlock()->GetGraph()->GetLongConstant(
4685        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4686  }
4687  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4688                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4689    LOG(FATAL) << DebugName() << " is not defined for float values";
4690    UNREACHABLE();
4691  }
4692  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4693                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4694    LOG(FATAL) << DebugName() << " is not defined for double values";
4695    UNREACHABLE();
4696  }
4697
4698  DECLARE_INSTRUCTION(Or);
4699
4700 private:
4701  DISALLOW_COPY_AND_ASSIGN(HOr);
4702};
4703
// Bitwise exclusive-or of two integral values. Has no side effects.
class HXor FINAL : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // XOR commutes, so optimizations are free to swap the operands.
  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  // Constant folding: produce a graph-owned constant for constant operands.
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Bitwise operations only exist for integral types.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};
4740
4741class HRor FINAL : public HBinaryOperation {
4742 public:
4743  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4744    : HBinaryOperation(result_type, value, distance) {
4745    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4746    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4747  }
4748
4749  template <typename T>
4750  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
4751    typedef typename std::make_unsigned<T>::type V;
4752    V ux = static_cast<V>(value);
4753    if ((distance & max_shift_value) == 0) {
4754      return static_cast<T>(ux);
4755    } else {
4756      const V reg_bits = sizeof(T) * 8;
4757      return static_cast<T>(ux >> (distance & max_shift_value)) |
4758                           (value << (reg_bits - (distance & max_shift_value)));
4759    }
4760  }
4761
4762  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4763    return GetBlock()->GetGraph()->GetIntConstant(
4764        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4765  }
4766  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4767    return GetBlock()->GetGraph()->GetLongConstant(
4768        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4769  }
4770  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4771                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4772    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4773    UNREACHABLE();
4774  }
4775  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4776                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4777    LOG(FATAL) << DebugName() << " is not defined for float values";
4778    UNREACHABLE();
4779  }
4780  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4781                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4782    LOG(FATAL) << DebugName() << " is not defined for double values";
4783    UNREACHABLE();
4784  }
4785
4786  DECLARE_INSTRUCTION(Ror);
4787
4788 private:
4789  DISALLOW_COPY_AND_ASSIGN(HRor);
4790};
4791
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    // 'this' is never null; other parameters are conservatively assumed
    // nullable until SetCanBeNull() refines this.
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  // Index of the parameter's declared type in `dex_file_`.
  uint16_t GetTypeIndex() const { return type_index_; }
  // Position of the parameter in the method's parameter list.
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4835
4836class HNot FINAL : public HUnaryOperation {
4837 public:
4838  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4839      : HUnaryOperation(result_type, input, dex_pc) {}
4840
4841  bool CanBeMoved() const OVERRIDE { return true; }
4842  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4843    return true;
4844  }
4845
4846  template <typename T> static T Compute(T x) { return ~x; }
4847
4848  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4849    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4850  }
4851  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4852    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4853  }
4854  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4855    LOG(FATAL) << DebugName() << " is not defined for float values";
4856    UNREACHABLE();
4857  }
4858  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4859    LOG(FATAL) << DebugName() << " is not defined for double values";
4860    UNREACHABLE();
4861  }
4862
4863  DECLARE_INSTRUCTION(Not);
4864
4865 private:
4866  DISALLOW_COPY_AND_ASSIGN(HNot);
4867};
4868
// Logical negation of a boolean-typed value (must be 0 or 1).
class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  // A pure operation: movable, and equal to any other HBooleanNot with
  // equal inputs.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The input must already be 0 or 1 (debug-checked).
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  // Constant folding; only the int-typed case is meaningful for booleans.
  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};
4905
// Conversion between primitive types (e.g. int->long, float->int).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Conversions are pure: movable, and equal whenever their inputs are equal.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  // FP->integral and long->FP conversions may be lowered to runtime calls on
  // some architectures, so they conservatively carry the CanTriggerGC effect.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
4948
// Sentinel register number (-1 wraps to all-ones in uint32_t) meaning
// "no register assigned".
static constexpr uint32_t kNoRegNumber = -1;
4950
4951class HNullCheck FINAL : public HExpression<1> {
4952 public:
4953  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
4954  // constructor.
4955  HNullCheck(HInstruction* value, uint32_t dex_pc)
4956      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4957    SetRawInputAt(0, value);
4958  }
4959
4960  bool CanBeMoved() const OVERRIDE { return true; }
4961  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4962    return true;
4963  }
4964
4965  bool NeedsEnvironment() const OVERRIDE { return true; }
4966
4967  bool CanThrow() const OVERRIDE { return true; }
4968
4969  bool CanBeNull() const OVERRIDE { return false; }
4970
4971
4972  DECLARE_INSTRUCTION(NullCheck);
4973
4974 private:
4975  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
4976};
4977
// Immutable bundle of resolved-field metadata (offset, type, volatility and
// dex-file identity) embedded in field get/set instructions.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  // Byte offset of the field within its object (or class, for statics).
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // Field index in `dex_file_`.
  const uint32_t index_;
  // Class-def index of the declaring class in `dex_file_`.
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};
5012
// Load of an instance field from the object in input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets of the same object are equal if they read the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An access to the receiver can serve as an implicit null check when the
  // field offset is small enough to fault on a null base address.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash so gets of different fields differ.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5064
// Store of `value` (input 1) into an instance field of `object` (input 0).
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Conservatively assume the stored value may be null; analyses can
    // clear this later via ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // A store to the receiver can serve as an implicit null check when the
  // field offset is small enough to fault on a null base address.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5115
// Load of an array element: array (input 0) indexed by index (input 1).
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  // Pure read: movable, and equal to any other ArrayGet with equal inputs.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Two gets are "equivalent" when they originate from the same dex pc; the
  // debug checks verify they read the same array/index and that their types
  // differ only as int/long vs float/double reinterpretations.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5178
// Store of `value` (input 2) into `array` (input 0) at `index` (input 1).
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Reference stores need a runtime type check (ArrayStoreException).
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called when analysis proves the stored value's type always matches.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type as recorded at build time, without the float/double
  // refinement applied by GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recompute this store's side effects from the (possibly refined)
  // component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  // Reference stores may call into the runtime, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5278
// Length of the array in input 0; always produces an int.
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  // Pure read of an immutable property: movable and deduplicable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Reading the length can fault on a null array, serving as the null check.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5314
// Checks that `index` (input 0) is within `length` (input 1); passes the
// index through as its result.
class HBoundsCheck FINAL : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
  HBoundsCheck(HInstruction* index,
               HInstruction* length,
               uint32_t dex_pc,
               uint32_t string_char_at_method_index = DexFile::kDexNoIndex)
      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc),
        string_char_at_method_index_(string_char_at_method_index) {
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  // The check depends only on its inputs: movable and deduplicable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing the exception requires an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool IsStringCharAt() const { return GetStringCharAtMethodIndex() != DexFile::kDexNoIndex; }
  uint32_t GetStringCharAtMethodIndex() const { return string_char_at_method_index_; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  // We treat a String as an array, creating the HBoundsCheck from String.charAt()
  // intrinsic in the instruction simplifier. We want to include the String.charAt()
  // in the stack trace if we actually throw the StringIndexOutOfBoundsException,
  // so we need to create an HEnvironment which will be translated to an InlineInfo
  // indicating the extra stack frame. Since we add this HEnvironment quite late,
  // in the PrepareForRegisterAllocation pass, we need to remember the method index
  // from the invoke as we don't want to look again at the dex bytecode.
  uint32_t string_char_at_method_index_;  // DexFile::kDexNoIndex if regular array.

  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};
5358
5359class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5360 public:
5361  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5362      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5363
5364  bool NeedsEnvironment() const OVERRIDE {
5365    return true;
5366  }
5367
5368  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5369  SlowPathCode* GetSlowPath() const { return slow_path_; }
5370
5371  DECLARE_INSTRUCTION(SuspendCheck);
5372
5373 private:
5374  // Only used for code generation, in order to share the same slow path between back edges
5375  // of a same loop.
5376  SlowPathCode* slow_path_;
5377
5378  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5379};
5380
5381// Pseudo-instruction which provides the native debugger with mapping information.
5382// It ensures that we can generate line number and local variables at this point.
5383class HNativeDebugInfo : public HTemplateInstruction<0> {
5384 public:
5385  explicit HNativeDebugInfo(uint32_t dex_pc)
5386      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5387
5388  bool NeedsEnvironment() const OVERRIDE {
5389    return true;
5390  }
5391
5392  DECLARE_INSTRUCTION(NativeDebugInfo);
5393
5394 private:
5395  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5396};
5397
5398/**
5399 * Instruction to load a Class object.
5400 */
5401class HLoadClass FINAL : public HInstruction {
5402 public:
5403  // Determines how to load the Class.
5404  enum class LoadKind {
5405    // Use the Class* from the method's own ArtMethod*.
5406    kReferrersClass,
5407
5408    // Use boot image Class* address that will be known at link time.
5409    // Used for boot image classes referenced by boot image code in non-PIC mode.
5410    kBootImageLinkTimeAddress,
5411
5412    // Use PC-relative boot image Class* address that will be known at link time.
5413    // Used for boot image classes referenced by boot image code in PIC mode.
5414    kBootImageLinkTimePcRelative,
5415
5416    // Use a known boot image Class* address, embedded in the code by the codegen.
5417    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5418    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5419    // GetIncludePatchInformation().
5420    kBootImageAddress,
5421
5422    // Load from the resolved types array at an absolute address.
5423    // Used for classes outside the boot image referenced by JIT-compiled code.
5424    kDexCacheAddress,
5425
5426    // Load from resolved types array in the dex cache using a PC-relative load.
5427    // Used for classes outside boot image when we know that we can access
5428    // the dex cache arrays using a PC-relative load.
5429    kDexCachePcRelative,
5430
5431    // Load from resolved types array accessed through the class loaded from
5432    // the compiled method's own ArtMethod*. This is the default access type when
5433    // all other types are unavailable.
5434    kDexCacheViaMethod,
5435
5436    kLast = kDexCacheViaMethod
5437  };
5438
5439  HLoadClass(HCurrentMethod* current_method,
5440             uint16_t type_index,
5441             const DexFile& dex_file,
5442             bool is_referrers_class,
5443             uint32_t dex_pc,
5444             bool needs_access_check,
5445             bool is_in_dex_cache)
5446      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5447        special_input_(HUserRecord<HInstruction*>(current_method)),
5448        type_index_(type_index),
5449        dex_file_(dex_file),
5450        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5451    // Referrers class should not need access check. We never inline unverified
5452    // methods so we can't possibly end up in this situation.
5453    DCHECK(!is_referrers_class || !needs_access_check);
5454
5455    SetPackedField<LoadKindField>(
5456        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5457    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5458    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
5459    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5460  }
5461
  // Switches to an address-based load kind; `address` is stored in the
  // load_data_ union (see HasAddress()).
  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
    DCHECK(HasAddress(load_kind));
    load_data_.address = address;
    SetLoadKindInternal(load_kind);
  }

  // Switches to a load kind that references the class by (dex file, type index).
  // Both must match what this instruction was constructed with.
  void SetLoadKindWithTypeReference(LoadKind load_kind,
                                    const DexFile& dex_file,
                                    uint32_t type_index) {
    DCHECK(HasTypeReference(load_kind));
    DCHECK(IsSameDexFile(dex_file_, dex_file));
    DCHECK_EQ(type_index_, type_index);
    SetLoadKindInternal(load_kind);
  }

  // Switches to a load kind that reads the dex cache element at `element_index`.
  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
                                        const DexFile& dex_file,
                                        uint32_t element_index) {
    DCHECK(HasDexCacheReference(load_kind));
    DCHECK(IsSameDexFile(dex_file_, dex_file));
    load_data_.dex_cache_element_index = element_index;
    SetLoadKindInternal(load_kind);
  }
5485
5486  LoadKind GetLoadKind() const {
5487    return GetPackedField<LoadKindField>();
5488  }
5489
5490  bool CanBeMoved() const OVERRIDE { return true; }
5491
5492  bool InstructionDataEquals(const HInstruction* other) const;
5493
5494  size_t ComputeHashCode() const OVERRIDE { return type_index_; }
5495
5496  bool CanBeNull() const OVERRIDE { return false; }
5497
5498  bool NeedsEnvironment() const OVERRIDE {
5499    return CanCallRuntime();
5500  }
5501
5502  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5503    // The entrypoint the code generator is going to call does not do
5504    // clinit of the class.
5505    DCHECK(!NeedsAccessCheck());
5506    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5507  }
5508
5509  bool CanCallRuntime() const {
5510    return MustGenerateClinitCheck() ||
5511           (!IsReferrersClass() && !IsInDexCache()) ||
5512           NeedsAccessCheck();
5513  }
5514
5515
5516  bool CanThrow() const OVERRIDE {
5517    return CanCallRuntime();
5518  }
5519
  // Reference type info for the loaded class; set during reference type propagation.
  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  uint32_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Valid only for dex cache reference load kinds; defined below the class.
  uint32_t GetDexCacheElementOffset() const;

  // Valid only for address-based load kinds (see HasAddress()).
  uint64_t GetAddress() const {
    DCHECK(HasAddress(GetLoadKind()));
    return load_data_.address;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }
5545
  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }

  // Record that the class is known to be in the dex cache. The instruction then
  // no longer needs to call into the runtime, so drop environment and effects.
  void MarkInDexCache() {
    SetPackedFlag<kFlagIsInDexCache>(true);
    DCHECK(!NeedsEnvironment());
    RemoveEnvironment();
    SetSideEffects(SideEffects::None());
  }

  // Defined below the class; see the note there.
  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // The only (optional) input is the special input: expose a 0- or 1-element array.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }
5569
5570  DECLARE_INSTRUCTION(LoadClass);
5571
5572 private:
5573  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5574  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
5575  // Whether this instruction must generate the initialization check.
5576  // Used for code generation.
5577  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
5578  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5579  static constexpr size_t kFieldLoadKindSize =
5580      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5581  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5582  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5583  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5584
5585  static bool HasTypeReference(LoadKind load_kind) {
5586    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5587        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5588        load_kind == LoadKind::kDexCacheViaMethod ||
5589        load_kind == LoadKind::kReferrersClass;
5590  }
5591
5592  static bool HasAddress(LoadKind load_kind) {
5593    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
5594  }
5595
5596  static bool HasDexCacheReference(LoadKind load_kind) {
5597    return load_kind == LoadKind::kDexCachePcRelative;
5598  }
5599
5600  void SetLoadKindInternal(LoadKind load_kind);
5601
5602  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5603  // For other load kinds it's empty or possibly some architecture-specific instruction
5604  // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
5605  HUserRecord<HInstruction*> special_input_;
5606
5607  const uint16_t type_index_;
5608  const DexFile& dex_file_;
5609
5610  union {
5611    uint32_t dex_cache_element_index;   // Only for dex cache reference.
5612    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
5613  } load_data_;
5614
5615  ReferenceTypeInfo loaded_class_rti_;
5616
5617  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5618};
5619std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5620
5621// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5622inline uint32_t HLoadClass::GetDexCacheElementOffset() const {
5623  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5624  return load_data_.dex_cache_element_index;
5625}
5626
5627// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5628inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5629  // The special input is used for PC-relative loads on some architectures,
5630  // including literal pool loads, which are PC-relative too.
5631  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5632         GetLoadKind() == LoadKind::kDexCachePcRelative ||
5633         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5634         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5635  DCHECK(special_input_.GetInstruction() == nullptr);
5636  special_input_ = HUserRecord<HInstruction*>(special_input);
5637  special_input->AddUseAt(this, 0);
5638}
5639
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    // Note: codegen needs to emit a linker patch if indicated by compiler options'
    // GetIncludePatchInformation().
    kBootImageAddress,

    // Load from the resolved strings array at an absolute address.
    // Used for strings outside the boot image referenced by JIT-compiled code.
    kDexCacheAddress,

    // Load from resolved strings array in the dex cache using a PC-relative load.
    // Used for strings outside boot image when we know that we can access
    // the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    // Keep last: used to size the packed LoadKindField below.
    kLast = kDexCacheViaMethod
  };

  // Starts with kDexCacheViaMethod; callers may upgrade the load kind
  // later via the SetLoadKindWith*() methods.
  HLoadString(HCurrentMethod* current_method,
              uint32_t string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index) {
    SetPackedFlag<kFlagIsInDexCache>(false);
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
    load_data_.ref.dex_file = &dex_file;
  }

  // Switches to an address-based load kind; `address` is stored in the
  // load_data_ union (see HasAddress()).
  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
    DCHECK(HasAddress(load_kind));
    load_data_.address = address;
    SetLoadKindInternal(load_kind);
  }

  // Switches to a load kind that references the string by (dex file, string index).
  void SetLoadKindWithStringReference(LoadKind load_kind,
                                      const DexFile& dex_file,
                                      uint32_t string_index) {
    DCHECK(HasStringReference(load_kind));
    load_data_.ref.dex_file = &dex_file;
    string_index_ = string_index;
    SetLoadKindInternal(load_kind);
  }

  // Switches to a load kind that reads the dex cache element at `element_index`.
  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
                                        const DexFile& dex_file,
                                        uint32_t element_index) {
    DCHECK(HasDexCacheReference(load_kind));
    load_data_.ref.dex_file = &dex_file;
    load_data_.ref.dex_cache_element_index = element_index;
    SetLoadKindInternal(load_kind);
  }

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  // Defined below the class; valid for string/dex-cache reference load kinds.
  const DexFile& GetDexFile() const;

  uint32_t GetStringIndex() const {
    DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
    return string_index_;
  }

  // Valid only for dex cache reference load kinds; defined below the class.
  uint32_t GetDexCacheElementOffset() const;

  // Valid only for address-based load kinds (see HasAddress()).
  uint64_t GetAddress() const {
    DCHECK(HasAddress(GetLoadKind()));
    return load_data_.address;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress) {
      return false;
    }
    return !IsInDexCache();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }

  // Record that the string is known to be in the dex cache. The instruction then
  // no longer needs to call into the runtime, so drop environment and effects.
  void MarkInDexCache() {
    SetPackedFlag<kFlagIsInDexCache>(true);
    DCHECK(!NeedsEnvironment());
    RemoveEnvironment();
    SetSideEffects(SideEffects::None());
  }

  // Defined below the class; see the note there.
  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // The only (optional) input is the special input: expose a 0- or 1-element array.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  // Packed field layout, on top of HInstruction's generic packed bits.
  static constexpr size_t kFlagIsInDexCache = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  // Whether the given load kind references the string by (dex file, string index).
  static bool HasStringReference(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kDexCacheViaMethod;
  }

  // Whether the given load kind stores its payload in load_data_.address.
  static bool HasAddress(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
  }

  // Whether the given load kind stores a dex cache element index.
  static bool HasDexCacheReference(LoadKind load_kind) {
    return load_kind == LoadKind::kDexCachePcRelative;
  }

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  // String index serves also as the hash code and it's also needed for slow-paths,
  // so it must not be overwritten with other load data.
  uint32_t string_index_;

  // Payload interpreted according to the current load kind; see Has*() above.
  union {
    struct {
      const DexFile* dex_file;            // For string reference and dex cache reference.
      uint32_t dex_cache_element_index;   // Only for dex cache reference.
    } ref;
    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
  } load_data_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5825std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5826
5827// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5828inline const DexFile& HLoadString::GetDexFile() const {
5829  DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
5830      << GetLoadKind();
5831  return *load_data_.ref.dex_file;
5832}
5833
5834// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5835inline uint32_t HLoadString::GetDexCacheElementOffset() const {
5836  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5837  return load_data_.ref.dex_cache_element_index;
5838}
5839
5840// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5841inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5842  // The special input is used for PC-relative loads on some architectures,
5843  // including literal pool loads, which are PC-relative too.
5844  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5845         GetLoadKind() == LoadKind::kDexCachePcRelative ||
5846         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5847         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5848  // HLoadString::GetInputRecords() returns an empty array at this point,
5849  // so use the GetInputRecords() from the base class to set the input record.
5850  DCHECK(special_input_.GetInstruction() == nullptr);
5851  special_input_ = HUserRecord<HInstruction*>(special_input);
5852  special_input->AddUseAt(this, 0);
5853}
5854
5855/**
5856 * Performs an initialization check on its Class object input.
5857 */
5858class HClinitCheck FINAL : public HExpression<1> {
5859 public:
5860  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5861      : HExpression(
5862            Primitive::kPrimNot,
5863            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5864            dex_pc) {
5865    SetRawInputAt(0, constant);
5866  }
5867
5868  bool CanBeMoved() const OVERRIDE { return true; }
5869  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5870    return true;
5871  }
5872
5873  bool NeedsEnvironment() const OVERRIDE {
5874    // May call runtime to initialize the class.
5875    return true;
5876  }
5877
5878  bool CanThrow() const OVERRIDE { return true; }
5879
5880  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5881
5882  DECLARE_INSTRUCTION(ClinitCheck);
5883
5884 private:
5885  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5886};
5887
// Read of a resolved static field.
class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Only non-volatile gets may be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two static field gets are considered equal when they read the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // Mix the field offset into the generic hash code.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
5936
// Write of a resolved static field. Input 0 is the class, input 1 the value.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Conservatively assume the stored value may be null until proven otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
5984
// Read of an instance field that could not be resolved at compile time.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  // Unresolved accesses are handled at runtime, so an environment is needed
  // and the operation may throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
6009
6010class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6011 public:
6012  HUnresolvedInstanceFieldSet(HInstruction* obj,
6013                              HInstruction* value,
6014                              Primitive::Type field_type,
6015                              uint32_t field_index,
6016                              uint32_t dex_pc)
6017      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6018        field_index_(field_index) {
6019    SetPackedField<FieldTypeField>(field_type);
6020    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6021    SetRawInputAt(0, obj);
6022    SetRawInputAt(1, value);
6023  }
6024
6025  bool NeedsEnvironment() const OVERRIDE { return true; }
6026  bool CanThrow() const OVERRIDE { return true; }
6027
6028  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6029  uint32_t GetFieldIndex() const { return field_index_; }
6030
6031  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6032
6033 private:
6034  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6035  static constexpr size_t kFieldFieldTypeSize =
6036      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6037  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6038      kFieldFieldType + kFieldFieldTypeSize;
6039  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6040                "Too many packed fields.");
6041  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6042
6043  const uint32_t field_index_;
6044
6045  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6046};
6047
// Read of a static field that could not be resolved at compile time.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  // Unresolved accesses are handled at runtime, so an environment is needed
  // and the operation may throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6070
// Write of a static field that could not be resolved at compile time.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    // The value's primitive kind must agree with the declared field type.
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  // Unresolved accesses are handled at runtime, so an environment is needed
  // and the operation may throw.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // The field type is kept in a packed bit field rather than a member.
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6106
// Implement the move-exception DEX instruction.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // The pending exception loaded here is expected to be non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6120
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  // AllWrites conservatively models the write to the thread-local storage.
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6133
6134class HThrow FINAL : public HTemplateInstruction<1> {
6135 public:
6136  HThrow(HInstruction* exception, uint32_t dex_pc)
6137      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6138    SetRawInputAt(0, exception);
6139  }
6140
6141  bool IsControlFlow() const OVERRIDE { return true; }
6142
6143  bool NeedsEnvironment() const OVERRIDE { return true; }
6144
6145  bool CanThrow() const OVERRIDE { return true; }
6146
6147
6148  DECLARE_INSTRUCTION(Throw);
6149
6150 private:
6151  DISALLOW_COPY_AND_ASSIGN(HThrow);
6152};
6153
6154/**
6155 * Implementation strategies for the code generator of a HInstanceOf
6156 * or `HCheckCast`.
6157 */
6158enum class TypeCheckKind {
6159  kUnresolvedCheck,       // Check against an unresolved type.
6160  kExactCheck,            // Can do a single class compare.
6161  kClassHierarchyCheck,   // Can just walk the super class chain.
6162  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6163  kInterfaceCheck,        // No optimization yet when checking against an interface.
6164  kArrayObjectCheck,      // Can just check if the array is not primitive.
6165  kArrayCheck,            // No optimization yet when checking against a generic array.
6166  kLast = kArrayCheck
6167};
6168
6169std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6170
// Implements the instance-of DEX instruction: yields a boolean telling
// whether input 0 is an instance of the class loaded by input 1.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is needed only when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed field layout: type check kind followed by the null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6224
// Pins a (possibly narrower) reference type on its single reference input
// without changing the value. {Get,Set}UpperBound are driven by reference
// type propagation (see comment on upper_bound_ below).
class HBoundType FINAL : public HExpression<1> {
 public:
  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    // Bound types only make sense for reference-typed inputs.
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // can_be_null may only be set to true if the upper bound allows null.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6268
// Runtime check that the object input can be cast to the class described by
// the HLoadClass input; may throw (see NeedsEnvironment/CanThrow below).
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    // There is no out-of-line data to compare for this instruction.
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed-field layout: type-check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6313
// Emits a memory barrier of the given kind. Modeled with write/read side
// effects on all fields and arrays so no memory access is moved across it.
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // The barrier kind is stored in the instruction's packed fields.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6338
6339class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6340 public:
6341  enum class OperationKind {
6342    kEnter,
6343    kExit,
6344    kLast = kExit
6345  };
6346
6347  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6348    : HTemplateInstruction(
6349          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6350          dex_pc) {
6351    SetPackedField<OperationKindField>(kind);
6352    SetRawInputAt(0, object);
6353  }
6354
6355  // Instruction may go into runtime, so we need an environment.
6356  bool NeedsEnvironment() const OVERRIDE { return true; }
6357
6358  bool CanThrow() const OVERRIDE {
6359    // Verifier guarantees that monitor-exit cannot throw.
6360    // This is important because it allows the HGraphBuilder to remove
6361    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6362    return IsEnter();
6363  }
6364
6365  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6366  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6367
6368  DECLARE_INSTRUCTION(MonitorOperation);
6369
6370 private:
6371  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6372  static constexpr size_t kFieldOperationKindSize =
6373      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6374  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6375      kFieldOperationKind + kFieldOperationKindSize;
6376  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6377                "Too many packed fields.");
6378  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6379
6380 private:
6381  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6382};
6383
// Branch-free selection: yields `true_value` or `false_value` depending on
// `condition` (lowered as a conditional move on architectures that have one).
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both branches must agree on the (phi-normalized) type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    // There is no out-of-line data to compare for this instruction.
    return true;
  }

  bool CanBeNull() const OVERRIDE {
    // The result can be null if either selectable value can be.
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6420
// A single (source -> destination) move handled by the parallel move
// resolver, which may mark it pending or eliminate it while resolving cycles.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    // A pending move has a valid source but a cleared destination.
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    // An eliminated move has both operands cleared (see Eliminate()).
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6494
6495std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6496
6497static constexpr size_t kDefaultNumberOfMoves = 4;
6498
// A group of moves that must behave as if performed simultaneously:
// destinations never overlap (enforced below in debug builds), so the
// resolver may order or break cycles freely.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that no two moves for the same
  // instruction are added (except for the spill-slot special case) and that
  // no two destinations overlap.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6554
6555}  // namespace art
6556
6557#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6558#include "nodes_shared.h"
6559#endif
6560#ifdef ART_ENABLE_CODEGEN_arm
6561#include "nodes_arm.h"
6562#endif
6563#ifdef ART_ENABLE_CODEGEN_arm64
6564#include "nodes_arm64.h"
6565#endif
6566#ifdef ART_ENABLE_CODEGEN_mips
6567#include "nodes_mips.h"
6568#endif
6569#ifdef ART_ENABLE_CODEGEN_x86
6570#include "nodes_x86.h"
6571#endif
6572
6573namespace art {
6574
// Base class for graph traversals. Subclasses override Visit##name for the
// instruction classes they care about; unhandled instructions fall through
// to VisitInstruction.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Default instruction handler; does nothing.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes; each defaults to VisitInstruction.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6604
// Visitor variant whose per-instruction handlers delegate to the handler of
// the instruction's super class instead of directly to VisitInstruction.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6621
// Iterates over the graph's blocks in the order they were inserted.
class HInsertionOrderIterator : public ValueObject {
 public:
  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}

  bool Done() const { return index_ == graph_.GetBlocks().size(); }
  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
};
6636
// Iterates over the graph's blocks in reverse post-order; requires the
// reverse post-order to have been computed already.
class HReversePostOrderIterator : public ValueObject {
 public:
  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
};
6654
// Iterates over the graph's blocks in post-order by walking the stored
// reverse post-order backwards; requires that order to have been computed.
class HPostOrderIterator : public ValueObject {
 public:
  explicit HPostOrderIterator(const HGraph& graph)
      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == 0; }
  // `index_` is one past the current element, hence the `- 1u`.
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
  void Advance() { --index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
};
6673
6674class HLinearPostOrderIterator : public ValueObject {
6675 public:
6676  explicit HLinearPostOrderIterator(const HGraph& graph)
6677      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
6678
6679  bool Done() const { return index_ == 0; }
6680
6681  HBasicBlock* Current() const { return order_[index_ - 1u]; }
6682
6683  void Advance() {
6684    --index_;
6685    DCHECK_GE(index_, 0U);
6686  }
6687
6688 private:
6689  const ArenaVector<HBasicBlock*>& order_;
6690  size_t index_;
6691
6692  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
6693};
6694
// Iterates over the graph's linear order from first to last block.
class HLinearOrderIterator : public ValueObject {
 public:
  explicit HLinearOrderIterator(const HGraph& graph)
      : order_(graph.GetLinearOrder()), index_(0) {}

  bool Done() const { return index_ == order_.size(); }
  HBasicBlock* Current() const { return order_[index_]; }
  void Advance() { ++index_; }

 private:
  const ArenaVector<HBasicBlock*>& order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};
6710
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip ahead to the first block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    // Scan forward to the next index whose bit is set in the loop's block set.
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;  // Bit i is set iff block i is in the loop.
  const ArenaVector<HBasicBlock*>& blocks_;  // All blocks of the graph.
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};
6743
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead to the first RPO block that belongs to the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    // Scan forward in RPO to the next block whose id is in the loop's block set.
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;  // Bit i is set iff block id i is in the loop.
  const ArenaVector<HBasicBlock*>& blocks_;  // Graph blocks in reverse post-order.
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
6776
6777inline int64_t Int64FromConstant(HConstant* constant) {
6778  if (constant->IsIntConstant()) {
6779    return constant->AsIntConstant()->GetValue();
6780  } else if (constant->IsLongConstant()) {
6781    return constant->AsLongConstant()->GetValue();
6782  } else {
6783    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6784    return 0;
6785  }
6786}
6787
6788#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6789  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6790  inline const H##type* HInstruction::As##type() const {                       \
6791    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6792  }                                                                            \
6793  inline H##type* HInstruction::As##type() {                                   \
6794    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6795  }
6796
6797  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6798#undef INSTRUCTION_TYPE_CHECK
6799
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting just after position `after`. Blocks following `after` are moved
// towards the (grown) end of the vector accordingly.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift the tail [after + 1, old_size) to end at the new vector end,
  // leaving `number_of_new_blocks` unspecified slots right after `after`.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6811
6812}  // namespace art
6813
6814#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6815