// nodes.h revision bf12e4d4209ac4e8fb98b4fd5193208adc7fe3ff
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/stl_util.h"
28#include "dex/compiler_enums.h"
29#include "entrypoints/quick/quick_entrypoints_enum.h"
30#include "handle.h"
31#include "handle_scope.h"
32#include "invoke_type.h"
33#include "locations.h"
34#include "method_reference.h"
35#include "mirror/class.h"
36#include "offsets.h"
37#include "primitive.h"
38#include "utils/array_ref.h"
39#include "utils/intrusive_forward_list.h"
40
41namespace art {
42
43class GraphChecker;
44class HBasicBlock;
45class HCurrentMethod;
46class HDoubleConstant;
47class HEnvironment;
48class HFloatConstant;
49class HGraphBuilder;
50class HGraphVisitor;
51class HInstruction;
52class HIntConstant;
53class HInvoke;
54class HLongConstant;
55class HNullConstant;
56class HPhi;
57class HSuspendCheck;
58class HTryBoundary;
59class LiveInterval;
60class LocationSummary;
61class SlowPathCode;
62class SsaBuilder;
63
64namespace mirror {
65class DexCache;
66}  // namespace mirror
67
// Initial reservation sizes for the per-graph and per-block containers
// (see HGraph constructor and HBasicBlock constructor). These are hints
// only; the containers grow beyond them as needed.
// Declared constexpr for consistency with the other constants in this file.
static constexpr int kDefaultNumberOfBlocks = 8;
static constexpr int kDefaultNumberOfSuccessors = 2;
static constexpr int kDefaultNumberOfPredecessors = 2;
static constexpr int kDefaultNumberOfExceptionalPredecessors = 0;
static constexpr int kDefaultNumberOfDominatedBlocks = 1;
static constexpr int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;
79
80static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
81static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
82
83static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
84
85static constexpr uint32_t kNoDexPc = -1;
86
// Comparison kinds for conditional instructions. The comment next to each
// enumerator gives the comparison it denotes.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers ("B"/"A" = below/above, x86-style naming).
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
};
102
// Outcome of whole-graph analyses such as HGraph::BuildDominatorTree() and
// HGraph::AnalyzeLoops(). Only kAnalysisSuccess indicates the graph is fully
// analyzable.
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  // A loop header is a catch block, i.e. a throw-catch loop
  // (see HGraph::AnalyzeLoops below).
  kAnalysisFailThrowCatchLoop,
  // Presumably an array operation whose type could not be resolved -- set by
  // analysis code outside this header; confirm against SsaBuilder.
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};
110
// A doubly-linked list of HInstructions. HBasicBlock keeps two such lists:
// one for its phis and one for its regular instructions (see
// HBasicBlock::GetInstructions()/GetPhis() below). The links live in the
// instructions themselves, hence the friend declarations at the bottom.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` to / unlink `instruction` from this list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Forget all instructions. Only drops the list's own pointers; the
  // instructions themselves are not destroyed here.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Add the contents of `instruction_list` after/before `cursor`, or append
  // it to this list (Add).
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  // The list is intrusive: these classes manipulate the links directly.
  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
156
157class ReferenceTypeInfo : ValueObject {
158 public:
159  typedef Handle<mirror::Class> TypeHandle;
160
161  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
162
163  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
164    return ReferenceTypeInfo(type_handle, is_exact);
165  }
166
167  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
168
169  static bool IsValidHandle(TypeHandle handle) {
170    return handle.GetReference() != nullptr;
171  }
172
173  bool IsValid() const {
174    return IsValidHandle(type_handle_);
175  }
176
177  bool IsExact() const { return is_exact_; }
178
179  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
180    DCHECK(IsValid());
181    return GetTypeHandle()->IsObjectClass();
182  }
183
184  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
185    DCHECK(IsValid());
186    return GetTypeHandle()->IsStringClass();
187  }
188
189  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
190    DCHECK(IsValid());
191    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
192  }
193
194  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
195    DCHECK(IsValid());
196    return GetTypeHandle()->IsInterface();
197  }
198
199  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
200    DCHECK(IsValid());
201    return GetTypeHandle()->IsArrayClass();
202  }
203
204  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
205    DCHECK(IsValid());
206    return GetTypeHandle()->IsPrimitiveArray();
207  }
208
209  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
210    DCHECK(IsValid());
211    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
212  }
213
214  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
215    DCHECK(IsValid());
216    if (!IsExact()) return false;
217    if (!IsArrayClass()) return false;
218    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
219  }
220
221  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
222    DCHECK(IsValid());
223    if (!IsExact()) return false;
224    if (!IsArrayClass()) return false;
225    if (!rti.IsArrayClass()) return false;
226    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
227        rti.GetTypeHandle()->GetComponentType());
228  }
229
230  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
231
232  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
233    DCHECK(IsValid());
234    DCHECK(rti.IsValid());
235    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
236  }
237
238  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
239    DCHECK(IsValid());
240    DCHECK(rti.IsValid());
241    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
242        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
243  }
244
245  // Returns true if the type information provide the same amount of details.
246  // Note that it does not mean that the instructions have the same actual type
247  // (because the type can be the result of a merge).
248  bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
249    if (!IsValid() && !rti.IsValid()) {
250      // Invalid types are equal.
251      return true;
252    }
253    if (!IsValid() || !rti.IsValid()) {
254      // One is valid, the other not.
255      return false;
256    }
257    return IsExact() == rti.IsExact()
258        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
259  }
260
261 private:
262  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
263  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
264      : type_handle_(type_handle), is_exact_(is_exact) { }
265
266  // The class of the object.
267  TypeHandle type_handle_;
268  // Whether or not the type is exact or a superclass of the actual type.
269  // Whether or not we have any information about this type.
270  bool is_exact_;
271};
272
273std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
274
275// Control-flow graph of a method. Contains a list of basic blocks.
276class HGraph : public ArenaObject<kArenaAllocGraph> {
277 public:
278  HGraph(ArenaAllocator* arena,
279         const DexFile& dex_file,
280         uint32_t method_idx,
281         bool should_generate_constructor_barrier,
282         InstructionSet instruction_set,
283         InvokeType invoke_type = kInvalidInvokeType,
284         bool debuggable = false,
285         bool osr = false,
286         int start_instruction_id = 0)
287      : arena_(arena),
288        blocks_(arena->Adapter(kArenaAllocBlockList)),
289        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
290        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
291        entry_block_(nullptr),
292        exit_block_(nullptr),
293        maximum_number_of_out_vregs_(0),
294        number_of_vregs_(0),
295        number_of_in_vregs_(0),
296        temporaries_vreg_slots_(0),
297        has_bounds_checks_(false),
298        has_try_catch_(false),
299        has_irreducible_loops_(false),
300        debuggable_(debuggable),
301        current_instruction_id_(start_instruction_id),
302        dex_file_(dex_file),
303        method_idx_(method_idx),
304        invoke_type_(invoke_type),
305        in_ssa_form_(false),
306        should_generate_constructor_barrier_(should_generate_constructor_barrier),
307        instruction_set_(instruction_set),
308        cached_null_constant_(nullptr),
309        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
310        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
311        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
312        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
313        cached_current_method_(nullptr),
314        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
315        osr_(osr) {
316    blocks_.reserve(kDefaultNumberOfBlocks);
317  }
318
319  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
320  void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);
321
322  ArenaAllocator* GetArena() const { return arena_; }
323  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
324
325  bool IsInSsaForm() const { return in_ssa_form_; }
326  void SetInSsaForm() { in_ssa_form_ = true; }
327
328  HBasicBlock* GetEntryBlock() const { return entry_block_; }
329  HBasicBlock* GetExitBlock() const { return exit_block_; }
330  bool HasExitBlock() const { return exit_block_ != nullptr; }
331
332  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
333  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
334
335  void AddBlock(HBasicBlock* block);
336
337  void ComputeDominanceInformation();
338  void ClearDominanceInformation();
339  void ClearLoopInformation();
340  void FindBackEdges(ArenaBitVector* visited);
341  GraphAnalysisResult BuildDominatorTree();
342  void SimplifyCFG();
343  void SimplifyCatchBlocks();
344
345  // Analyze all natural loops in this graph. Returns a code specifying that it
346  // was successful or the reason for failure. The method will fail if a loop
347  // is a throw-catch loop, i.e. the header is a catch block.
348  GraphAnalysisResult AnalyzeLoops() const;
349
350  // Iterate over blocks to compute try block membership. Needs reverse post
351  // order and loop information.
352  void ComputeTryBlockInformation();
353
354  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
355  // Returns the instruction to replace the invoke expression or null if the
356  // invoke is for a void method. Note that the caller is responsible for replacing
357  // and removing the invoke instruction.
358  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
359
360  // Update the loop and try membership of `block`, which was spawned from `reference`.
361  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
362  // should be the new back edge.
363  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
364                                             HBasicBlock* reference,
365                                             bool replace_if_back_edge);
366
367  // Need to add a couple of blocks to test if the loop body is entered and
368  // put deoptimization instructions, etc.
369  void TransformLoopHeaderForBCE(HBasicBlock* header);
370
371  // Removes `block` from the graph. Assumes `block` has been disconnected from
372  // other blocks and has no instructions or phis.
373  void DeleteDeadEmptyBlock(HBasicBlock* block);
374
375  // Splits the edge between `block` and `successor` while preserving the
376  // indices in the predecessor/successor lists. If there are multiple edges
377  // between the blocks, the lowest indices are used.
378  // Returns the new block which is empty and has the same dex pc as `successor`.
379  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
380
381  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
382  void SimplifyLoop(HBasicBlock* header);
383
384  int32_t GetNextInstructionId() {
385    DCHECK_NE(current_instruction_id_, INT32_MAX);
386    return current_instruction_id_++;
387  }
388
389  int32_t GetCurrentInstructionId() const {
390    return current_instruction_id_;
391  }
392
393  void SetCurrentInstructionId(int32_t id) {
394    DCHECK_GE(id, current_instruction_id_);
395    current_instruction_id_ = id;
396  }
397
398  uint16_t GetMaximumNumberOfOutVRegs() const {
399    return maximum_number_of_out_vregs_;
400  }
401
402  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
403    maximum_number_of_out_vregs_ = new_value;
404  }
405
406  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
407    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
408  }
409
410  void UpdateTemporariesVRegSlots(size_t slots) {
411    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
412  }
413
414  size_t GetTemporariesVRegSlots() const {
415    DCHECK(!in_ssa_form_);
416    return temporaries_vreg_slots_;
417  }
418
419  void SetNumberOfVRegs(uint16_t number_of_vregs) {
420    number_of_vregs_ = number_of_vregs;
421  }
422
423  uint16_t GetNumberOfVRegs() const {
424    return number_of_vregs_;
425  }
426
427  void SetNumberOfInVRegs(uint16_t value) {
428    number_of_in_vregs_ = value;
429  }
430
431  uint16_t GetNumberOfInVRegs() const {
432    return number_of_in_vregs_;
433  }
434
435  uint16_t GetNumberOfLocalVRegs() const {
436    DCHECK(!in_ssa_form_);
437    return number_of_vregs_ - number_of_in_vregs_;
438  }
439
440  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
441    return reverse_post_order_;
442  }
443
444  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
445    return linear_order_;
446  }
447
448  bool HasBoundsChecks() const {
449    return has_bounds_checks_;
450  }
451
452  void SetHasBoundsChecks(bool value) {
453    has_bounds_checks_ = value;
454  }
455
456  bool ShouldGenerateConstructorBarrier() const {
457    return should_generate_constructor_barrier_;
458  }
459
460  bool IsDebuggable() const { return debuggable_; }
461
462  // Returns a constant of the given type and value. If it does not exist
463  // already, it is created and inserted into the graph. This method is only for
464  // integral types.
465  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
466
467  // TODO: This is problematic for the consistency of reference type propagation
468  // because it can be created anytime after the pass and thus it will be left
469  // with an invalid type.
470  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
471
472  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
473    return CreateConstant(value, &cached_int_constants_, dex_pc);
474  }
475  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
476    return CreateConstant(value, &cached_long_constants_, dex_pc);
477  }
478  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
479    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
480  }
481  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
482    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
483  }
484
485  HCurrentMethod* GetCurrentMethod();
486
487  const DexFile& GetDexFile() const {
488    return dex_file_;
489  }
490
491  uint32_t GetMethodIdx() const {
492    return method_idx_;
493  }
494
495  InvokeType GetInvokeType() const {
496    return invoke_type_;
497  }
498
499  InstructionSet GetInstructionSet() const {
500    return instruction_set_;
501  }
502
503  bool IsCompilingOsr() const { return osr_; }
504
505  bool HasTryCatch() const { return has_try_catch_; }
506  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
507
508  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
509  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
510
511  ArtMethod* GetArtMethod() const { return art_method_; }
512  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
513
514  // Returns an instruction with the opposite boolean value from 'cond'.
515  // The instruction has been inserted into the graph, either as a constant, or
516  // before cursor.
517  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
518
519  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
520
521 private:
522  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
523  void RemoveDeadBlocks(const ArenaBitVector& visited);
524
525  template <class InstructionType, typename ValueType>
526  InstructionType* CreateConstant(ValueType value,
527                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
528                                  uint32_t dex_pc = kNoDexPc) {
529    // Try to find an existing constant of the given value.
530    InstructionType* constant = nullptr;
531    auto cached_constant = cache->find(value);
532    if (cached_constant != cache->end()) {
533      constant = cached_constant->second;
534    }
535
536    // If not found or previously deleted, create and cache a new instruction.
537    // Don't bother reviving a previously deleted instruction, for simplicity.
538    if (constant == nullptr || constant->GetBlock() == nullptr) {
539      constant = new (arena_) InstructionType(value, dex_pc);
540      cache->Overwrite(value, constant);
541      InsertConstant(constant);
542    }
543    return constant;
544  }
545
546  void InsertConstant(HConstant* instruction);
547
548  // Cache a float constant into the graph. This method should only be
549  // called by the SsaBuilder when creating "equivalent" instructions.
550  void CacheFloatConstant(HFloatConstant* constant);
551
552  // See CacheFloatConstant comment.
553  void CacheDoubleConstant(HDoubleConstant* constant);
554
555  ArenaAllocator* const arena_;
556
557  // List of blocks in insertion order.
558  ArenaVector<HBasicBlock*> blocks_;
559
560  // List of blocks to perform a reverse post order tree traversal.
561  ArenaVector<HBasicBlock*> reverse_post_order_;
562
563  // List of blocks to perform a linear order tree traversal.
564  ArenaVector<HBasicBlock*> linear_order_;
565
566  HBasicBlock* entry_block_;
567  HBasicBlock* exit_block_;
568
569  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
570  uint16_t maximum_number_of_out_vregs_;
571
572  // The number of virtual registers in this method. Contains the parameters.
573  uint16_t number_of_vregs_;
574
575  // The number of virtual registers used by parameters of this method.
576  uint16_t number_of_in_vregs_;
577
578  // Number of vreg size slots that the temporaries use (used in baseline compiler).
579  size_t temporaries_vreg_slots_;
580
581  // Has bounds checks. We can totally skip BCE if it's false.
582  bool has_bounds_checks_;
583
584  // Flag whether there are any try/catch blocks in the graph. We will skip
585  // try/catch-related passes if false.
586  bool has_try_catch_;
587
588  // Flag whether there are any irreducible loops in the graph.
589  bool has_irreducible_loops_;
590
591  // Indicates whether the graph should be compiled in a way that
592  // ensures full debuggability. If false, we can apply more
593  // aggressive optimizations that may limit the level of debugging.
594  const bool debuggable_;
595
596  // The current id to assign to a newly added instruction. See HInstruction.id_.
597  int32_t current_instruction_id_;
598
599  // The dex file from which the method is from.
600  const DexFile& dex_file_;
601
602  // The method index in the dex file.
603  const uint32_t method_idx_;
604
605  // If inlined, this encodes how the callee is being invoked.
606  const InvokeType invoke_type_;
607
608  // Whether the graph has been transformed to SSA form. Only used
609  // in debug mode to ensure we are not using properties only valid
610  // for non-SSA form (like the number of temporaries).
611  bool in_ssa_form_;
612
613  const bool should_generate_constructor_barrier_;
614
615  const InstructionSet instruction_set_;
616
617  // Cached constants.
618  HNullConstant* cached_null_constant_;
619  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
620  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
621  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
622  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
623
624  HCurrentMethod* cached_current_method_;
625
626  // The ArtMethod this graph is for. Note that for AOT, it may be null,
627  // for example for methods whose declaring class could not be resolved
628  // (such as when the superclass could not be found).
629  ArtMethod* art_method_;
630
631  // Keep the RTI of inexact Object to avoid having to pass stack handle
632  // collection pointer to passes which may create NullConstant.
633  ReferenceTypeInfo inexact_object_rti_;
634
635  // Whether we are compiling this graph for on stack replacement: this will
636  // make all loops seen as irreducible and emit special stack maps to mark
637  // compiled code entries which the interpreter can directly jump to.
638  const bool osr_;
639
640  friend class SsaBuilder;           // For caching constants.
641  friend class SsaLivenessAnalysis;  // For the linear order.
642  friend class HInliner;             // For the reverse post order.
643  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
644  DISALLOW_COPY_AND_ASSIGN(HGraph);
645};
646
// Information about one natural loop: its header block, its back edges, the
// set of member blocks and the loop's suspend check. Created lazily by
// HBasicBlock::AddBackEdge (see below), so it hangs off the header block.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  // Whether this loop has been marked irreducible (set elsewhere; see also
  // HGraph::HasIrreducibleLoops).
  bool IsIrreducible() const { return irreducible_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The loop's HSuspendCheck instruction, once one has been registered.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Membership bit vector; initially sized to the graph's block count but
  // growable (see constructor).
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
741
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks. A TryCatchInformation is therefore either try-block data
// (try_entry_ set) or catch-block data (catch_dex_file_ set), never both.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor. Pass kDexNoIndex16 as
  // `catch_type_index` for a catch-all handler (see IsCatchAllTypeIndex).
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // True for a catch-all handler: the sentinel kDexNoIndex16 means no
  // specific exception type was recorded.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};
794
// Sentinel lifetime position / block id. Casts made explicit (values are
// unchanged: unsigned wraparound of -1), matching the style of the other
// sentinel constants in this file (e.g. kUnknownFieldIndex).
static constexpr size_t kNoLifetime = static_cast<size_t>(-1);
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
797
798// A block in a method. Contains the list of instructions represented
799// as a double linked list. Each block knows its predecessors and
800// successors.
801
802class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
803 public:
804  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
805      : graph_(graph),
806        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
807        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
808        loop_information_(nullptr),
809        dominator_(nullptr),
810        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
811        block_id_(kInvalidBlockId),
812        dex_pc_(dex_pc),
813        lifetime_start_(kNoLifetime),
814        lifetime_end_(kNoLifetime),
815        try_catch_information_(nullptr) {
816    predecessors_.reserve(kDefaultNumberOfPredecessors);
817    successors_.reserve(kDefaultNumberOfSuccessors);
818    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
819  }
820
821  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
822    return predecessors_;
823  }
824
825  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
826    return successors_;
827  }
828
829  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
830  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
831
832  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
833    return ContainsElement(successors_, block, start_from);
834  }
835
836  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
837    return dominated_blocks_;
838  }
839
840  bool IsEntryBlock() const {
841    return graph_->GetEntryBlock() == this;
842  }
843
844  bool IsExitBlock() const {
845    return graph_->GetExitBlock() == this;
846  }
847
848  bool IsSingleGoto() const;
849  bool IsSingleTryBoundary() const;
850
851  // Returns true if this block emits nothing but a jump.
852  bool IsSingleJump() const {
853    HLoopInformation* loop_info = GetLoopInformation();
854    return (IsSingleGoto() || IsSingleTryBoundary())
855           // Back edges generate a suspend check.
856           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
857  }
858
  // Records `back_edge` as a back edge into this block, lazily allocating the
  // loop information (with this block as header) on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    // A block can only be the header of its own loop information.
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }
866
  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // NOTE(review): takes a signed `int` but stores into the `uint32_t`
  // `block_id_` — presumably ids are always non-negative; confirm callers.
  void SetBlockId(int id) { block_id_ = id; }
  // Dex pc of the first instruction of this block.
  uint32_t GetDexPc() const { return dex_pc_; }

  // Immediate dominator; null until dominance analysis has run (see ctor).
  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges into this block; 0 unless it is a loop header.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  // Accessors for the two instruction lists: phis first, then instructions.
  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;
900
  // Adds an edge this->block, updating both directions of the link.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the successor edge `existing` to `new_block`, preserving the
  // successor index. The index is looked up before unlinking `existing`.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the predecessor edge `existing` to `new_block`, preserving the
  // predecessor index (and hence phi input positions).
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }
931
  // Removes the first occurrence of `block` from the predecessor list.
  // Only this side of the edge is updated.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes the first occurrence of `block` from the successor list.
  // Only this side of the edge is updated.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds an edge block->this, updating both directions of the link.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors of a two-predecessor block.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors of a two-successor block.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  // Index of the first occurrence of `predecessor` in the predecessor list.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Index of the first occurrence of `successor` in the successor list.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }
982  }
983
984  // Create a new block between this block and its predecessors. The new block
985  // is added to the graph, all predecessor edges are relinked to it and an edge
986  // is created to `this`. Returns the new empty block. Reverse post order or
987  // loop and try/catch information are not updated.
988  HBasicBlock* CreateImmediateDominator();
989
990  // Split the block into two blocks just before `cursor`. Returns the newly
991  // created, latter block. Note that this method will add the block to the
992  // graph, create a Goto at the end of the former block and will create an edge
993  // between the blocks. It will not, however, update the reverse post order or
994  // loop and try/catch information.
995  HBasicBlock* SplitBefore(HInstruction* cursor);
996
997  // Split the block into two blocks just before `cursor`. Returns the newly
998  // created block. Note that this method just updates raw block information,
999  // like predecessors, successors, dominators, and instruction list. It does not
1000  // update the graph, reverse post order, loop information, nor make sure the
1001  // blocks are consistent (for example ending with a control flow instruction).
1002  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1003
1004  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1005  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1006
1007  // Merge `other` at the end of `this`. Successors and dominated blocks of
1008  // `other` are changed to be successors and dominated blocks of `this`. Note
1009  // that this method does not update the graph, reverse post order, loop
1010  // information, nor make sure the blocks are consistent (for example ending
1011  // with a control flow instruction).
1012  void MergeWithInlined(HBasicBlock* other);
1013
1014  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1015  // of `this` are moved to `other`.
1016  // Note that this method does not update the graph, reverse post order, loop
1017  // information, nor make sure the blocks are consistent (for example ending
1018  // with a control flow instruction).
1019  void ReplaceWith(HBasicBlock* other);
1020
1021  // Merge `other` at the end of `this`. This method updates loops, reverse post
1022  // order, links to predecessors, successors, dominators and deletes the block
1023  // from the graph. The two blocks must be successive, i.e. `this` the only
1024  // predecessor of `other` and vice versa.
1025  void MergeWith(HBasicBlock* other);
1026
1027  // Disconnects `this` from all its predecessors, successors and dominator,
1028  // removes it from all loops it is included in and eventually from the graph.
1029  // The block must not dominate any other block. Predecessors and successors
1030  // are safely updated.
1031  void DisconnectAndDelete();
1032
1033  void AddInstruction(HInstruction* instruction);
1034  // Insert `instruction` before/after an existing instruction `cursor`.
1035  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1036  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1037  // Replace instruction `initial` with `replacement` within this block.
1038  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1039                                       HInstruction* replacement);
1040  void MoveInstructionBefore(HInstruction* insn, HInstruction* cursor);
1041  void AddPhi(HPhi* phi);
1042  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1043  // RemoveInstruction and RemovePhi delete a given instruction from the respective
1044  // instruction list. With 'ensure_safety' set to true, it verifies that the
1045  // instruction is not in use and removes it from the use lists of its inputs.
1046  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1047  void RemovePhi(HPhi* phi, bool ensure_safety = true);
1048  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1049
  // A block is a loop header iff its loop information names it as the header.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Whether predecessor 0 of this loop header is the loop pre-header.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Whether predecessor 0 of this loop header is a back edge.
  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  // Innermost loop containing this block, or null if not in a loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }
1067
1068  // Set the loop_information_ on this block. Overrides the current
1069  // loop_information if it is an outer loop of the passed loop information.
1070  // Note that this method is called while creating the loop information.
1071  void SetInLoop(HLoopInformation* info) {
1072    if (IsLoopHeader()) {
1073      // Nothing to do. This just means `info` is an outer loop.
1074    } else if (!IsInLoop()) {
1075      loop_information_ = info;
1076    } else if (loop_information_->Contains(*info->GetHeader())) {
1077      // Block is currently part of an outer loop. Make it part of this inner loop.
1078      // Note that a non loop header having a loop information means this loop information
1079      // has already been populated
1080      loop_information_ = info;
1081    } else {
1082      // Block is part of an inner loop. Do not update the loop information.
1083      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1084      // at this point, because this method is being called while populating `info`.
1085    }
1086  }
1087
  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  // Try/catch information; null when the block is neither a try nor a catch block.
  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions; kNoLifetime until set via the setters below.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;
1129
 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  // Regular instructions and phis are kept in two separate lists.
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop containing this block; null when not in a loop.
  HLoopInformation* loop_information_;
  // Immediate dominator; null until dominance analysis runs.
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  // kInvalidBlockId until the block is assigned an id (see ctor).
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Lifetime positions; kNoLifetime until explicitly set.
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Null when the block is neither a try nor a catch block.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1151
1152// Iterates over the LoopInformation of all loops which contain 'block'
1153// from the innermost to the outermost.
1154class HLoopInformationOutwardIterator : public ValueObject {
1155 public:
1156  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1157      : current_(block.GetLoopInformation()) {}
1158
1159  bool Done() const { return current_ == nullptr; }
1160
1161  void Advance() {
1162    DCHECK(!Done());
1163    current_ = current_->GetPreHeader()->GetLoopInformation();
1164  }
1165
1166  HLoopInformation* Current() const {
1167    DCHECK(!Done());
1168    return current_;
1169  }
1170
1171 private:
1172  HLoopInformation* current_;
1173
1174  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1175};
1176
// X-macro listing every concrete HInstruction subclass shared by all
// architectures, as M(type, super). Kept alphabetically sorted.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

1258/*
1259 * Instructions, shared across several (not all) architectures.
1260 */
1261#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1262#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1263#else
1264#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1265  M(BitwiseNegatedRight, Instruction)                                   \
1266  M(MultiplyAccumulate, Instruction)
1267#endif
1268
1269#ifndef ART_ENABLE_CODEGEN_arm
1270#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1271#else
1272#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1273  M(ArmDexCacheArraysBase, Instruction)
1274#endif
1275
1276#ifndef ART_ENABLE_CODEGEN_arm64
1277#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1278#else
1279#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
1280  M(Arm64DataProcWithShifterOp, Instruction)                            \
1281  M(Arm64IntermediateAddress, Instruction)
1282#endif
1283
1284#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1285
1286#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1287
1288#ifndef ART_ENABLE_CODEGEN_x86
1289#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1290#else
1291#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1292  M(X86ComputeBaseMethodAddress, Instruction)                           \
1293  M(X86LoadFromConstantTable, Instruction)                              \
1294  M(X86FPNeg, Instruction)                                              \
1295  M(X86PackedSwitch, Instruction)
1296#endif
1297
1298#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1299
1300#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1301  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1302  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1303  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1304  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1305  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1306  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1307  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1308  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1309
1310#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
1311  M(Condition, BinaryOperation)                                         \
1312  M(Constant, Instruction)                                              \
1313  M(UnaryOperation, Instruction)                                        \
1314  M(BinaryOperation, Instruction)                                       \
1315  M(Invoke, Instruction)
1316
1317#define FOR_EACH_INSTRUCTION(M)                                         \
1318  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
1319  FOR_EACH_ABSTRACT_INSTRUCTION(M)
1320
1321#define FORWARD_DECLARATION(type, super) class H##type;
1322FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1323#undef FORWARD_DECLARATION
1324
// Boilerplate for a concrete instruction class: kind, debug name, type
// equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }  \
  const char* DebugName() const OVERRIDE { return #type; }              \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate for an abstract instruction class: Is/As casts that are
// statically known to succeed within the subclass.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1337
// A single entry in an instruction's use list: records the user (an
// HInstruction* or HEnvironment*, via T) and the input index at which the
// used instruction appears. Arena-allocated; constructible only by
// HInstruction (private ctor + friend).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1360
// Intrusive singly-linked list of use-list nodes (see HUseListNode::hook).
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1363
// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  // The node itself is one step past the stored "before" iterator; the
  // increment acts on a copy returned by GetBeforeUseNode().
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1391
1392/**
1393 * Side-effects representation.
1394 *
1395 * For write/read dependences on fields/arrays, the dependence analysis uses
1396 * type disambiguation (e.g. a float field write cannot modify the value of an
1397 * integer field read) and the access type (e.g.  a reference array write cannot
1398 * modify the value of a reference field read [although it may modify the
1399 * reference fetch prior to reading the field, which is represented by its own
1400 * write/read dependence]). The analysis makes conservative points-to
1401 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1402 * the same, and any reference read depends on any reference read without
1403 * further regard of its type).
1404 *
1405 * The internal representation uses 38-bit and is described in the table below.
1406 * The first line indicates the side effect, and for field/array accesses the
1407 * second line indicates the type of the access (in the order of the
1408 * Primitive::Type enum).
1409 * The two numbered lines below indicate the bit position in the bitfield (read
1410 * vertically).
1411 *
1412 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1413 *   +-------------+---------+---------+--------------+---------+---------+
1414 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1415 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1416 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1417 *
1418 * Note that, to ease the implementation, 'changes' bits are least significant
1419 * bits, while 'dependency' bits are most significant bits.
1420 */
1421class SideEffects : public ValueObject {
1422 public:
  // Default: no side effects and no dependencies.
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  // Every change bit and every dependency bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // Everything except the "depends on GC" bit (see bit layout above).
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  // All field/array writes and reads, but no GC bits.
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are modeled conservatively as all writes and reads.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }
1484
  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns the side effects of this minus those of the other.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with the other's side effects.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Whether every bit set in `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  // The dependency bits are shifted down so they line up with the change bits
  // (the layouts mirror each other; see the bit table above).
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }
1540
1541  // Returns string representation of flags (for debugging only).
1542  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1543  std::string ToString() const {
1544    std::string flags = "|";
1545    for (int s = kLastBit; s >= 0; s--) {
1546      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1547      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1548        // This is a bit for the GC side effect.
1549        if (current_bit_is_set) {
1550          flags += "GC";
1551        }
1552        flags += "|";
1553      } else {
1554        // This is a bit for the array/field analysis.
1555        // The underscore character stands for the 'can trigger GC' bit.
1556        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1557        if (current_bit_is_set) {
1558          flags += kDebug[s];
1559        }
1560        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1561            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1562          flags += "|";
1563        }
1564      }
1565    }
1566    return flags;
1567  }
1568
  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Nine bits per field/array section, one per primitive type (see table above).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half: field writes, array writes, then the can-trigger-GC bit.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half: field reads, array reads, then the depends-on-GC bit.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aggregate masks over the layout above. MayDependOn() relies on the two
  // halves mirroring each other.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1601  // Work around the fact that HIR aliases I/F and J/D.
1602  // TODO: remove this interceptor once HIR types are clean
1603  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
1604    switch (type) {
1605      case Primitive::kPrimInt:
1606      case Primitive::kPrimFloat:
1607        return TypeFlag(Primitive::kPrimInt, offset) |
1608               TypeFlag(Primitive::kPrimFloat, offset);
1609      case Primitive::kPrimLong:
1610      case Primitive::kPrimDouble:
1611        return TypeFlag(Primitive::kPrimLong, offset) |
1612               TypeFlag(Primitive::kPrimDouble, offset);
1613      default:
1614        return TypeFlag(type, offset);
1615    }
1616  }
1617
1618  // Translates type to bit flag.
1619  static uint64_t TypeFlag(Primitive::Type type, int offset) {
1620    CHECK_NE(type, Primitive::kPrimVoid);
1621    const uint64_t one = 1;
1622    const int shift = type;  // 0-based consecutive enum
1623    DCHECK_LE(kFieldWriteOffset, shift);
1624    DCHECK_LT(shift, kArrayWriteOffset);
1625    return one << (type + offset);
1626  }
1627
  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  // Encoded change/depend-on flag bits (layout described by the constants above).
  uint64_t flags_;
1632};
1633
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Creates an environment with the same size and metadata as `to_copy`,
  // without copying its vreg values; use CopyFrom() for the values.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Copies `parent` (and, recursively, its own parents) onto the end of this
  // environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw because this does not update the use list of `instruction`.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers in this environment (excluding any parents).
  size_t Size() const { return vregs_.size(); }

  // Non-null when this environment comes from an inlined invoke; the parent
  // then describes the caller's environment.
  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  // The instruction this environment is attached to.
  HInstruction* GetHolder() const {
    return holder_;
  }


  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  // HInstruction writes vregs_ directly when fixing up environment use records.
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1746
// Abstract base class for all nodes of the HIR. An instruction is linked into
// the intrusive list of its HBasicBlock, keeps intrusive use lists of its
// users (both instructions and environments), and carries side-effect and
// liveness data consumed by the optimizing passes.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// Generates one enumerator (k<Name>) per instruction class.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  // Raw because this does not update the use list of `input`.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  // Whether this instruction must carry an HEnvironment; overridden by
  // subclasses that need one.
  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  // Rebuilds the reference type info from the stored handle and the packed
  // "is exact" flag. Only valid for reference-typed instructions.
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user`'s input at `index` is `this`, prepending a node to
  // the use list and fixing up the cached iterators of existing records.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Environment counterpart of AddUseAt(): records that environment `user`
  // holds `this` in vreg `index`.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Removes `this` from the use list of its input at `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom(), but loop phis of `block` are replaced by
  // their first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Is<Type>()/As<Type>() declarations for each concrete instruction class;
// their definitions are provided out of line.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions, the base implementations return null/false and
// the matching subclass overrides As<Type>().
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash combining the instruction kind with the ids of all inputs.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (size_t i = 0, e = InputCount(); i < e; ++i) {
      result = (result * 31) + InputAt(i)->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses start allocating their own packed bits/fields at this index.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After a push_front() on `uses_`, refresh the cached `before_use_node`
  // iterators stored in the users' input records, up to (excluding) `fixup_end`.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After erasing the node following `before_use_node`, refresh the record of
  // the user whose node now follows it (if any).
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion();
  // writes the HEnvironment's vregs_ directly (HEnvironment befriends us).
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  // Links within the owning basic block's instruction list.
  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
// Streams an InstructionKind value (declaration only; defined out of line).
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2165
2166class HInputIterator : public ValueObject {
2167 public:
2168  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}
2169
2170  bool Done() const { return index_ == instruction_->InputCount(); }
2171  HInstruction* Current() const { return instruction_->InputAt(index_); }
2172  void Advance() { index_++; }
2173
2174 private:
2175  HInstruction* instruction_;
2176  size_t index_;
2177
2178  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
2179};
2180
2181class HInstructionIterator : public ValueObject {
2182 public:
2183  explicit HInstructionIterator(const HInstructionList& instructions)
2184      : instruction_(instructions.first_instruction_) {
2185    next_ = Done() ? nullptr : instruction_->GetNext();
2186  }
2187
2188  bool Done() const { return instruction_ == nullptr; }
2189  HInstruction* Current() const { return instruction_; }
2190  void Advance() {
2191    instruction_ = next_;
2192    next_ = Done() ? nullptr : instruction_->GetNext();
2193  }
2194
2195 private:
2196  HInstruction* instruction_;
2197  HInstruction* next_;
2198
2199  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2200};
2201
2202class HBackwardInstructionIterator : public ValueObject {
2203 public:
2204  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2205      : instruction_(instructions.last_instruction_) {
2206    next_ = Done() ? nullptr : instruction_->GetPrevious();
2207  }
2208
2209  bool Done() const { return instruction_ == nullptr; }
2210  HInstruction* Current() const { return instruction_; }
2211  void Advance() {
2212    instruction_ = next_;
2213    next_ = Done() ? nullptr : instruction_->GetPrevious();
2214  }
2215
2216 private:
2217  HInstruction* instruction_;
2218  HInstruction* next_;
2219
2220  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2221};
2222
// Instruction with a compile-time constant number of inputs (N), stored
// inline in a fixed-size array.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  // Note: SsaBuilder has friend access to the internals of this class.
  friend class SsaBuilder;
};
2248
// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  // There are no inputs, so any input-record access is a fatal error.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};
2275
// An instruction that produces a typed value; the type is stored in the
// generic packed fields inherited from HInstruction.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Bit layout of the type field within the packed fields; subclasses may
  // allocate further bits starting at kNumberOfExpressionPackedBits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2298
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Returning ends the basic block's control flow.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2313
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    // The single input is the value being returned.
    SetRawInputAt(0, value);
  }

  // Returning ends the basic block's control flow.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2330
// SSA phi node: merges the values of a dex register coming from the
// block's predecessors.
class HPhi : public HInstruction {
 public:
  // `reg_number` is the dex register this phi merges; `number_of_inputs`
  // is the number of incoming values. `type` is canonicalized via ToPhiType().
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  // Whether this phi is placed in a catch block.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges values for.
  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness of the phi itself (not of its value); see constructor comment.
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // Whether `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  // Packed-field layout: the phi type, then the is-live and can-be-null flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2431
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2446
// Jumps from one block to another. The enclosing block has exactly one
// successor: the jump target.
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The jump target.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2463
// Abstract base class for compile-time constants of all primitive types.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  // Constants have no side effects and are always safe to move.
  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit pattern of the value, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2487
2488class HNullConstant : public HConstant {
2489 public:
2490  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2491    return true;
2492  }
2493
2494  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2495
2496  size_t ComputeHashCode() const OVERRIDE { return 0; }
2497
2498  // The null constant representation is a 0-bit pattern.
2499  virtual bool IsZeroBitPattern() const { return true; }
2500
2501  DECLARE_INSTRUCTION(NullConstant);
2502
2503 private:
2504  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2505
2506  friend class HGraph;
2507  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2508};
2509
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit value; goes through uint32_t to avoid
  // sign-extension of negative values.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Only HGraph and the befriended tests may construct int constants.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2552
// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Only HGraph may construct long constants.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2582
2583class HFloatConstant : public HConstant {
2584 public:
2585  float GetValue() const { return value_; }
2586
2587  uint64_t GetValueAsUint64() const OVERRIDE {
2588    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2589  }
2590
2591  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2592    DCHECK(other->IsFloatConstant()) << other->DebugName();
2593    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2594  }
2595
2596  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2597
2598  bool IsMinusOne() const OVERRIDE {
2599    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2600  }
2601  bool IsArithmeticZero() const OVERRIDE {
2602    return std::fpclassify(value_) == FP_ZERO;
2603  }
2604  bool IsArithmeticPositiveZero() const {
2605    return IsArithmeticZero() && !std::signbit(value_);
2606  }
2607  bool IsArithmeticNegativeZero() const {
2608    return IsArithmeticZero() && std::signbit(value_);
2609  }
2610  bool IsZeroBitPattern() const OVERRIDE {
2611    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2612  }
2613  bool IsOne() const OVERRIDE {
2614    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2615  }
2616  bool IsNaN() const {
2617    return std::isnan(value_);
2618  }
2619
2620  DECLARE_INSTRUCTION(FloatConstant);
2621
2622 private:
2623  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2624      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
2625  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2626      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2627
2628  const float value_;
2629
2630  // Only the SsaBuilder and HGraph can create floating-point constants.
2631  friend class SsaBuilder;
2632  friend class HGraph;
2633  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
2634};
2635
2636class HDoubleConstant : public HConstant {
2637 public:
2638  double GetValue() const { return value_; }
2639
2640  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2641
2642  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2643    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2644    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2645  }
2646
2647  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2648
2649  bool IsMinusOne() const OVERRIDE {
2650    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2651  }
2652  bool IsArithmeticZero() const OVERRIDE {
2653    return std::fpclassify(value_) == FP_ZERO;
2654  }
2655  bool IsArithmeticPositiveZero() const {
2656    return IsArithmeticZero() && !std::signbit(value_);
2657  }
2658  bool IsArithmeticNegativeZero() const {
2659    return IsArithmeticZero() && std::signbit(value_);
2660  }
2661  bool IsZeroBitPattern() const OVERRIDE {
2662    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2663  }
2664  bool IsOne() const OVERRIDE {
2665    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2666  }
2667  bool IsNaN() const {
2668    return std::isnan(value_);
2669  }
2670
2671  DECLARE_INSTRUCTION(DoubleConstant);
2672
2673 private:
2674  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
2675      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
2676  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2677      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
2678
2679  const double value_;
2680
2681  // Only the SsaBuilder and HGraph can create floating-point constants.
2682  friend class SsaBuilder;
2683  friend class HGraph;
2684  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
2685};
2686
// Conditional branch. A block ending with an HIf instruction must have
// two successors.
class HIf : public HTemplateInstruction<1> {
 public:
  // `input` is the boolean condition being branched on.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor 0 is taken when the condition is true.
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor 1 is taken when the condition is false.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2711
2712
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  // Whether this boundary enters or exits the try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Non-owning view over the exception-handler successors (indices >= 1).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed-field layout: the boundary kind only.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2774
// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  // Deoptimizations with the same condition input are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // NOTE(review): presumably the environment is needed to reconstruct the
  // interpreter state on deoptimization — confirm against the deopt entrypoint.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2797
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2811
2812// Fetches an ArtMethod from the virtual table or the interface method table
2813// of a class.
2814class HClassTableGet : public HExpression<1> {
2815 public:
2816  enum class TableKind {
2817    kVTable,
2818    kIMTable,
2819    kLast = kIMTable
2820  };
2821  HClassTableGet(HInstruction* cls,
2822                 Primitive::Type type,
2823                 TableKind kind,
2824                 size_t index,
2825                 uint32_t dex_pc)
2826      : HExpression(type, SideEffects::None(), dex_pc),
2827        index_(index) {
2828    SetPackedField<TableKindField>(kind);
2829    SetRawInputAt(0, cls);
2830  }
2831
2832  bool CanBeMoved() const OVERRIDE { return true; }
2833  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2834    return other->AsClassTableGet()->GetIndex() == index_ &&
2835        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2836  }
2837
2838  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2839  size_t GetIndex() const { return index_; }
2840
2841  DECLARE_INSTRUCTION(ClassTableGet);
2842
2843 private:
2844  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2845  static constexpr size_t kFieldTableKindSize =
2846      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2847  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2848  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2849                "Too many packed fields.");
2850  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
2851
2852  // The index of the ArtMethod in the table.
2853  const size_t index_;
2854
2855  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2856};
2857
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  // `start_value` is the case value of the first entry; entry i handles
  // `start_value + i`. `input` is the value being switched on.
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
2891
// Abstract base class for operations with a single input and a result.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Unary operations are pure: equal inputs give equal results.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
2923
// Abstract base class for operations with two inputs and a result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Whether swapping the two inputs preserves the result.
  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      // ReplaceInput keeps the use lists consistent while swapping.
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      // Already canonical: identical inputs, or sole constant on the right.
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3013
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Output operator for ComparisonBias.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3024
// Abstract base class for boolean-valued comparisons of two inputs.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  // The condition that holds exactly when this one does not.
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Conditions are equal when both kind and bias (the packed fields) match.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether this floating-point condition evaluates to true when an input is NaN,
  // derived from the condition kind and the bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Whether this floating-point condition evaluates to false when an input is NaN.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way floating-point comparison; unordered (NaN) operands yield
  // 1 or -1 depending on the bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3104
3105// Instruction to check if two inputs are equal to each other.
3106class HEqual : public HCondition {
3107 public:
3108  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3109      : HCondition(first, second, dex_pc) {}
3110
3111  bool IsCommutative() const OVERRIDE { return true; }
3112
3113  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3114                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3115    return MakeConstantCondition(true, GetDexPc());
3116  }
3117  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3118    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3119  }
3120  // In the following Evaluate methods, a HCompare instruction has
3121  // been merged into this HEqual instruction; evaluate it as
3122  // `Compare(x, y) == 0`.
3123  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3124    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3125                                 GetDexPc());
3126  }
3127  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3128    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3129  }
3130  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3131    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3132  }
3133
3134  DECLARE_INSTRUCTION(Equal);
3135
3136  IfCondition GetCondition() const OVERRIDE {
3137    return kCondEQ;
3138  }
3139
3140  IfCondition GetOppositeCondition() const OVERRIDE {
3141    return kCondNE;
3142  }
3143
3144 private:
3145  template <typename T> bool Compute(T x, T y) const { return x == y; }
3146
3147  DISALLOW_COPY_AND_ASSIGN(HEqual);
3148};
3149
// Instruction to check if two inputs are not equal to each other.
class HNotEqual : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // Two null constants are never not-equal.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(false, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HNotEqual instruction; evaluate it as
  // `Compare(x, y) != 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};
3192
3193class HLessThan : public HCondition {
3194 public:
3195  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3196      : HCondition(first, second, dex_pc) {}
3197
3198  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3199    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3200  }
3201  // In the following Evaluate methods, a HCompare instruction has
3202  // been merged into this HLessThan instruction; evaluate it as
3203  // `Compare(x, y) < 0`.
3204  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3205    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3206  }
3207  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3208    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3209  }
3210  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3211    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3212  }
3213
3214  DECLARE_INSTRUCTION(LessThan);
3215
3216  IfCondition GetCondition() const OVERRIDE {
3217    return kCondLT;
3218  }
3219
3220  IfCondition GetOppositeCondition() const OVERRIDE {
3221    return kCondGE;
3222  }
3223
3224 private:
3225  template <typename T> bool Compute(T x, T y) const { return x < y; }
3226
3227  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3228};
3229
3230class HLessThanOrEqual : public HCondition {
3231 public:
3232  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3233      : HCondition(first, second, dex_pc) {}
3234
3235  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3236    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3237  }
3238  // In the following Evaluate methods, a HCompare instruction has
3239  // been merged into this HLessThanOrEqual instruction; evaluate it as
3240  // `Compare(x, y) <= 0`.
3241  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3242    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3243  }
3244  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3245    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3246  }
3247  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3248    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3249  }
3250
3251  DECLARE_INSTRUCTION(LessThanOrEqual);
3252
3253  IfCondition GetCondition() const OVERRIDE {
3254    return kCondLE;
3255  }
3256
3257  IfCondition GetOppositeCondition() const OVERRIDE {
3258    return kCondGT;
3259  }
3260
3261 private:
3262  template <typename T> bool Compute(T x, T y) const { return x <= y; }
3263
3264  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3265};
3266
3267class HGreaterThan : public HCondition {
3268 public:
3269  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3270      : HCondition(first, second, dex_pc) {}
3271
3272  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3273    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3274  }
3275  // In the following Evaluate methods, a HCompare instruction has
3276  // been merged into this HGreaterThan instruction; evaluate it as
3277  // `Compare(x, y) > 0`.
3278  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3279    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3280  }
3281  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3282    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3283  }
3284  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3285    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3286  }
3287
3288  DECLARE_INSTRUCTION(GreaterThan);
3289
3290  IfCondition GetCondition() const OVERRIDE {
3291    return kCondGT;
3292  }
3293
3294  IfCondition GetOppositeCondition() const OVERRIDE {
3295    return kCondLE;
3296  }
3297
3298 private:
3299  template <typename T> bool Compute(T x, T y) const { return x > y; }
3300
3301  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3302};
3303
3304class HGreaterThanOrEqual : public HCondition {
3305 public:
3306  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3307      : HCondition(first, second, dex_pc) {}
3308
3309  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3310    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3311  }
3312  // In the following Evaluate methods, a HCompare instruction has
3313  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3314  // `Compare(x, y) >= 0`.
3315  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3316    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3317  }
3318  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3319    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3320  }
3321  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3322    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3323  }
3324
3325  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3326
3327  IfCondition GetCondition() const OVERRIDE {
3328    return kCondGE;
3329  }
3330
3331  IfCondition GetOppositeCondition() const OVERRIDE {
3332    return kCondLT;
3333  }
3334
3335 private:
3336  template <typename T> bool Compute(T x, T y) const { return x >= y; }
3337
3338  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3339};
3340
3341class HBelow : public HCondition {
3342 public:
3343  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3344      : HCondition(first, second, dex_pc) {}
3345
3346  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3347    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3348  }
3349  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3350    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3351  }
3352  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3353                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3354    LOG(FATAL) << DebugName() << " is not defined for float values";
3355    UNREACHABLE();
3356  }
3357  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3358                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3359    LOG(FATAL) << DebugName() << " is not defined for double values";
3360    UNREACHABLE();
3361  }
3362
3363  DECLARE_INSTRUCTION(Below);
3364
3365  IfCondition GetCondition() const OVERRIDE {
3366    return kCondB;
3367  }
3368
3369  IfCondition GetOppositeCondition() const OVERRIDE {
3370    return kCondAE;
3371  }
3372
3373 private:
3374  template <typename T> bool Compute(T x, T y) const {
3375    return MakeUnsigned(x) < MakeUnsigned(y);
3376  }
3377
3378  DISALLOW_COPY_AND_ASSIGN(HBelow);
3379};
3380
3381class HBelowOrEqual : public HCondition {
3382 public:
3383  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3384      : HCondition(first, second, dex_pc) {}
3385
3386  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3387    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3388  }
3389  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3390    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3391  }
3392  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3393                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3394    LOG(FATAL) << DebugName() << " is not defined for float values";
3395    UNREACHABLE();
3396  }
3397  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3398                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3399    LOG(FATAL) << DebugName() << " is not defined for double values";
3400    UNREACHABLE();
3401  }
3402
3403  DECLARE_INSTRUCTION(BelowOrEqual);
3404
3405  IfCondition GetCondition() const OVERRIDE {
3406    return kCondBE;
3407  }
3408
3409  IfCondition GetOppositeCondition() const OVERRIDE {
3410    return kCondA;
3411  }
3412
3413 private:
3414  template <typename T> bool Compute(T x, T y) const {
3415    return MakeUnsigned(x) <= MakeUnsigned(y);
3416  }
3417
3418  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3419};
3420
3421class HAbove : public HCondition {
3422 public:
3423  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3424      : HCondition(first, second, dex_pc) {}
3425
3426  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3427    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3428  }
3429  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3430    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3431  }
3432  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3433                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3434    LOG(FATAL) << DebugName() << " is not defined for float values";
3435    UNREACHABLE();
3436  }
3437  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3438                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3439    LOG(FATAL) << DebugName() << " is not defined for double values";
3440    UNREACHABLE();
3441  }
3442
3443  DECLARE_INSTRUCTION(Above);
3444
3445  IfCondition GetCondition() const OVERRIDE {
3446    return kCondA;
3447  }
3448
3449  IfCondition GetOppositeCondition() const OVERRIDE {
3450    return kCondBE;
3451  }
3452
3453 private:
3454  template <typename T> bool Compute(T x, T y) const {
3455    return MakeUnsigned(x) > MakeUnsigned(y);
3456  }
3457
3458  DISALLOW_COPY_AND_ASSIGN(HAbove);
3459};
3460
3461class HAboveOrEqual : public HCondition {
3462 public:
3463  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3464      : HCondition(first, second, dex_pc) {}
3465
3466  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3467    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3468  }
3469  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3470    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3471  }
3472  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3473                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3474    LOG(FATAL) << DebugName() << " is not defined for float values";
3475    UNREACHABLE();
3476  }
3477  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3478                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3479    LOG(FATAL) << DebugName() << " is not defined for double values";
3480    UNREACHABLE();
3481  }
3482
3483  DECLARE_INSTRUCTION(AboveOrEqual);
3484
3485  IfCondition GetCondition() const OVERRIDE {
3486    return kCondAE;
3487  }
3488
3489  IfCondition GetOppositeCondition() const OVERRIDE {
3490    return kCondB;
3491  }
3492
3493 private:
3494  template <typename T> bool Compute(T x, T y) const {
3495    return MakeUnsigned(x) >= MakeUnsigned(y);
3496  }
3497
3498  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3499};
3500
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must agree with the declared comparison type.
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Generic three-way comparison: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point inputs; an unordered result
  // (either operand is NaN) resolves to +1 or -1 according to the bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are interchangeable only when their packed fields
  // (including the comparison bias) match.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // The comparison bias is stored in the packed bits directly after the
  // bits used by HExpression.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3591
3592class HNewInstance : public HExpression<2> {
3593 public:
3594  HNewInstance(HInstruction* cls,
3595               HCurrentMethod* current_method,
3596               uint32_t dex_pc,
3597               uint16_t type_index,
3598               const DexFile& dex_file,
3599               bool can_throw,
3600               bool finalizable,
3601               QuickEntrypointEnum entrypoint)
3602      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
3603        type_index_(type_index),
3604        dex_file_(dex_file),
3605        entrypoint_(entrypoint) {
3606    SetPackedFlag<kFlagCanThrow>(can_throw);
3607    SetPackedFlag<kFlagFinalizable>(finalizable);
3608    SetRawInputAt(0, cls);
3609    SetRawInputAt(1, current_method);
3610  }
3611
3612  uint16_t GetTypeIndex() const { return type_index_; }
3613  const DexFile& GetDexFile() const { return dex_file_; }
3614
3615  // Calls runtime so needs an environment.
3616  bool NeedsEnvironment() const OVERRIDE { return true; }
3617
3618  // It may throw when called on type that's not instantiable/accessible.
3619  // It can throw OOME.
3620  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
3621  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>() || true; }
3622
3623  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
3624
3625  bool CanBeNull() const OVERRIDE { return false; }
3626
3627  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
3628
3629  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
3630    entrypoint_ = entrypoint;
3631  }
3632
3633  bool IsStringAlloc() const;
3634
3635  DECLARE_INSTRUCTION(NewInstance);
3636
3637 private:
3638  static constexpr size_t kFlagCanThrow = kNumberOfExpressionPackedBits;
3639  static constexpr size_t kFlagFinalizable = kFlagCanThrow + 1;
3640  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
3641  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
3642                "Too many packed fields.");
3643
3644  const uint16_t type_index_;
3645  const DexFile& dex_file_;
3646  QuickEntrypointEnum entrypoint_;
3647
3648  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
3649};
3650
// Enumeration of the intrinsics recognized by the optimizing compiler,
// generated from intrinsics_list.h. kNone marks an invoke that has not
// been matched to any intrinsic (see HInvoke::IsIntrinsic below).
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
3660std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
3661
// Whether an intrinsic's implementation still needs an environment and/or
// a dex cache (passed to HInvoke::SetIntrinsic).
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3666
// Heap side effects of an intrinsic (passed to HInvoke::SetIntrinsic).
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3673
// Exception behavior of an intrinsic (passed to HInvoke::SetIntrinsic).
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3678
// Abstract base class for all invoke-style instructions. Holds the argument
// inputs (plus possible extra non-argument inputs in subclasses), the dex
// method index, and the intrinsic this call may have been matched to.
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const {
    return GetPackedField<OriginalInvokeTypeField>();
  }

  // The intrinsic matched for this call, or Intrinsics::kNone.
  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only calls recognized as intrinsics may be moved; the constructor
  // conservatively assumes read/write on all fields/arrays otherwise.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes carry equal data only when both were matched to the same
  // (non-kNone) intrinsic.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout, appended after HInstruction's generic bits:
  // original invoke type, then return type, then a can-throw flag.
  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOriginalInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using OriginalInvokeTypeField =
      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
    // Calls are assumed throwing until proven otherwise (see SetCanThrow).
    SetPackedFlag<kFlagCanThrow>(true);
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3793
3794class HInvokeUnresolved : public HInvoke {
3795 public:
3796  HInvokeUnresolved(ArenaAllocator* arena,
3797                    uint32_t number_of_arguments,
3798                    Primitive::Type return_type,
3799                    uint32_t dex_pc,
3800                    uint32_t dex_method_index,
3801                    InvokeType invoke_type)
3802      : HInvoke(arena,
3803                number_of_arguments,
3804                0u /* number_of_other_inputs */,
3805                return_type,
3806                dex_pc,
3807                dex_method_index,
3808                invoke_type) {
3809  }
3810
3811  DECLARE_INSTRUCTION(InvokeUnresolved);
3812
3813 private:
3814  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
3815};
3816
3817class HInvokeStaticOrDirect : public HInvoke {
3818 public:
3819  // Requirements of this method call regarding the class
3820  // initialization (clinit) check of its declaring class.
3821  enum class ClinitCheckRequirement {
3822    kNone,      // Class already initialized.
3823    kExplicit,  // Static call having explicit clinit check as last input.
3824    kImplicit,  // Static call implicitly requiring a clinit check.
3825    kLast = kImplicit
3826  };
3827
3828  // Determines how to load the target ArtMethod*.
3829  enum class MethodLoadKind {
3830    // Use a String init ArtMethod* loaded from Thread entrypoints.
3831    kStringInit,
3832
3833    // Use the method's own ArtMethod* loaded by the register allocator.
3834    kRecursive,
3835
3836    // Use ArtMethod* at a known address, embed the direct address in the code.
3837    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3838    kDirectAddress,
3839
3840    // Use ArtMethod* at an address that will be known at link time, embed the direct
3841    // address in the code. If the image is relocatable, emit .patch_oat entry.
3842    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3843    // the image relocatable or not.
3844    kDirectAddressWithFixup,
3845
3846    // Load from resolved methods array in the dex cache using a PC-relative load.
3847    // Used when we need to use the dex cache, for example for invoke-static that
3848    // may cause class initialization (the entry may point to a resolution method),
3849    // and we know that we can access the dex cache arrays using a PC-relative load.
3850    kDexCachePcRelative,
3851
3852    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3853    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3854    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3855    // or unimplemented or impractical (i.e. slow) on a particular architecture.
3856    kDexCacheViaMethod,
3857  };
3858
3859  // Determines the location of the code pointer.
3860  enum class CodePtrLocation {
3861    // Recursive call, use local PC-relative call instruction.
3862    kCallSelf,
3863
3864    // Use PC-relative call instruction patched at link time.
3865    // Used for calls within an oat file, boot->boot or app->app.
3866    kCallPCRelative,
3867
3868    // Call to a known target address, embed the direct address in code.
3869    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3870    kCallDirect,
3871
3872    // Call to a target address that will be known at link time, embed the direct
3873    // address in code. If the image is relocatable, emit .patch_oat entry.
3874    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3875    // the image relocatable or not.
3876    kCallDirectWithFixup,
3877
3878    // Use code pointer from the ArtMethod*.
3879    // Used when we don't know the target code. This is also the last-resort-kind used when
3880    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3881    kCallArtMethod,
3882  };
3883
3884  struct DispatchInfo {
3885    MethodLoadKind method_load_kind;
3886    CodePtrLocation code_ptr_location;
3887    // The method load data holds
3888    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
3889    //     Note that there are multiple string init methods, each having its own offset.
3890    //   - the method address for kDirectAddress
3891    //   - the dex cache arrays offset for kDexCachePcRel.
3892    uint64_t method_load_data;
3893    uint64_t direct_code_ptr;
3894  };
3895
3896  HInvokeStaticOrDirect(ArenaAllocator* arena,
3897                        uint32_t number_of_arguments,
3898                        Primitive::Type return_type,
3899                        uint32_t dex_pc,
3900                        uint32_t method_index,
3901                        MethodReference target_method,
3902                        DispatchInfo dispatch_info,
3903                        InvokeType original_invoke_type,
3904                        InvokeType optimized_invoke_type,
3905                        ClinitCheckRequirement clinit_check_requirement)
3906      : HInvoke(arena,
3907                number_of_arguments,
3908                // There is potentially one extra argument for the HCurrentMethod node, and
3909                // potentially one other if the clinit check is explicit, and potentially
3910                // one other if the method is a string factory.
3911                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
3912                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
3913                return_type,
3914                dex_pc,
3915                method_index,
3916                original_invoke_type),
3917        target_method_(target_method),
3918        dispatch_info_(dispatch_info) {
3919    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
3920    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
3921  }
3922
3923  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
3924    bool had_current_method_input = HasCurrentMethodInput();
3925    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
3926
3927    // Using the current method is the default and once we find a better
3928    // method load kind, we should not go back to using the current method.
3929    DCHECK(had_current_method_input || !needs_current_method_input);
3930
3931    if (had_current_method_input && !needs_current_method_input) {
3932      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
3933      RemoveInputAt(GetSpecialInputIndex());
3934    }
3935    dispatch_info_ = dispatch_info;
3936  }
3937
  // Insert `input` right after the argument list as the single allowed
  // "special input" (e.g. a platform-specific PC-relative addressing base).
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    // The only input allowed after the special input slot is the trailing
    // HClinitCheck/HLoadClass of a static call with explicit clinit check.
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }
3945
  // Static/direct invokes never fold a preceding null check into the call.
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }
3951
3952  bool CanBeNull() const OVERRIDE {
3953    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
3954  }
3955
  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  // True if any input beyond the plain arguments has been added.
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
3963
  // The invoke type after sharpening (may differ from the original dex
  // invoke type, e.g. a kVirtual call devirtualized to kDirect).
  InvokeType GetOptimizedInvokeType() const {
    return GetPackedField<OptimizedInvokeTypeField>();
  }

  void SetOptimizedInvokeType(InvokeType invoke_type) {
    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
  }
3971
  // Accessors for the dispatch info: how the callee method is loaded and
  // how the call target address is obtained.
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  // Whether the current method is materialized as an extra input. Debug builds
  // verify that the input list is consistent with the method load kind.
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }
3995
  // The meaning of `dispatch_info_.method_load_data` depends on the method
  // load kind; each accessor below is valid only for the matching kind.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    // NOTE(review): method_load_data is a uint64_t and is truncated to 32 bits
    // here — presumably string-init entrypoint offsets always fit; confirm.
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  // Whether this static call needs no/implicit/explicit class initialization check.
  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }
4024
  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  // `new_requirement` records how the class initialization is guaranteed instead.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    // Unlink from the use list first, then drop the input slot itself.
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }
4039
4040  // Is this a call to a static method whose declaring class has an
4041  // explicit initialization check in the graph?
4042  bool IsStaticWithExplicitClinitCheck() const {
4043    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
4044  }
4045
4046  // Is this a call to a static method whose declaring class has an
4047  // implicit intialization check requirement?
4048  bool IsStaticWithImplicitClinitCheck() const {
4049    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
4050  }
4051
4052  // Does this method load kind need the current method as an input?
4053  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
4054    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
4055  }
4056
4057  DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4058
4059 protected:
4060  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
4061    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
4062    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
4063      HInstruction* input = input_record.GetInstruction();
4064      // `input` is the last input of a static invoke marked as having
4065      // an explicit clinit check. It must either be:
4066      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4067      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4068      DCHECK(input != nullptr);
4069      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
4070    }
4071    return input_record;
4072  }
4073
  // Input-list editing helpers; defined in nodes.cc.
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);
4076
4077 private:
4078  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
4079  static constexpr size_t kFieldOptimizedInvokeTypeSize =
4080      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4081  static constexpr size_t kFieldClinitCheckRequirement =
4082      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
4083  static constexpr size_t kFieldClinitCheckRequirementSize =
4084      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4085  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4086      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4087  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4088                "Too many packed fields.");
4089  using OptimizedInvokeTypeField =
4090      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
4091  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4092                                               kFieldClinitCheckRequirement,
4093                                               kFieldClinitCheckRequirementSize>;
4094
4095  // The target method may refer to different dex file or method index than the original
4096  // invoke. This happens for sharpened calls and for calls where a method was redeclared
4097  // in derived class to increase visibility.
4098  MethodReference target_method_;
4099  DispatchInfo dispatch_info_;
4100
4101  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
4102};
4103std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4104std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4105
4106class HInvokeVirtual : public HInvoke {
4107 public:
4108  HInvokeVirtual(ArenaAllocator* arena,
4109                 uint32_t number_of_arguments,
4110                 Primitive::Type return_type,
4111                 uint32_t dex_pc,
4112                 uint32_t dex_method_index,
4113                 uint32_t vtable_index)
4114      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
4115        vtable_index_(vtable_index) {}
4116
4117  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4118    // TODO: Add implicit null checks in intrinsics.
4119    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4120  }
4121
4122  uint32_t GetVTableIndex() const { return vtable_index_; }
4123
4124  DECLARE_INSTRUCTION(InvokeVirtual);
4125
4126 private:
4127  const uint32_t vtable_index_;
4128
4129  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
4130};
4131
4132class HInvokeInterface : public HInvoke {
4133 public:
4134  HInvokeInterface(ArenaAllocator* arena,
4135                   uint32_t number_of_arguments,
4136                   Primitive::Type return_type,
4137                   uint32_t dex_pc,
4138                   uint32_t dex_method_index,
4139                   uint32_t imt_index)
4140      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
4141        imt_index_(imt_index) {}
4142
4143  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4144    // TODO: Add implicit null checks in intrinsics.
4145    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4146  }
4147
4148  uint32_t GetImtIndex() const { return imt_index_; }
4149  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4150
4151  DECLARE_INSTRUCTION(InvokeInterface);
4152
4153 private:
4154  const uint32_t imt_index_;
4155
4156  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4157};
4158
4159class HNeg : public HUnaryOperation {
4160 public:
4161  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4162      : HUnaryOperation(result_type, input, dex_pc) {
4163    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4164  }
4165
4166  template <typename T> T Compute(T x) const { return -x; }
4167
4168  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4169    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4170  }
4171  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4172    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4173  }
4174  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4175    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4176  }
4177  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4178    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4179  }
4180
4181  DECLARE_INSTRUCTION(Neg);
4182
4183 private:
4184  DISALLOW_COPY_AND_ASSIGN(HNeg);
4185};
4186
// Allocates a new array of `length` elements of the type given by `type_index`.
class HNewArray : public HExpression<2> {
 public:
  HNewArray(HInstruction* length,
            HCurrentMethod* current_method,
            uint32_t dex_pc,
            uint16_t type_index,
            const DexFile& dex_file,
            QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    // Input 0: array length; input 1: current method (needed to resolve the type).
    SetRawInputAt(0, length);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  const uint16_t type_index_;
  const DexFile& dex_file_;
  // Runtime entrypoint used to perform the allocation.
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};
4225
4226class HAdd : public HBinaryOperation {
4227 public:
4228  HAdd(Primitive::Type result_type,
4229       HInstruction* left,
4230       HInstruction* right,
4231       uint32_t dex_pc = kNoDexPc)
4232      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4233
4234  bool IsCommutative() const OVERRIDE { return true; }
4235
4236  template <typename T> T Compute(T x, T y) const { return x + y; }
4237
4238  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4239    return GetBlock()->GetGraph()->GetIntConstant(
4240        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4241  }
4242  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4243    return GetBlock()->GetGraph()->GetLongConstant(
4244        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4245  }
4246  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4247    return GetBlock()->GetGraph()->GetFloatConstant(
4248        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4249  }
4250  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4251    return GetBlock()->GetGraph()->GetDoubleConstant(
4252        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4253  }
4254
4255  DECLARE_INSTRUCTION(Add);
4256
4257 private:
4258  DISALLOW_COPY_AND_ASSIGN(HAdd);
4259};
4260
4261class HSub : public HBinaryOperation {
4262 public:
4263  HSub(Primitive::Type result_type,
4264       HInstruction* left,
4265       HInstruction* right,
4266       uint32_t dex_pc = kNoDexPc)
4267      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4268
4269  template <typename T> T Compute(T x, T y) const { return x - y; }
4270
4271  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4272    return GetBlock()->GetGraph()->GetIntConstant(
4273        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4274  }
4275  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4276    return GetBlock()->GetGraph()->GetLongConstant(
4277        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4278  }
4279  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4280    return GetBlock()->GetGraph()->GetFloatConstant(
4281        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4282  }
4283  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4284    return GetBlock()->GetGraph()->GetDoubleConstant(
4285        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4286  }
4287
4288  DECLARE_INSTRUCTION(Sub);
4289
4290 private:
4291  DISALLOW_COPY_AND_ASSIGN(HSub);
4292};
4293
4294class HMul : public HBinaryOperation {
4295 public:
4296  HMul(Primitive::Type result_type,
4297       HInstruction* left,
4298       HInstruction* right,
4299       uint32_t dex_pc = kNoDexPc)
4300      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4301
4302  bool IsCommutative() const OVERRIDE { return true; }
4303
4304  template <typename T> T Compute(T x, T y) const { return x * y; }
4305
4306  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4307    return GetBlock()->GetGraph()->GetIntConstant(
4308        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4309  }
4310  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4311    return GetBlock()->GetGraph()->GetLongConstant(
4312        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4313  }
4314  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4315    return GetBlock()->GetGraph()->GetFloatConstant(
4316        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4317  }
4318  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4319    return GetBlock()->GetGraph()->GetDoubleConstant(
4320        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4321  }
4322
4323  DECLARE_INSTRUCTION(Mul);
4324
4325 private:
4326  DISALLOW_COPY_AND_ASSIGN(HMul);
4327};
4328
4329class HDiv : public HBinaryOperation {
4330 public:
4331  HDiv(Primitive::Type result_type,
4332       HInstruction* left,
4333       HInstruction* right,
4334       uint32_t dex_pc)
4335      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4336
4337  template <typename T>
4338  T ComputeIntegral(T x, T y) const {
4339    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4340    // Our graph structure ensures we never have 0 for `y` during
4341    // constant folding.
4342    DCHECK_NE(y, 0);
4343    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4344    return (y == -1) ? -x : x / y;
4345  }
4346
4347  template <typename T>
4348  T ComputeFP(T x, T y) const {
4349    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4350    return x / y;
4351  }
4352
4353  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4354    return GetBlock()->GetGraph()->GetIntConstant(
4355        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4356  }
4357  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4358    return GetBlock()->GetGraph()->GetLongConstant(
4359        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4360  }
4361  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4362    return GetBlock()->GetGraph()->GetFloatConstant(
4363        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4364  }
4365  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4366    return GetBlock()->GetGraph()->GetDoubleConstant(
4367        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4368  }
4369
4370  static SideEffects SideEffectsForArchRuntimeCalls() {
4371    // The generated code can use a runtime call.
4372    return SideEffects::CanTriggerGC();
4373  }
4374
4375  DECLARE_INSTRUCTION(Div);
4376
4377 private:
4378  DISALLOW_COPY_AND_ASSIGN(HDiv);
4379};
4380
4381class HRem : public HBinaryOperation {
4382 public:
4383  HRem(Primitive::Type result_type,
4384       HInstruction* left,
4385       HInstruction* right,
4386       uint32_t dex_pc)
4387      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4388
4389  template <typename T>
4390  T ComputeIntegral(T x, T y) const {
4391    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4392    // Our graph structure ensures we never have 0 for `y` during
4393    // constant folding.
4394    DCHECK_NE(y, 0);
4395    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4396    return (y == -1) ? 0 : x % y;
4397  }
4398
4399  template <typename T>
4400  T ComputeFP(T x, T y) const {
4401    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4402    return std::fmod(x, y);
4403  }
4404
4405  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4406    return GetBlock()->GetGraph()->GetIntConstant(
4407        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4408  }
4409  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4410    return GetBlock()->GetGraph()->GetLongConstant(
4411        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4412  }
4413  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4414    return GetBlock()->GetGraph()->GetFloatConstant(
4415        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4416  }
4417  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4418    return GetBlock()->GetGraph()->GetDoubleConstant(
4419        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4420  }
4421
4422  static SideEffects SideEffectsForArchRuntimeCalls() {
4423    return SideEffects::CanTriggerGC();
4424  }
4425
4426  DECLARE_INSTRUCTION(Rem);
4427
4428 private:
4429  DISALLOW_COPY_AND_ASSIGN(HRem);
4430};
4431
// Guard preceding an integral division/remainder: throws ArithmeticException
// at runtime when its input is zero, otherwise passes the value through.
class HDivZeroCheck : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is transparent: its type is whatever flows into it.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two checks on the same input are interchangeable (enables GVN).
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4457
4458class HShl : public HBinaryOperation {
4459 public:
4460  HShl(Primitive::Type result_type,
4461       HInstruction* value,
4462       HInstruction* distance,
4463       uint32_t dex_pc = kNoDexPc)
4464      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4465    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4466    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4467  }
4468
4469  template <typename T>
4470  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4471    return value << (distance & max_shift_distance);
4472  }
4473
4474  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4475    return GetBlock()->GetGraph()->GetIntConstant(
4476        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4477  }
4478  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4479    return GetBlock()->GetGraph()->GetLongConstant(
4480        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4481  }
4482  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4483                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4484    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4485    UNREACHABLE();
4486  }
4487  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4488                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4489    LOG(FATAL) << DebugName() << " is not defined for float values";
4490    UNREACHABLE();
4491  }
4492  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4493                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4494    LOG(FATAL) << DebugName() << " is not defined for double values";
4495    UNREACHABLE();
4496  }
4497
4498  DECLARE_INSTRUCTION(Shl);
4499
4500 private:
4501  DISALLOW_COPY_AND_ASSIGN(HShl);
4502};
4503
4504class HShr : public HBinaryOperation {
4505 public:
4506  HShr(Primitive::Type result_type,
4507       HInstruction* value,
4508       HInstruction* distance,
4509       uint32_t dex_pc = kNoDexPc)
4510      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4511    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4512    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4513  }
4514
4515  template <typename T>
4516  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4517    return value >> (distance & max_shift_distance);
4518  }
4519
4520  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4521    return GetBlock()->GetGraph()->GetIntConstant(
4522        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4523  }
4524  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4525    return GetBlock()->GetGraph()->GetLongConstant(
4526        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4527  }
4528  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4529                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4530    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4531    UNREACHABLE();
4532  }
4533  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4534                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4535    LOG(FATAL) << DebugName() << " is not defined for float values";
4536    UNREACHABLE();
4537  }
4538  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4539                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4540    LOG(FATAL) << DebugName() << " is not defined for double values";
4541    UNREACHABLE();
4542  }
4543
4544  DECLARE_INSTRUCTION(Shr);
4545
4546 private:
4547  DISALLOW_COPY_AND_ASSIGN(HShr);
4548};
4549
4550class HUShr : public HBinaryOperation {
4551 public:
4552  HUShr(Primitive::Type result_type,
4553        HInstruction* value,
4554        HInstruction* distance,
4555        uint32_t dex_pc = kNoDexPc)
4556      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4557    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4558    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4559  }
4560
4561  template <typename T>
4562  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4563    typedef typename std::make_unsigned<T>::type V;
4564    V ux = static_cast<V>(value);
4565    return static_cast<T>(ux >> (distance & max_shift_distance));
4566  }
4567
4568  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4569    return GetBlock()->GetGraph()->GetIntConstant(
4570        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4571  }
4572  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4573    return GetBlock()->GetGraph()->GetLongConstant(
4574        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4575  }
4576  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4577                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4578    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4579    UNREACHABLE();
4580  }
4581  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4582                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4583    LOG(FATAL) << DebugName() << " is not defined for float values";
4584    UNREACHABLE();
4585  }
4586  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4587                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4588    LOG(FATAL) << DebugName() << " is not defined for double values";
4589    UNREACHABLE();
4590  }
4591
4592  DECLARE_INSTRUCTION(UShr);
4593
4594 private:
4595  DISALLOW_COPY_AND_ASSIGN(HUShr);
4596};
4597
4598class HAnd : public HBinaryOperation {
4599 public:
4600  HAnd(Primitive::Type result_type,
4601       HInstruction* left,
4602       HInstruction* right,
4603       uint32_t dex_pc = kNoDexPc)
4604      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4605
4606  bool IsCommutative() const OVERRIDE { return true; }
4607
4608  template <typename T> T Compute(T x, T y) const { return x & y; }
4609
4610  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4611    return GetBlock()->GetGraph()->GetIntConstant(
4612        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4613  }
4614  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4615    return GetBlock()->GetGraph()->GetLongConstant(
4616        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4617  }
4618  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4619                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4620    LOG(FATAL) << DebugName() << " is not defined for float values";
4621    UNREACHABLE();
4622  }
4623  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4624                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4625    LOG(FATAL) << DebugName() << " is not defined for double values";
4626    UNREACHABLE();
4627  }
4628
4629  DECLARE_INSTRUCTION(And);
4630
4631 private:
4632  DISALLOW_COPY_AND_ASSIGN(HAnd);
4633};
4634
4635class HOr : public HBinaryOperation {
4636 public:
4637  HOr(Primitive::Type result_type,
4638      HInstruction* left,
4639      HInstruction* right,
4640      uint32_t dex_pc = kNoDexPc)
4641      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4642
4643  bool IsCommutative() const OVERRIDE { return true; }
4644
4645  template <typename T> T Compute(T x, T y) const { return x | y; }
4646
4647  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4648    return GetBlock()->GetGraph()->GetIntConstant(
4649        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4650  }
4651  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4652    return GetBlock()->GetGraph()->GetLongConstant(
4653        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4654  }
4655  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4656                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4657    LOG(FATAL) << DebugName() << " is not defined for float values";
4658    UNREACHABLE();
4659  }
4660  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4661                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4662    LOG(FATAL) << DebugName() << " is not defined for double values";
4663    UNREACHABLE();
4664  }
4665
4666  DECLARE_INSTRUCTION(Or);
4667
4668 private:
4669  DISALLOW_COPY_AND_ASSIGN(HOr);
4670};
4671
4672class HXor : public HBinaryOperation {
4673 public:
4674  HXor(Primitive::Type result_type,
4675       HInstruction* left,
4676       HInstruction* right,
4677       uint32_t dex_pc = kNoDexPc)
4678      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4679
4680  bool IsCommutative() const OVERRIDE { return true; }
4681
4682  template <typename T> T Compute(T x, T y) const { return x ^ y; }
4683
4684  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4685    return GetBlock()->GetGraph()->GetIntConstant(
4686        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4687  }
4688  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4689    return GetBlock()->GetGraph()->GetLongConstant(
4690        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4691  }
4692  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4693                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4694    LOG(FATAL) << DebugName() << " is not defined for float values";
4695    UNREACHABLE();
4696  }
4697  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4698                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4699    LOG(FATAL) << DebugName() << " is not defined for double values";
4700    UNREACHABLE();
4701  }
4702
4703  DECLARE_INSTRUCTION(Xor);
4704
4705 private:
4706  DISALLOW_COPY_AND_ASSIGN(HXor);
4707};
4708
4709class HRor : public HBinaryOperation {
4710 public:
4711  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4712    : HBinaryOperation(result_type, value, distance) {
4713    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4714    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4715  }
4716
4717  template <typename T>
4718  T Compute(T value, int32_t distance, int32_t max_shift_value) const {
4719    typedef typename std::make_unsigned<T>::type V;
4720    V ux = static_cast<V>(value);
4721    if ((distance & max_shift_value) == 0) {
4722      return static_cast<T>(ux);
4723    } else {
4724      const V reg_bits = sizeof(T) * 8;
4725      return static_cast<T>(ux >> (distance & max_shift_value)) |
4726                           (value << (reg_bits - (distance & max_shift_value)));
4727    }
4728  }
4729
4730  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4731    return GetBlock()->GetGraph()->GetIntConstant(
4732        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4733  }
4734  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4735    return GetBlock()->GetGraph()->GetLongConstant(
4736        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4737  }
4738  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4739                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4740    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4741    UNREACHABLE();
4742  }
4743  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4744                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4745    LOG(FATAL) << DebugName() << " is not defined for float values";
4746    UNREACHABLE();
4747  }
4748  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4749                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4750    LOG(FATAL) << DebugName() << " is not defined for double values";
4751    UNREACHABLE();
4752  }
4753
4754  DECLARE_INSTRUCTION(Ror);
4755
4756 private:
4757  DISALLOW_COPY_AND_ASSIGN(HRor);
4758};
4759
4760// The value of a parameter in this method. Its location depends on
4761// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  // `is_this` marks the implicit 'this' argument of an instance method.
  // 'this' can never be null, so kFlagCanBeNull is initialized to !is_this.
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  // Nullability may be refined later (e.g. by analyses calling SetCanBeNull).
  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4803
4804class HNot : public HUnaryOperation {
4805 public:
4806  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4807      : HUnaryOperation(result_type, input, dex_pc) {}
4808
4809  bool CanBeMoved() const OVERRIDE { return true; }
4810  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4811    return true;
4812  }
4813
4814  template <typename T> T Compute(T x) const { return ~x; }
4815
4816  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4817    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4818  }
4819  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4820    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4821  }
4822  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4823    LOG(FATAL) << DebugName() << " is not defined for float values";
4824    UNREACHABLE();
4825  }
4826  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4827    LOG(FATAL) << DebugName() << " is not defined for double values";
4828    UNREACHABLE();
4829  }
4830
4831  DECLARE_INSTRUCTION(Not);
4832
4833 private:
4834  DISALLOW_COPY_AND_ASSIGN(HNot);
4835};
4836
4837class HBooleanNot : public HUnaryOperation {
4838 public:
4839  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
4840      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
4841
4842  bool CanBeMoved() const OVERRIDE { return true; }
4843  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4844    return true;
4845  }
4846
4847  template <typename T> bool Compute(T x) const {
4848    DCHECK(IsUint<1>(x)) << x;
4849    return !x;
4850  }
4851
4852  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4853    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4854  }
4855  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4856    LOG(FATAL) << DebugName() << " is not defined for long values";
4857    UNREACHABLE();
4858  }
4859  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4860    LOG(FATAL) << DebugName() << " is not defined for float values";
4861    UNREACHABLE();
4862  }
4863  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4864    LOG(FATAL) << DebugName() << " is not defined for double values";
4865    UNREACHABLE();
4866  }
4867
4868  DECLARE_INSTRUCTION(BooleanNot);
4869
4870 private:
4871  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
4872};
4873
4874class HTypeConversion : public HExpression<1> {
4875 public:
4876  // Instantiate a type conversion of `input` to `result_type`.
4877  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
4878      : HExpression(result_type,
4879                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
4880                    dex_pc) {
4881    SetRawInputAt(0, input);
4882    // Invariant: We should never generate a conversion to a Boolean value.
4883    DCHECK_NE(Primitive::kPrimBoolean, result_type);
4884  }
4885
4886  HInstruction* GetInput() const { return InputAt(0); }
4887  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
4888  Primitive::Type GetResultType() const { return GetType(); }
4889
4890  bool CanBeMoved() const OVERRIDE { return true; }
4891  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
4892
4893  // Try to statically evaluate the conversion and return a HConstant
4894  // containing the result.  If the input cannot be converted, return nullptr.
4895  HConstant* TryStaticEvaluation() const;
4896
4897  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
4898                                                    Primitive::Type result_type) {
4899    // Some architectures may not require the 'GC' side effects, but at this point
4900    // in the compilation process we do not know what architecture we will
4901    // generate code for, so we must be conservative.
4902    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
4903        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
4904      return SideEffects::CanTriggerGC();
4905    }
4906    return SideEffects::None();
4907  }
4908
4909  DECLARE_INSTRUCTION(TypeConversion);
4910
4911 private:
4912  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
4913};
4914
// Sentinel for "no register assigned"; -1 wraps to the maximum uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
4916
4917class HNullCheck : public HExpression<1> {
4918 public:
4919  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
4920  // constructor.
4921  HNullCheck(HInstruction* value, uint32_t dex_pc)
4922      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4923    SetRawInputAt(0, value);
4924  }
4925
4926  bool CanBeMoved() const OVERRIDE { return true; }
4927  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4928    return true;
4929  }
4930
4931  bool NeedsEnvironment() const OVERRIDE { return true; }
4932
4933  bool CanThrow() const OVERRIDE { return true; }
4934
4935  bool CanBeNull() const OVERRIDE { return false; }
4936
4937
4938  DECLARE_INSTRUCTION(NullCheck);
4939
4940 private:
4941  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
4942};
4943
// Immutable description of a resolved field: its offset, type, volatility,
// dex-file index, and declaring class. Shared by the field get/set nodes.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  // All members are const: a FieldInfo never changes after construction.
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // Field index in the dex file.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};
4978
4979class HInstanceFieldGet : public HExpression<1> {
4980 public:
4981  HInstanceFieldGet(HInstruction* value,
4982                    Primitive::Type field_type,
4983                    MemberOffset field_offset,
4984                    bool is_volatile,
4985                    uint32_t field_idx,
4986                    uint16_t declaring_class_def_index,
4987                    const DexFile& dex_file,
4988                    Handle<mirror::DexCache> dex_cache,
4989                    uint32_t dex_pc)
4990      : HExpression(field_type,
4991                    SideEffects::FieldReadOfType(field_type, is_volatile),
4992                    dex_pc),
4993        field_info_(field_offset,
4994                    field_type,
4995                    is_volatile,
4996                    field_idx,
4997                    declaring_class_def_index,
4998                    dex_file,
4999                    dex_cache) {
5000    SetRawInputAt(0, value);
5001  }
5002
5003  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
5004
5005  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
5006    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
5007    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5008  }
5009
5010  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5011    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
5012  }
5013
5014  size_t ComputeHashCode() const OVERRIDE {
5015    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5016  }
5017
5018  const FieldInfo& GetFieldInfo() const { return field_info_; }
5019  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5020  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5021  bool IsVolatile() const { return field_info_.IsVolatile(); }
5022
5023  DECLARE_INSTRUCTION(InstanceFieldGet);
5024
5025 private:
5026  const FieldInfo field_info_;
5027
5028  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
5029};
5030
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  // Stores `value` into the field described by the remaining arguments on
  // `object`. Input 0 is the object, input 1 the value.
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Conservatively assume the stored value may be null until an analysis
    // calls ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // Only offsets within the first page can fault reliably on a null base.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  // Packed-bit layout: one flag appended after the generic instruction bits.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5081
5082class HArrayGet : public HExpression<2> {
5083 public:
5084  HArrayGet(HInstruction* array,
5085            HInstruction* index,
5086            Primitive::Type type,
5087            uint32_t dex_pc,
5088            SideEffects additional_side_effects = SideEffects::None())
5089      : HExpression(type,
5090                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
5091                    dex_pc) {
5092    SetRawInputAt(0, array);
5093    SetRawInputAt(1, index);
5094  }
5095
5096  bool CanBeMoved() const OVERRIDE { return true; }
5097  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5098    return true;
5099  }
5100  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5101    // TODO: We can be smarter here.
5102    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
5103    // which generates the implicit null check. There are cases when these can be removed
5104    // to produce better code. If we ever add optimizations to do so we should allow an
5105    // implicit check here (as long as the address falls in the first page).
5106    return false;
5107  }
5108
5109  bool IsEquivalentOf(HArrayGet* other) const {
5110    bool result = (GetDexPc() == other->GetDexPc());
5111    if (kIsDebugBuild && result) {
5112      DCHECK_EQ(GetBlock(), other->GetBlock());
5113      DCHECK_EQ(GetArray(), other->GetArray());
5114      DCHECK_EQ(GetIndex(), other->GetIndex());
5115      if (Primitive::IsIntOrLongType(GetType())) {
5116        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
5117      } else {
5118        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
5119        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
5120      }
5121    }
5122    return result;
5123  }
5124
5125  HInstruction* GetArray() const { return InputAt(0); }
5126  HInstruction* GetIndex() const { return InputAt(1); }
5127
5128  DECLARE_INSTRUCTION(ArrayGet);
5129
5130 private:
5131  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
5132};
5133
5134class HArraySet : public HTemplateInstruction<3> {
5135 public:
5136  HArraySet(HInstruction* array,
5137            HInstruction* index,
5138            HInstruction* value,
5139            Primitive::Type expected_component_type,
5140            uint32_t dex_pc,
5141            SideEffects additional_side_effects = SideEffects::None())
5142      : HTemplateInstruction(
5143            SideEffects::ArrayWriteOfType(expected_component_type).Union(
5144                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
5145                    additional_side_effects),
5146            dex_pc) {
5147    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
5148    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
5149    SetPackedFlag<kFlagValueCanBeNull>(true);
5150    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
5151    SetRawInputAt(0, array);
5152    SetRawInputAt(1, index);
5153    SetRawInputAt(2, value);
5154  }
5155
5156  bool NeedsEnvironment() const OVERRIDE {
5157    // We call a runtime method to throw ArrayStoreException.
5158    return NeedsTypeCheck();
5159  }
5160
5161  // Can throw ArrayStoreException.
5162  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }
5163
5164  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5165    // TODO: Same as for ArrayGet.
5166    return false;
5167  }
5168
5169  void ClearNeedsTypeCheck() {
5170    SetPackedFlag<kFlagNeedsTypeCheck>(false);
5171  }
5172
5173  void ClearValueCanBeNull() {
5174    SetPackedFlag<kFlagValueCanBeNull>(false);
5175  }
5176
5177  void SetStaticTypeOfArrayIsObjectArray() {
5178    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
5179  }
5180
5181  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5182  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
5183  bool StaticTypeOfArrayIsObjectArray() const {
5184    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
5185  }
5186
5187  HInstruction* GetArray() const { return InputAt(0); }
5188  HInstruction* GetIndex() const { return InputAt(1); }
5189  HInstruction* GetValue() const { return InputAt(2); }
5190
5191  Primitive::Type GetComponentType() const {
5192    // The Dex format does not type floating point index operations. Since the
5193    // `expected_component_type_` is set during building and can therefore not
5194    // be correct, we also check what is the value type. If it is a floating
5195    // point type, we must use that type.
5196    Primitive::Type value_type = GetValue()->GetType();
5197    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
5198        ? value_type
5199        : GetRawExpectedComponentType();
5200  }
5201
5202  Primitive::Type GetRawExpectedComponentType() const {
5203    return GetPackedField<ExpectedComponentTypeField>();
5204  }
5205
5206  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
5207    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
5208  }
5209
5210  DECLARE_INSTRUCTION(ArraySet);
5211
5212 private:
5213  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
5214  static constexpr size_t kFieldExpectedComponentTypeSize =
5215      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5216  static constexpr size_t kFlagNeedsTypeCheck =
5217      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
5218  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
5219  // Cached information for the reference_type_info_ so that codegen
5220  // does not need to inspect the static type.
5221  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
5222  static constexpr size_t kNumberOfArraySetPackedBits =
5223      kFlagStaticTypeOfArrayIsObjectArray + 1;
5224  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5225  using ExpectedComponentTypeField =
5226      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
5227
5228  DISALLOW_COPY_AND_ASSIGN(HArraySet);
5229};
5230
5231class HArrayLength : public HExpression<1> {
5232 public:
5233  HArrayLength(HInstruction* array, uint32_t dex_pc)
5234      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
5235    // Note that arrays do not change length, so the instruction does not
5236    // depend on any write.
5237    SetRawInputAt(0, array);
5238  }
5239
5240  bool CanBeMoved() const OVERRIDE { return true; }
5241  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5242    return true;
5243  }
5244  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5245    return obj == InputAt(0);
5246  }
5247
5248  DECLARE_INSTRUCTION(ArrayLength);
5249
5250 private:
5251  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
5252};
5253
5254class HBoundsCheck : public HExpression<2> {
5255 public:
5256  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5257  // constructor.
5258  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
5259      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5260    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5261    SetRawInputAt(0, index);
5262    SetRawInputAt(1, length);
5263  }
5264
5265  bool CanBeMoved() const OVERRIDE { return true; }
5266  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5267    return true;
5268  }
5269
5270  bool NeedsEnvironment() const OVERRIDE { return true; }
5271
5272  bool CanThrow() const OVERRIDE { return true; }
5273
5274  HInstruction* GetIndex() const { return InputAt(0); }
5275
5276  DECLARE_INSTRUCTION(BoundsCheck);
5277
5278 private:
5279  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5280};
5281
5282class HSuspendCheck : public HTemplateInstruction<0> {
5283 public:
5284  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5285      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5286
5287  bool NeedsEnvironment() const OVERRIDE {
5288    return true;
5289  }
5290
5291  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5292  SlowPathCode* GetSlowPath() const { return slow_path_; }
5293
5294  DECLARE_INSTRUCTION(SuspendCheck);
5295
5296 private:
5297  // Only used for code generation, in order to share the same slow path between back edges
5298  // of a same loop.
5299  SlowPathCode* slow_path_;
5300
5301  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5302};
5303
5304// Pseudo-instruction which provides the native debugger with mapping information.
5305// It ensures that we can generate line number and local variables at this point.
5306class HNativeDebugInfo : public HTemplateInstruction<0> {
5307 public:
5308  explicit HNativeDebugInfo(uint32_t dex_pc)
5309      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5310
5311  bool NeedsEnvironment() const OVERRIDE {
5312    return true;
5313  }
5314
5315  DECLARE_INSTRUCTION(NativeDebugInfo);
5316
5317 private:
5318  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5319};
5320
5321/**
5322 * Instruction to load a Class object.
5323 */
class HLoadClass : public HExpression<1> {
 public:
  // Loads the Class object for `type_index` in `dex_file`, using the current
  // method (input 0) to reach the dex cache.
  HLoadClass(HCurrentMethod* current_method,
             uint16_t type_index,
             const DexFile& dex_file,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check,
             bool is_in_dex_cache)
      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class || !needs_access_check);

    SetPackedFlag<kFlagIsReferrersClass>(is_referrers_class);
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
    SetPackedFlag<kFlagGenerateClInitCheck>(false);
    SetRawInputAt(0, current_method);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Equality compares the type index and all packed flags (except as noted).
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    // Note that we don't need to test for generate_clinit_check_.
    // Whether or not we need to generate the clinit check is processed in
    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
    return other->AsLoadClass()->type_index_ == type_index_ &&
        other->AsLoadClass()->GetPackedFields() == GetPackedFields();
  }

  size_t ComputeHashCode() const OVERRIDE { return type_index_; }

  uint16_t GetTypeIndex() const { return type_index_; }
  // A loaded class reference is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime();
  }

  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
  }

  // A runtime call is needed for the clinit check, for a class not yet in
  // the dex cache (unless it is the referrer's own class), or for an access
  // check.
  bool CanCallRuntime() const {
    return MustGenerateClinitCheck() ||
           (!IsReferrersClass() && !IsInDexCache()) ||
           NeedsAccessCheck();
  }


  bool CanThrow() const OVERRIDE {
    return CanCallRuntime();
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  const DexFile& GetDexFile() { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsReferrersClass() const { return GetPackedFlag<kFlagIsReferrersClass>(); }
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  // Packed-bit layout: four flags appended after the expression bits.
  static constexpr size_t kFlagIsReferrersClass    = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagNeedsAccessCheck    = kFlagIsReferrersClass + 1;
  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
  static constexpr size_t kNumberOfLoadClassPackedBits = kFlagGenerateClInitCheck + 1;
  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");

  const uint16_t type_index_;
  const DexFile& dex_file_;

  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};
5427
5428class HLoadString : public HExpression<1> {
5429 public:
5430  // Determines how to load the String.
5431  enum class LoadKind {
5432    // Use boot image String* address that will be known at link time.
5433    // Used for boot image strings referenced by boot image code in non-PIC mode.
5434    kBootImageLinkTimeAddress,
5435
5436    // Use PC-relative boot image String* address that will be known at link time.
5437    // Used for boot image strings referenced by boot image code in PIC mode.
5438    kBootImageLinkTimePcRelative,
5439
5440    // Use a known boot image String* address, embedded in the code by the codegen.
5441    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
5442    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5443    // GetIncludePatchInformation().
5444    kBootImageAddress,
5445
5446    // Load from the resolved strings array at an absolute address.
5447    // Used for strings outside the boot image referenced by JIT-compiled code.
5448    kDexCacheAddress,
5449
5450    // Load from resolved strings array in the dex cache using a PC-relative load.
5451    // Used for strings outside boot image when we know that we can access
5452    // the dex cache arrays using a PC-relative load.
5453    kDexCachePcRelative,
5454
5455    // Load from resolved strings array accessed through the class loaded from
5456    // the compiled method's own ArtMethod*. This is the default access type when
5457    // all other types are unavailable.
5458    kDexCacheViaMethod,
5459
5460    kLast = kDexCacheViaMethod
5461  };
5462
5463  HLoadString(HCurrentMethod* current_method,
5464              uint32_t string_index,
5465              const DexFile& dex_file,
5466              uint32_t dex_pc)
5467      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5468        string_index_(string_index) {
5469    SetPackedFlag<kFlagIsInDexCache>(false);
5470    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
5471    load_data_.ref.dex_file = &dex_file;
5472    SetRawInputAt(0, current_method);
5473  }
5474
5475  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5476    DCHECK(HasAddress(load_kind));
5477    load_data_.address = address;
5478    SetLoadKindInternal(load_kind);
5479  }
5480
5481  void SetLoadKindWithStringReference(LoadKind load_kind,
5482                                      const DexFile& dex_file,
5483                                      uint32_t string_index) {
5484    DCHECK(HasStringReference(load_kind));
5485    load_data_.ref.dex_file = &dex_file;
5486    string_index_ = string_index;
5487    SetLoadKindInternal(load_kind);
5488  }
5489
5490  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5491                                        const DexFile& dex_file,
5492                                        uint32_t element_index) {
5493    DCHECK(HasDexCacheReference(load_kind));
5494    load_data_.ref.dex_file = &dex_file;
5495    load_data_.ref.dex_cache_element_index = element_index;
5496    SetLoadKindInternal(load_kind);
5497  }
5498
5499  LoadKind GetLoadKind() const {
5500    return GetPackedField<LoadKindField>();
5501  }
5502
5503  const DexFile& GetDexFile() const;
5504
5505  uint32_t GetStringIndex() const {
5506    DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
5507    return string_index_;
5508  }
5509
5510  uint32_t GetDexCacheElementOffset() const;
5511
5512  uint64_t GetAddress() const {
5513    DCHECK(HasAddress(GetLoadKind()));
5514    return load_data_.address;
5515  }
5516
5517  bool CanBeMoved() const OVERRIDE { return true; }
5518
5519  bool InstructionDataEquals(HInstruction* other) const OVERRIDE;
5520
5521  size_t ComputeHashCode() const OVERRIDE { return string_index_; }
5522
5523  // Will call the runtime if we need to load the string through
5524  // the dex cache and the string is not guaranteed to be there yet.
5525  bool NeedsEnvironment() const OVERRIDE {
5526    LoadKind load_kind = GetLoadKind();
5527    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
5528        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5529        load_kind == LoadKind::kBootImageAddress) {
5530      return false;
5531    }
5532    return !IsInDexCache();
5533  }
5534
5535  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5536    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
5537  }
5538
5539  bool CanBeNull() const OVERRIDE { return false; }
5540  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }
5541
5542  static SideEffects SideEffectsForArchRuntimeCalls() {
5543    return SideEffects::CanTriggerGC();
5544  }
5545
5546  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5547
5548  void MarkInDexCache() {
5549    SetPackedFlag<kFlagIsInDexCache>(true);
5550    DCHECK(!NeedsEnvironment());
5551    RemoveEnvironment();
5552    SetSideEffects(SideEffects::None());
5553  }
5554
5555  size_t InputCount() const OVERRIDE {
5556    return (InputAt(0) != nullptr) ? 1u : 0u;
5557  }
5558
5559  void AddSpecialInput(HInstruction* special_input);
5560
5561  DECLARE_INSTRUCTION(LoadString);
5562
5563 private:
5564  static constexpr size_t kFlagIsInDexCache = kNumberOfExpressionPackedBits;
5565  static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
5566  static constexpr size_t kFieldLoadKindSize =
5567      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5568  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5569  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5570  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5571
5572  static bool HasStringReference(LoadKind load_kind) {
5573    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5574        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5575        load_kind == LoadKind::kDexCacheViaMethod;
5576  }
5577
5578  static bool HasAddress(LoadKind load_kind) {
5579    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
5580  }
5581
5582  static bool HasDexCacheReference(LoadKind load_kind) {
5583    return load_kind == LoadKind::kDexCachePcRelative;
5584  }
5585
5586  void SetLoadKindInternal(LoadKind load_kind);
5587
5588  // String index serves also as the hash code and it's also needed for slow-paths,
5589  // so it must not be overwritten with other load data.
5590  uint32_t string_index_;
5591
5592  union {
5593    struct {
5594      const DexFile* dex_file;            // For string reference and dex cache reference.
5595      uint32_t dex_cache_element_index;   // Only for dex cache reference.
5596    } ref;
5597    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
5598  } load_data_;
5599
5600  DISALLOW_COPY_AND_ASSIGN(HLoadString);
5601};
5602std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5603
5604// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5605inline const DexFile& HLoadString::GetDexFile() const {
5606  DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
5607      << GetLoadKind();
5608  return *load_data_.ref.dex_file;
5609}
5610
5611// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5612inline uint32_t HLoadString::GetDexCacheElementOffset() const {
5613  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5614  return load_data_.ref.dex_cache_element_index;
5615}
5616
5617// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5618inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5619  // The special input is used for PC-relative loads on some architectures.
5620  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5621         GetLoadKind() == LoadKind::kDexCachePcRelative) << GetLoadKind();
5622  DCHECK(InputAt(0) == nullptr);
5623  SetRawInputAt(0u, special_input);
5624  special_input->AddUseAt(this, 0);
5625}
5626
5627/**
5628 * Performs an initialization check on its Class object input.
5629 */
5630class HClinitCheck : public HExpression<1> {
5631 public:
5632  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5633      : HExpression(
5634            Primitive::kPrimNot,
5635            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5636            dex_pc) {
5637    SetRawInputAt(0, constant);
5638  }
5639
5640  bool CanBeMoved() const OVERRIDE { return true; }
5641  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5642    return true;
5643  }
5644
5645  bool NeedsEnvironment() const OVERRIDE {
5646    // May call runtime to initialize the class.
5647    return true;
5648  }
5649
5650  bool CanThrow() const OVERRIDE { return true; }
5651
5652  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5653
5654  DECLARE_INSTRUCTION(ClinitCheck);
5655
5656 private:
5657  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5658};
5659
5660class HStaticFieldGet : public HExpression<1> {
5661 public:
5662  HStaticFieldGet(HInstruction* cls,
5663                  Primitive::Type field_type,
5664                  MemberOffset field_offset,
5665                  bool is_volatile,
5666                  uint32_t field_idx,
5667                  uint16_t declaring_class_def_index,
5668                  const DexFile& dex_file,
5669                  Handle<mirror::DexCache> dex_cache,
5670                  uint32_t dex_pc)
5671      : HExpression(field_type,
5672                    SideEffects::FieldReadOfType(field_type, is_volatile),
5673                    dex_pc),
5674        field_info_(field_offset,
5675                    field_type,
5676                    is_volatile,
5677                    field_idx,
5678                    declaring_class_def_index,
5679                    dex_file,
5680                    dex_cache) {
5681    SetRawInputAt(0, cls);
5682  }
5683
5684
5685  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
5686
5687  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
5688    HStaticFieldGet* other_get = other->AsStaticFieldGet();
5689    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5690  }
5691
5692  size_t ComputeHashCode() const OVERRIDE {
5693    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5694  }
5695
5696  const FieldInfo& GetFieldInfo() const { return field_info_; }
5697  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5698  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5699  bool IsVolatile() const { return field_info_.IsVolatile(); }
5700
5701  DECLARE_INSTRUCTION(StaticFieldGet);
5702
5703 private:
5704  const FieldInfo field_info_;
5705
5706  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
5707};
5708
5709class HStaticFieldSet : public HTemplateInstruction<2> {
5710 public:
5711  HStaticFieldSet(HInstruction* cls,
5712                  HInstruction* value,
5713                  Primitive::Type field_type,
5714                  MemberOffset field_offset,
5715                  bool is_volatile,
5716                  uint32_t field_idx,
5717                  uint16_t declaring_class_def_index,
5718                  const DexFile& dex_file,
5719                  Handle<mirror::DexCache> dex_cache,
5720                  uint32_t dex_pc)
5721      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
5722                             dex_pc),
5723        field_info_(field_offset,
5724                    field_type,
5725                    is_volatile,
5726                    field_idx,
5727                    declaring_class_def_index,
5728                    dex_file,
5729                    dex_cache) {
5730    SetPackedFlag<kFlagValueCanBeNull>(true);
5731    SetRawInputAt(0, cls);
5732    SetRawInputAt(1, value);
5733  }
5734
5735  const FieldInfo& GetFieldInfo() const { return field_info_; }
5736  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5737  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5738  bool IsVolatile() const { return field_info_.IsVolatile(); }
5739
5740  HInstruction* GetValue() const { return InputAt(1); }
5741  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5742  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5743
5744  DECLARE_INSTRUCTION(StaticFieldSet);
5745
5746 private:
5747  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
5748  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
5749  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
5750                "Too many packed fields.");
5751
5752  const FieldInfo field_info_;
5753
5754  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
5755};
5756
5757class HUnresolvedInstanceFieldGet : public HExpression<1> {
5758 public:
5759  HUnresolvedInstanceFieldGet(HInstruction* obj,
5760                              Primitive::Type field_type,
5761                              uint32_t field_index,
5762                              uint32_t dex_pc)
5763      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
5764        field_index_(field_index) {
5765    SetRawInputAt(0, obj);
5766  }
5767
5768  bool NeedsEnvironment() const OVERRIDE { return true; }
5769  bool CanThrow() const OVERRIDE { return true; }
5770
5771  Primitive::Type GetFieldType() const { return GetType(); }
5772  uint32_t GetFieldIndex() const { return field_index_; }
5773
5774  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
5775
5776 private:
5777  const uint32_t field_index_;
5778
5779  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
5780};
5781
5782class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
5783 public:
5784  HUnresolvedInstanceFieldSet(HInstruction* obj,
5785                              HInstruction* value,
5786                              Primitive::Type field_type,
5787                              uint32_t field_index,
5788                              uint32_t dex_pc)
5789      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
5790        field_index_(field_index) {
5791    SetPackedField<FieldTypeField>(field_type);
5792    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
5793    SetRawInputAt(0, obj);
5794    SetRawInputAt(1, value);
5795  }
5796
5797  bool NeedsEnvironment() const OVERRIDE { return true; }
5798  bool CanThrow() const OVERRIDE { return true; }
5799
5800  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
5801  uint32_t GetFieldIndex() const { return field_index_; }
5802
5803  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
5804
5805 private:
5806  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
5807  static constexpr size_t kFieldFieldTypeSize =
5808      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5809  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
5810      kFieldFieldType + kFieldFieldTypeSize;
5811  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5812                "Too many packed fields.");
5813  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
5814
5815  const uint32_t field_index_;
5816
5817  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
5818};
5819
5820class HUnresolvedStaticFieldGet : public HExpression<0> {
5821 public:
5822  HUnresolvedStaticFieldGet(Primitive::Type field_type,
5823                            uint32_t field_index,
5824                            uint32_t dex_pc)
5825      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
5826        field_index_(field_index) {
5827  }
5828
5829  bool NeedsEnvironment() const OVERRIDE { return true; }
5830  bool CanThrow() const OVERRIDE { return true; }
5831
5832  Primitive::Type GetFieldType() const { return GetType(); }
5833  uint32_t GetFieldIndex() const { return field_index_; }
5834
5835  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
5836
5837 private:
5838  const uint32_t field_index_;
5839
5840  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
5841};
5842
5843class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
5844 public:
5845  HUnresolvedStaticFieldSet(HInstruction* value,
5846                            Primitive::Type field_type,
5847                            uint32_t field_index,
5848                            uint32_t dex_pc)
5849      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
5850        field_index_(field_index) {
5851    SetPackedField<FieldTypeField>(field_type);
5852    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
5853    SetRawInputAt(0, value);
5854  }
5855
5856  bool NeedsEnvironment() const OVERRIDE { return true; }
5857  bool CanThrow() const OVERRIDE { return true; }
5858
5859  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
5860  uint32_t GetFieldIndex() const { return field_index_; }
5861
5862  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
5863
5864 private:
5865  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
5866  static constexpr size_t kFieldFieldTypeSize =
5867      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5868  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
5869      kFieldFieldType + kFieldFieldTypeSize;
5870  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5871                "Too many packed fields.");
5872  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
5873
5874  const uint32_t field_index_;
5875
5876  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
5877};
5878
5879// Implement the move-exception DEX instruction.
5880class HLoadException : public HExpression<0> {
5881 public:
5882  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
5883      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}
5884
5885  bool CanBeNull() const OVERRIDE { return false; }
5886
5887  DECLARE_INSTRUCTION(LoadException);
5888
5889 private:
5890  DISALLOW_COPY_AND_ASSIGN(HLoadException);
5891};
5892
5893// Implicit part of move-exception which clears thread-local exception storage.
5894// Must not be removed because the runtime expects the TLS to get cleared.
5895class HClearException : public HTemplateInstruction<0> {
5896 public:
5897  explicit HClearException(uint32_t dex_pc = kNoDexPc)
5898      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}
5899
5900  DECLARE_INSTRUCTION(ClearException);
5901
5902 private:
5903  DISALLOW_COPY_AND_ASSIGN(HClearException);
5904};
5905
5906class HThrow : public HTemplateInstruction<1> {
5907 public:
5908  HThrow(HInstruction* exception, uint32_t dex_pc)
5909      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
5910    SetRawInputAt(0, exception);
5911  }
5912
5913  bool IsControlFlow() const OVERRIDE { return true; }
5914
5915  bool NeedsEnvironment() const OVERRIDE { return true; }
5916
5917  bool CanThrow() const OVERRIDE { return true; }
5918
5919
5920  DECLARE_INSTRUCTION(Throw);
5921
5922 private:
5923  DISALLOW_COPY_AND_ASSIGN(HThrow);
5924};
5925
5926/**
5927 * Implementation strategies for the code generator of a HInstanceOf
5928 * or `HCheckCast`.
5929 */
5930enum class TypeCheckKind {
5931  kUnresolvedCheck,       // Check against an unresolved type.
5932  kExactCheck,            // Can do a single class compare.
5933  kClassHierarchyCheck,   // Can just walk the super class chain.
5934  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
5935  kInterfaceCheck,        // No optimization yet when checking against an interface.
5936  kArrayObjectCheck,      // Can just check if the array is not primitive.
5937  kArrayCheck,            // No optimization yet when checking against a generic array.
5938  kLast = kArrayCheck
5939};
5940
5941std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
5942
5943class HInstanceOf : public HExpression<2> {
5944 public:
5945  HInstanceOf(HInstruction* object,
5946              HLoadClass* constant,
5947              TypeCheckKind check_kind,
5948              uint32_t dex_pc)
5949      : HExpression(Primitive::kPrimBoolean,
5950                    SideEffectsForArchRuntimeCalls(check_kind),
5951                    dex_pc) {
5952    SetPackedField<TypeCheckKindField>(check_kind);
5953    SetPackedFlag<kFlagMustDoNullCheck>(true);
5954    SetRawInputAt(0, object);
5955    SetRawInputAt(1, constant);
5956  }
5957
5958  bool CanBeMoved() const OVERRIDE { return true; }
5959
5960  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5961    return true;
5962  }
5963
5964  bool NeedsEnvironment() const OVERRIDE {
5965    return CanCallRuntime(GetTypeCheckKind());
5966  }
5967
5968  // Used only in code generation.
5969  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
5970  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
5971  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
5972  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
5973
5974  static bool CanCallRuntime(TypeCheckKind check_kind) {
5975    // Mips currently does runtime calls for any other checks.
5976    return check_kind != TypeCheckKind::kExactCheck;
5977  }
5978
5979  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
5980    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
5981  }
5982
5983  DECLARE_INSTRUCTION(InstanceOf);
5984
5985 private:
5986  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
5987  static constexpr size_t kFieldTypeCheckKindSize =
5988      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
5989  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
5990  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
5991  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5992  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
5993
5994  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
5995};
5996
5997class HBoundType : public HExpression<1> {
5998 public:
5999  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
6000      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
6001        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
6002    SetPackedFlag<kFlagUpperCanBeNull>(true);
6003    SetPackedFlag<kFlagCanBeNull>(true);
6004    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
6005    SetRawInputAt(0, input);
6006  }
6007
6008  // {Get,Set}Upper* should only be used in reference type propagation.
6009  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
6010  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
6011  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
6012
6013  void SetCanBeNull(bool can_be_null) {
6014    DCHECK(GetUpperCanBeNull() || !can_be_null);
6015    SetPackedFlag<kFlagCanBeNull>(can_be_null);
6016  }
6017
6018  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
6019
6020  DECLARE_INSTRUCTION(BoundType);
6021
6022 private:
6023  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
6024  // is false then CanBeNull() cannot be true).
6025  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
6026  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
6027  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
6028  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6029
6030  // Encodes the most upper class that this instruction can have. In other words
6031  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
6032  // It is used to bound the type in cases like:
6033  //   if (x instanceof ClassX) {
6034  //     // uper_bound_ will be ClassX
6035  //   }
6036  ReferenceTypeInfo upper_bound_;
6037
6038  DISALLOW_COPY_AND_ASSIGN(HBoundType);
6039};
6040
6041class HCheckCast : public HTemplateInstruction<2> {
6042 public:
6043  HCheckCast(HInstruction* object,
6044             HLoadClass* constant,
6045             TypeCheckKind check_kind,
6046             uint32_t dex_pc)
6047      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6048    SetPackedField<TypeCheckKindField>(check_kind);
6049    SetPackedFlag<kFlagMustDoNullCheck>(true);
6050    SetRawInputAt(0, object);
6051    SetRawInputAt(1, constant);
6052  }
6053
6054  bool CanBeMoved() const OVERRIDE { return true; }
6055
6056  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6057    return true;
6058  }
6059
6060  bool NeedsEnvironment() const OVERRIDE {
6061    // Instruction may throw a CheckCastError.
6062    return true;
6063  }
6064
6065  bool CanThrow() const OVERRIDE { return true; }
6066
6067  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
6068  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
6069  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
6070  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
6071
6072  DECLARE_INSTRUCTION(CheckCast);
6073
6074 private:
6075  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
6076  static constexpr size_t kFieldTypeCheckKindSize =
6077      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
6078  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
6079  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
6080  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6081  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
6082
6083  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
6084};
6085
6086class HMemoryBarrier : public HTemplateInstruction<0> {
6087 public:
6088  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6089      : HTemplateInstruction(
6090            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6091    SetPackedField<BarrierKindField>(barrier_kind);
6092  }
6093
6094  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6095
6096  DECLARE_INSTRUCTION(MemoryBarrier);
6097
6098 private:
6099  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6100  static constexpr size_t kFieldBarrierKindSize =
6101      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6102  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6103      kFieldBarrierKind + kFieldBarrierKindSize;
6104  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6105                "Too many packed fields.");
6106  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6107
6108  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6109};
6110
6111class HMonitorOperation : public HTemplateInstruction<1> {
6112 public:
6113  enum class OperationKind {
6114    kEnter,
6115    kExit,
6116    kLast = kExit
6117  };
6118
6119  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6120    : HTemplateInstruction(
6121          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6122          dex_pc) {
6123    SetPackedField<OperationKindField>(kind);
6124    SetRawInputAt(0, object);
6125  }
6126
6127  // Instruction may go into runtime, so we need an environment.
6128  bool NeedsEnvironment() const OVERRIDE { return true; }
6129
6130  bool CanThrow() const OVERRIDE {
6131    // Verifier guarantees that monitor-exit cannot throw.
6132    // This is important because it allows the HGraphBuilder to remove
6133    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6134    return IsEnter();
6135  }
6136
6137  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6138  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6139
6140  DECLARE_INSTRUCTION(MonitorOperation);
6141
6142 private:
6143  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6144  static constexpr size_t kFieldOperationKindSize =
6145      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6146  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6147      kFieldOperationKind + kFieldOperationKindSize;
6148  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6149                "Too many packed fields.");
6150  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6151
6152 private:
6153  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6154};
6155
6156class HSelect : public HExpression<3> {
6157 public:
6158  HSelect(HInstruction* condition,
6159          HInstruction* true_value,
6160          HInstruction* false_value,
6161          uint32_t dex_pc)
6162      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
6163    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
6164
6165    // First input must be `true_value` or `false_value` to allow codegens to
6166    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
6167    // that architectures which implement HSelect as a conditional move also
6168    // will not need to invert the condition.
6169    SetRawInputAt(0, false_value);
6170    SetRawInputAt(1, true_value);
6171    SetRawInputAt(2, condition);
6172  }
6173
6174  HInstruction* GetFalseValue() const { return InputAt(0); }
6175  HInstruction* GetTrueValue() const { return InputAt(1); }
6176  HInstruction* GetCondition() const { return InputAt(2); }
6177
6178  bool CanBeMoved() const OVERRIDE { return true; }
6179  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
6180
6181  bool CanBeNull() const OVERRIDE {
6182    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
6183  }
6184
6185  DECLARE_INSTRUCTION(Select);
6186
6187 private:
6188  DISALLOW_COPY_AND_ASSIGN(HSelect);
6189};
6190
6191class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
6192 public:
6193  MoveOperands(Location source,
6194               Location destination,
6195               Primitive::Type type,
6196               HInstruction* instruction)
6197      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
6198
6199  Location GetSource() const { return source_; }
6200  Location GetDestination() const { return destination_; }
6201
6202  void SetSource(Location value) { source_ = value; }
6203  void SetDestination(Location value) { destination_ = value; }
6204
6205  // The parallel move resolver marks moves as "in-progress" by clearing the
6206  // destination (but not the source).
6207  Location MarkPending() {
6208    DCHECK(!IsPending());
6209    Location dest = destination_;
6210    destination_ = Location::NoLocation();
6211    return dest;
6212  }
6213
6214  void ClearPending(Location dest) {
6215    DCHECK(IsPending());
6216    destination_ = dest;
6217  }
6218
6219  bool IsPending() const {
6220    DCHECK(source_.IsValid() || destination_.IsInvalid());
6221    return destination_.IsInvalid() && source_.IsValid();
6222  }
6223
6224  // True if this blocks a move from the given location.
6225  bool Blocks(Location loc) const {
6226    return !IsEliminated() && source_.OverlapsWith(loc);
6227  }
6228
6229  // A move is redundant if it's been eliminated, if its source and
6230  // destination are the same, or if its destination is unneeded.
6231  bool IsRedundant() const {
6232    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
6233  }
6234
6235  // We clear both operands to indicate move that's been eliminated.
6236  void Eliminate() {
6237    source_ = destination_ = Location::NoLocation();
6238  }
6239
6240  bool IsEliminated() const {
6241    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
6242    return source_.IsInvalid();
6243  }
6244
6245  Primitive::Type GetType() const { return type_; }
6246
6247  bool Is64BitMove() const {
6248    return Primitive::Is64BitType(type_);
6249  }
6250
6251  HInstruction* GetInstruction() const { return instruction_; }
6252
6253 private:
6254  Location source_;
6255  Location destination_;
6256  // The type this move is for.
6257  Primitive::Type type_;
6258  // The instruction this move is assocatied with. Null when this move is
6259  // for moving an input in the expected locations of user (including a phi user).
6260  // This is only used in debug mode, to ensure we do not connect interval siblings
6261  // in the same parallel move.
6262  HInstruction* instruction_;
6263};
6264
6265std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6266
6267static constexpr size_t kDefaultNumberOfMoves = 4;
6268
// A set of moves to be performed as if simultaneously: no destination may be
// clobbered before it has been read as a source. The parallel move resolver
// later sequences these into actual machine moves.
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Registers a move of `type` from `source` to `destination`. `instruction`
  // may be null; it is only consulted by the debug-build sanity checks below.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        // Check we are not adding a second move for the same instruction,
        // except for the spill-slot case handled below.
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              // Same instruction twice is only tolerated if the two moves
              // target different location kinds (e.g. register vs. stack).
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      // Two moves in the same parallel move must not write overlapping
      // destinations, or the result would depend on emission order.
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6324
6325}  // namespace art
6326
6327#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6328#include "nodes_shared.h"
6329#endif
6330#ifdef ART_ENABLE_CODEGEN_arm
6331#include "nodes_arm.h"
6332#endif
6333#ifdef ART_ENABLE_CODEGEN_arm64
6334#include "nodes_arm64.h"
6335#endif
6336#ifdef ART_ENABLE_CODEGEN_x86
6337#include "nodes_x86.h"
6338#endif
6339
6340namespace art {
6341
// Base visitor over the instructions of an HGraph. Subclasses override the
// per-instruction Visit##name functions generated below; unhandled
// instructions fall back to VisitInstruction (a no-op by default).
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback invoked by the generated Visit##name functions for any
  // instruction kind not overridden by a subclass. No-op by default.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  // Visit a single basic block (defined out of line).
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6371
// Visitor whose generated Visit##name functions delegate to the visit
// function of the instruction's super class, so a subclass can handle a
// whole family of instructions by overriding a single Visit of the base kind.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6388
6389class HInsertionOrderIterator : public ValueObject {
6390 public:
6391  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}
6392
6393  bool Done() const { return index_ == graph_.GetBlocks().size(); }
6394  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
6395  void Advance() { ++index_; }
6396
6397 private:
6398  const HGraph& graph_;
6399  size_t index_;
6400
6401  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
6402};
6403
6404class HReversePostOrderIterator : public ValueObject {
6405 public:
6406  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
6407    // Check that reverse post order of the graph has been built.
6408    DCHECK(!graph.GetReversePostOrder().empty());
6409  }
6410
6411  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
6412  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
6413  void Advance() { ++index_; }
6414
6415 private:
6416  const HGraph& graph_;
6417  size_t index_;
6418
6419  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
6420};
6421
6422class HPostOrderIterator : public ValueObject {
6423 public:
6424  explicit HPostOrderIterator(const HGraph& graph)
6425      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
6426    // Check that reverse post order of the graph has been built.
6427    DCHECK(!graph.GetReversePostOrder().empty());
6428  }
6429
6430  bool Done() const { return index_ == 0; }
6431  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
6432  void Advance() { --index_; }
6433
6434 private:
6435  const HGraph& graph_;
6436  size_t index_;
6437
6438  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
6439};
6440
6441class HLinearPostOrderIterator : public ValueObject {
6442 public:
6443  explicit HLinearPostOrderIterator(const HGraph& graph)
6444      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
6445
6446  bool Done() const { return index_ == 0; }
6447
6448  HBasicBlock* Current() const { return order_[index_ - 1u]; }
6449
6450  void Advance() {
6451    --index_;
6452    DCHECK_GE(index_, 0U);
6453  }
6454
6455 private:
6456  const ArenaVector<HBasicBlock*>& order_;
6457  size_t index_;
6458
6459  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
6460};
6461
6462class HLinearOrderIterator : public ValueObject {
6463 public:
6464  explicit HLinearOrderIterator(const HGraph& graph)
6465      : order_(graph.GetLinearOrder()), index_(0) {}
6466
6467  bool Done() const { return index_ == order_.size(); }
6468  HBasicBlock* Current() const { return order_[index_]; }
6469  void Advance() { ++index_; }
6470
6471 private:
6472  const ArenaVector<HBasicBlock*>& order_;
6473  size_t index_;
6474
6475  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
6476};
6477
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
6481class HBlocksInLoopIterator : public ValueObject {
6482 public:
6483  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6484      : blocks_in_loop_(info.GetBlocks()),
6485        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6486        index_(0) {
6487    if (!blocks_in_loop_.IsBitSet(index_)) {
6488      Advance();
6489    }
6490  }
6491
6492  bool Done() const { return index_ == blocks_.size(); }
6493  HBasicBlock* Current() const { return blocks_[index_]; }
6494  void Advance() {
6495    ++index_;
6496    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6497      if (blocks_in_loop_.IsBitSet(index_)) {
6498        break;
6499      }
6500    }
6501  }
6502
6503 private:
6504  const BitVector& blocks_in_loop_;
6505  const ArenaVector<HBasicBlock*>& blocks_;
6506  size_t index_;
6507
6508  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6509};
6510
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
6514class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6515 public:
6516  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6517      : blocks_in_loop_(info.GetBlocks()),
6518        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
6519        index_(0) {
6520    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6521      Advance();
6522    }
6523  }
6524
6525  bool Done() const { return index_ == blocks_.size(); }
6526  HBasicBlock* Current() const { return blocks_[index_]; }
6527  void Advance() {
6528    ++index_;
6529    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6530      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6531        break;
6532      }
6533    }
6534  }
6535
6536 private:
6537  const BitVector& blocks_in_loop_;
6538  const ArenaVector<HBasicBlock*>& blocks_;
6539  size_t index_;
6540
6541  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
6542};
6543
6544inline int64_t Int64FromConstant(HConstant* constant) {
6545  if (constant->IsIntConstant()) {
6546    return constant->AsIntConstant()->GetValue();
6547  } else if (constant->IsLongConstant()) {
6548    return constant->AsLongConstant()->GetValue();
6549  } else {
6550    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6551    return 0;
6552  }
6553}
6554
6555inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
6556  // For the purposes of the compiler, the dex files must actually be the same object
6557  // if we want to safely treat them as the same. This is especially important for JIT
6558  // as custom class loaders can open the same underlying file (or memory) multiple
6559  // times and provide different class resolution but no two class loaders should ever
6560  // use the same DexFile object - doing so is an unsupported hack that can lead to
6561  // all sorts of weird failures.
6562  return &lhs == &rhs;
6563}
6564
6565#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6566  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6567  inline const H##type* HInstruction::As##type() const {                       \
6568    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6569  }                                                                            \
6570  inline H##type* HInstruction::As##type() {                                   \
6571    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6572  }
6573
6574  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6575#undef INSTRUCTION_TYPE_CHECK
6576
6577// Create space in `blocks` for adding `number_of_new_blocks` entries
6578// starting at location `at`. Blocks after `at` are moved accordingly.
6579inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
6580                        size_t number_of_new_blocks,
6581                        size_t after) {
6582  DCHECK_LT(after, blocks->size());
6583  size_t old_size = blocks->size();
6584  size_t new_size = old_size + number_of_new_blocks;
6585  blocks->resize(new_size);
6586  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
6587}
6588
6589}  // namespace art
6590
6591#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6592