// nodes.h revision 18b36abc7cc03076fe1c399c0bb8ec8793cc6806
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/stl_util.h"
28#include "dex/compiler_enums.h"
29#include "entrypoints/quick/quick_entrypoints_enum.h"
30#include "handle.h"
31#include "handle_scope.h"
32#include "invoke_type.h"
33#include "locations.h"
34#include "method_reference.h"
35#include "mirror/class.h"
36#include "offsets.h"
37#include "primitive.h"
38#include "utils/array_ref.h"
39
40namespace art {
41
42class GraphChecker;
43class HBasicBlock;
44class HCurrentMethod;
45class HDoubleConstant;
46class HEnvironment;
47class HFloatConstant;
48class HGraphBuilder;
49class HGraphVisitor;
50class HInstruction;
51class HIntConstant;
52class HInvoke;
53class HLongConstant;
54class HNullConstant;
55class HPhi;
56class HSuspendCheck;
57class HTryBoundary;
58class LiveInterval;
59class LocationSummary;
60class SlowPathCode;
61class SsaBuilder;
62
63namespace mirror {
64class DexCache;
65}  // namespace mirror
66
67static const int kDefaultNumberOfBlocks = 8;
68static const int kDefaultNumberOfSuccessors = 2;
69static const int kDefaultNumberOfPredecessors = 2;
70static const int kDefaultNumberOfExceptionalPredecessors = 0;
71static const int kDefaultNumberOfDominatedBlocks = 1;
72static const int kDefaultNumberOfBackEdges = 1;
73
74// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
75static constexpr int32_t kMaxIntShiftDistance = 0x1f;
76// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
77static constexpr int32_t kMaxLongShiftDistance = 0x3f;
78
79static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
80static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
81
82static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
83
84static constexpr uint32_t kNoDexPc = -1;
85
86enum IfCondition {
87  // All types.
88  kCondEQ,  // ==
89  kCondNE,  // !=
90  // Signed integers and floating-point numbers.
91  kCondLT,  // <
92  kCondLE,  // <=
93  kCondGT,  // >
94  kCondGE,  // >=
95  // Unsigned integers.
96  kCondB,   // <
97  kCondBE,  // <=
98  kCondA,   // >
99  kCondAE,  // >=
100};
101
102enum GraphAnalysisResult {
103  kAnalysisSkipped,
104  kAnalysisInvalidBytecode,
105  kAnalysisFailThrowCatchLoop,
106  kAnalysisFailAmbiguousArrayOp,
107  kAnalysisSuccess,
108};
109
110class HInstructionList : public ValueObject {
111 public:
112  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}
113
114  void AddInstruction(HInstruction* instruction);
115  void RemoveInstruction(HInstruction* instruction);
116
117  // Insert `instruction` before/after an existing instruction `cursor`.
118  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
119  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
120
121  // Return true if this list contains `instruction`.
122  bool Contains(HInstruction* instruction) const;
123
124  // Return true if `instruction1` is found before `instruction2` in
125  // this instruction list and false otherwise.  Abort if none
126  // of these instructions is found.
127  bool FoundBefore(const HInstruction* instruction1,
128                   const HInstruction* instruction2) const;
129
130  bool IsEmpty() const { return first_instruction_ == nullptr; }
131  void Clear() { first_instruction_ = last_instruction_ = nullptr; }
132
133  // Update the block of all instructions to be `block`.
134  void SetBlockOfInstructions(HBasicBlock* block) const;
135
136  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
137  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
138  void Add(const HInstructionList& instruction_list);
139
140  // Return the number of instructions in the list. This is an expensive operation.
141  size_t CountSize() const;
142
143 private:
144  HInstruction* first_instruction_;
145  HInstruction* last_instruction_;
146
147  friend class HBasicBlock;
148  friend class HGraph;
149  friend class HInstruction;
150  friend class HInstructionIterator;
151  friend class HBackwardInstructionIterator;
152
153  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
154};
155
156class ReferenceTypeInfo : ValueObject {
157 public:
158  typedef Handle<mirror::Class> TypeHandle;
159
160  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
161
162  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
163    return ReferenceTypeInfo(type_handle, is_exact);
164  }
165
166  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
167
168  static bool IsValidHandle(TypeHandle handle) {
169    return handle.GetReference() != nullptr;
170  }
171
172  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
173    return IsValidHandle(type_handle_);
174  }
175
176  bool IsExact() const { return is_exact_; }
177
178  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
179    DCHECK(IsValid());
180    return GetTypeHandle()->IsObjectClass();
181  }
182
183  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
184    DCHECK(IsValid());
185    return GetTypeHandle()->IsStringClass();
186  }
187
188  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
189    DCHECK(IsValid());
190    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
191  }
192
193  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
194    DCHECK(IsValid());
195    return GetTypeHandle()->IsInterface();
196  }
197
198  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
199    DCHECK(IsValid());
200    return GetTypeHandle()->IsArrayClass();
201  }
202
203  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
204    DCHECK(IsValid());
205    return GetTypeHandle()->IsPrimitiveArray();
206  }
207
208  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
209    DCHECK(IsValid());
210    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
211  }
212
213  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
214    DCHECK(IsValid());
215    if (!IsExact()) return false;
216    if (!IsArrayClass()) return false;
217    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
218  }
219
220  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
221    DCHECK(IsValid());
222    if (!IsExact()) return false;
223    if (!IsArrayClass()) return false;
224    if (!rti.IsArrayClass()) return false;
225    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
226        rti.GetTypeHandle()->GetComponentType());
227  }
228
229  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
230
231  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
232    DCHECK(IsValid());
233    DCHECK(rti.IsValid());
234    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
235  }
236
237  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
238    DCHECK(IsValid());
239    DCHECK(rti.IsValid());
240    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
241        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
242  }
243
244  // Returns true if the type information provide the same amount of details.
245  // Note that it does not mean that the instructions have the same actual type
246  // (because the type can be the result of a merge).
247  bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
248    if (!IsValid() && !rti.IsValid()) {
249      // Invalid types are equal.
250      return true;
251    }
252    if (!IsValid() || !rti.IsValid()) {
253      // One is valid, the other not.
254      return false;
255    }
256    return IsExact() == rti.IsExact()
257        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
258  }
259
260 private:
261  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
262  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
263      : type_handle_(type_handle), is_exact_(is_exact) { }
264
265  // The class of the object.
266  TypeHandle type_handle_;
267  // Whether or not the type is exact or a superclass of the actual type.
268  // Whether or not we have any information about this type.
269  bool is_exact_;
270};
271
272std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
273
274// Control-flow graph of a method. Contains a list of basic blocks.
275class HGraph : public ArenaObject<kArenaAllocGraph> {
276 public:
277  HGraph(ArenaAllocator* arena,
278         const DexFile& dex_file,
279         uint32_t method_idx,
280         bool should_generate_constructor_barrier,
281         InstructionSet instruction_set,
282         InvokeType invoke_type = kInvalidInvokeType,
283         bool debuggable = false,
284         bool osr = false,
285         int start_instruction_id = 0)
286      : arena_(arena),
287        blocks_(arena->Adapter(kArenaAllocBlockList)),
288        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
289        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
290        entry_block_(nullptr),
291        exit_block_(nullptr),
292        maximum_number_of_out_vregs_(0),
293        number_of_vregs_(0),
294        number_of_in_vregs_(0),
295        temporaries_vreg_slots_(0),
296        has_bounds_checks_(false),
297        has_try_catch_(false),
298        has_irreducible_loops_(false),
299        debuggable_(debuggable),
300        current_instruction_id_(start_instruction_id),
301        dex_file_(dex_file),
302        method_idx_(method_idx),
303        invoke_type_(invoke_type),
304        in_ssa_form_(false),
305        should_generate_constructor_barrier_(should_generate_constructor_barrier),
306        instruction_set_(instruction_set),
307        cached_null_constant_(nullptr),
308        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
309        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
310        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
311        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
312        cached_current_method_(nullptr),
313        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
314        osr_(osr) {
315    blocks_.reserve(kDefaultNumberOfBlocks);
316  }
317
318  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
319  void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);
320
321  ArenaAllocator* GetArena() const { return arena_; }
322  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
323
324  bool IsInSsaForm() const { return in_ssa_form_; }
325  void SetInSsaForm() { in_ssa_form_ = true; }
326
327  HBasicBlock* GetEntryBlock() const { return entry_block_; }
328  HBasicBlock* GetExitBlock() const { return exit_block_; }
329  bool HasExitBlock() const { return exit_block_ != nullptr; }
330
331  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
332  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
333
334  void AddBlock(HBasicBlock* block);
335
336  void ComputeDominanceInformation();
337  void ClearDominanceInformation();
338  void ClearLoopInformation();
339  void FindBackEdges(ArenaBitVector* visited);
340  GraphAnalysisResult BuildDominatorTree();
341  void SimplifyCFG();
342  void SimplifyCatchBlocks();
343
344  // Analyze all natural loops in this graph. Returns a code specifying that it
345  // was successful or the reason for failure. The method will fail if a loop
346  // is a throw-catch loop, i.e. the header is a catch block.
347  GraphAnalysisResult AnalyzeLoops() const;
348
349  // Iterate over blocks to compute try block membership. Needs reverse post
350  // order and loop information.
351  void ComputeTryBlockInformation();
352
353  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
354  // Returns the instruction to replace the invoke expression or null if the
355  // invoke is for a void method. Note that the caller is responsible for replacing
356  // and removing the invoke instruction.
357  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
358
359  // Update the loop and try membership of `block`, which was spawned from `reference`.
360  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
361  // should be the new back edge.
362  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
363                                             HBasicBlock* reference,
364                                             bool replace_if_back_edge);
365
366  // Need to add a couple of blocks to test if the loop body is entered and
367  // put deoptimization instructions, etc.
368  void TransformLoopHeaderForBCE(HBasicBlock* header);
369
370  // Removes `block` from the graph. Assumes `block` has been disconnected from
371  // other blocks and has no instructions or phis.
372  void DeleteDeadEmptyBlock(HBasicBlock* block);
373
374  // Splits the edge between `block` and `successor` while preserving the
375  // indices in the predecessor/successor lists. If there are multiple edges
376  // between the blocks, the lowest indices are used.
377  // Returns the new block which is empty and has the same dex pc as `successor`.
378  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
379
380  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
381  void SimplifyLoop(HBasicBlock* header);
382
383  int32_t GetNextInstructionId() {
384    DCHECK_NE(current_instruction_id_, INT32_MAX);
385    return current_instruction_id_++;
386  }
387
388  int32_t GetCurrentInstructionId() const {
389    return current_instruction_id_;
390  }
391
392  void SetCurrentInstructionId(int32_t id) {
393    DCHECK_GE(id, current_instruction_id_);
394    current_instruction_id_ = id;
395  }
396
397  uint16_t GetMaximumNumberOfOutVRegs() const {
398    return maximum_number_of_out_vregs_;
399  }
400
401  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
402    maximum_number_of_out_vregs_ = new_value;
403  }
404
405  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
406    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
407  }
408
409  void UpdateTemporariesVRegSlots(size_t slots) {
410    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
411  }
412
413  size_t GetTemporariesVRegSlots() const {
414    DCHECK(!in_ssa_form_);
415    return temporaries_vreg_slots_;
416  }
417
418  void SetNumberOfVRegs(uint16_t number_of_vregs) {
419    number_of_vregs_ = number_of_vregs;
420  }
421
422  uint16_t GetNumberOfVRegs() const {
423    return number_of_vregs_;
424  }
425
426  void SetNumberOfInVRegs(uint16_t value) {
427    number_of_in_vregs_ = value;
428  }
429
430  uint16_t GetNumberOfInVRegs() const {
431    return number_of_in_vregs_;
432  }
433
434  uint16_t GetNumberOfLocalVRegs() const {
435    DCHECK(!in_ssa_form_);
436    return number_of_vregs_ - number_of_in_vregs_;
437  }
438
439  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
440    return reverse_post_order_;
441  }
442
443  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
444    return linear_order_;
445  }
446
447  bool HasBoundsChecks() const {
448    return has_bounds_checks_;
449  }
450
451  void SetHasBoundsChecks(bool value) {
452    has_bounds_checks_ = value;
453  }
454
455  bool ShouldGenerateConstructorBarrier() const {
456    return should_generate_constructor_barrier_;
457  }
458
459  bool IsDebuggable() const { return debuggable_; }
460
461  // Returns a constant of the given type and value. If it does not exist
462  // already, it is created and inserted into the graph. This method is only for
463  // integral types.
464  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
465
466  // TODO: This is problematic for the consistency of reference type propagation
467  // because it can be created anytime after the pass and thus it will be left
468  // with an invalid type.
469  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
470
471  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
472    return CreateConstant(value, &cached_int_constants_, dex_pc);
473  }
474  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
475    return CreateConstant(value, &cached_long_constants_, dex_pc);
476  }
477  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
478    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
479  }
480  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
481    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
482  }
483
484  HCurrentMethod* GetCurrentMethod();
485
486  const DexFile& GetDexFile() const {
487    return dex_file_;
488  }
489
490  uint32_t GetMethodIdx() const {
491    return method_idx_;
492  }
493
494  InvokeType GetInvokeType() const {
495    return invoke_type_;
496  }
497
498  InstructionSet GetInstructionSet() const {
499    return instruction_set_;
500  }
501
502  bool IsCompilingOsr() const { return osr_; }
503
504  bool HasTryCatch() const { return has_try_catch_; }
505  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
506
507  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
508  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
509
510  ArtMethod* GetArtMethod() const { return art_method_; }
511  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
512
513  // Returns an instruction with the opposite boolean value from 'cond'.
514  // The instruction has been inserted into the graph, either as a constant, or
515  // before cursor.
516  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
517
518  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
519
520 private:
521  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
522  void RemoveDeadBlocks(const ArenaBitVector& visited);
523
524  template <class InstructionType, typename ValueType>
525  InstructionType* CreateConstant(ValueType value,
526                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
527                                  uint32_t dex_pc = kNoDexPc) {
528    // Try to find an existing constant of the given value.
529    InstructionType* constant = nullptr;
530    auto cached_constant = cache->find(value);
531    if (cached_constant != cache->end()) {
532      constant = cached_constant->second;
533    }
534
535    // If not found or previously deleted, create and cache a new instruction.
536    // Don't bother reviving a previously deleted instruction, for simplicity.
537    if (constant == nullptr || constant->GetBlock() == nullptr) {
538      constant = new (arena_) InstructionType(value, dex_pc);
539      cache->Overwrite(value, constant);
540      InsertConstant(constant);
541    }
542    return constant;
543  }
544
545  void InsertConstant(HConstant* instruction);
546
547  // Cache a float constant into the graph. This method should only be
548  // called by the SsaBuilder when creating "equivalent" instructions.
549  void CacheFloatConstant(HFloatConstant* constant);
550
551  // See CacheFloatConstant comment.
552  void CacheDoubleConstant(HDoubleConstant* constant);
553
554  ArenaAllocator* const arena_;
555
556  // List of blocks in insertion order.
557  ArenaVector<HBasicBlock*> blocks_;
558
559  // List of blocks to perform a reverse post order tree traversal.
560  ArenaVector<HBasicBlock*> reverse_post_order_;
561
562  // List of blocks to perform a linear order tree traversal.
563  ArenaVector<HBasicBlock*> linear_order_;
564
565  HBasicBlock* entry_block_;
566  HBasicBlock* exit_block_;
567
568  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
569  uint16_t maximum_number_of_out_vregs_;
570
571  // The number of virtual registers in this method. Contains the parameters.
572  uint16_t number_of_vregs_;
573
574  // The number of virtual registers used by parameters of this method.
575  uint16_t number_of_in_vregs_;
576
577  // Number of vreg size slots that the temporaries use (used in baseline compiler).
578  size_t temporaries_vreg_slots_;
579
580  // Has bounds checks. We can totally skip BCE if it's false.
581  bool has_bounds_checks_;
582
583  // Flag whether there are any try/catch blocks in the graph. We will skip
584  // try/catch-related passes if false.
585  bool has_try_catch_;
586
587  // Flag whether there are any irreducible loops in the graph.
588  bool has_irreducible_loops_;
589
590  // Indicates whether the graph should be compiled in a way that
591  // ensures full debuggability. If false, we can apply more
592  // aggressive optimizations that may limit the level of debugging.
593  const bool debuggable_;
594
595  // The current id to assign to a newly added instruction. See HInstruction.id_.
596  int32_t current_instruction_id_;
597
598  // The dex file from which the method is from.
599  const DexFile& dex_file_;
600
601  // The method index in the dex file.
602  const uint32_t method_idx_;
603
604  // If inlined, this encodes how the callee is being invoked.
605  const InvokeType invoke_type_;
606
607  // Whether the graph has been transformed to SSA form. Only used
608  // in debug mode to ensure we are not using properties only valid
609  // for non-SSA form (like the number of temporaries).
610  bool in_ssa_form_;
611
612  const bool should_generate_constructor_barrier_;
613
614  const InstructionSet instruction_set_;
615
616  // Cached constants.
617  HNullConstant* cached_null_constant_;
618  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
619  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
620  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
621  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
622
623  HCurrentMethod* cached_current_method_;
624
625  // The ArtMethod this graph is for. Note that for AOT, it may be null,
626  // for example for methods whose declaring class could not be resolved
627  // (such as when the superclass could not be found).
628  ArtMethod* art_method_;
629
630  // Keep the RTI of inexact Object to avoid having to pass stack handle
631  // collection pointer to passes which may create NullConstant.
632  ReferenceTypeInfo inexact_object_rti_;
633
634  // Whether we are compiling this graph for on stack replacement: this will
635  // make all loops seen as irreducible and emit special stack maps to mark
636  // compiled code entries which the interpreter can directly jump to.
637  const bool osr_;
638
639  friend class SsaBuilder;           // For caching constants.
640  friend class SsaLivenessAnalysis;  // For the linear order.
641  friend class HInliner;             // For the reverse post order.
642  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
643  DISALLOW_COPY_AND_ASSIGN(HGraph);
644};
645
646class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
647 public:
648  HLoopInformation(HBasicBlock* header, HGraph* graph)
649      : header_(header),
650        suspend_check_(nullptr),
651        irreducible_(false),
652        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
653        // Make bit vector growable, as the number of blocks may change.
654        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
655    back_edges_.reserve(kDefaultNumberOfBackEdges);
656  }
657
658  bool IsIrreducible() const { return irreducible_; }
659
660  void Dump(std::ostream& os);
661
662  HBasicBlock* GetHeader() const {
663    return header_;
664  }
665
666  void SetHeader(HBasicBlock* block) {
667    header_ = block;
668  }
669
670  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
671  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
672  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
673
674  void AddBackEdge(HBasicBlock* back_edge) {
675    back_edges_.push_back(back_edge);
676  }
677
678  void RemoveBackEdge(HBasicBlock* back_edge) {
679    RemoveElement(back_edges_, back_edge);
680  }
681
682  bool IsBackEdge(const HBasicBlock& block) const {
683    return ContainsElement(back_edges_, &block);
684  }
685
686  size_t NumberOfBackEdges() const {
687    return back_edges_.size();
688  }
689
690  HBasicBlock* GetPreHeader() const;
691
692  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
693    return back_edges_;
694  }
695
696  // Returns the lifetime position of the back edge that has the
697  // greatest lifetime position.
698  size_t GetLifetimeEnd() const;
699
700  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
701    ReplaceElement(back_edges_, existing, new_back_edge);
702  }
703
704  // Finds blocks that are part of this loop.
705  void Populate();
706
707  // Returns whether this loop information contains `block`.
708  // Note that this loop information *must* be populated before entering this function.
709  bool Contains(const HBasicBlock& block) const;
710
711  // Returns whether this loop information is an inner loop of `other`.
712  // Note that `other` *must* be populated before entering this function.
713  bool IsIn(const HLoopInformation& other) const;
714
715  // Returns true if instruction is not defined within this loop.
716  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;
717
718  const ArenaBitVector& GetBlocks() const { return blocks_; }
719
720  void Add(HBasicBlock* block);
721  void Remove(HBasicBlock* block);
722
723  void ClearAllBlocks() {
724    blocks_.ClearAllBits();
725  }
726
727 private:
728  // Internal recursive implementation of `Populate`.
729  void PopulateRecursive(HBasicBlock* block);
730  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);
731
732  HBasicBlock* header_;
733  HSuspendCheck* suspend_check_;
734  bool irreducible_;
735  ArenaVector<HBasicBlock*> back_edges_;
736  ArenaBitVector blocks_;
737
738  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
739};
740
741// Stores try/catch information for basic blocks.
742// Note that HGraph is constructed so that catch blocks cannot simultaneously
743// be try blocks.
744class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
745 public:
746  // Try block information constructor.
747  explicit TryCatchInformation(const HTryBoundary& try_entry)
748      : try_entry_(&try_entry),
749        catch_dex_file_(nullptr),
750        catch_type_index_(DexFile::kDexNoIndex16) {
751    DCHECK(try_entry_ != nullptr);
752  }
753
754  // Catch block information constructor.
755  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
756      : try_entry_(nullptr),
757        catch_dex_file_(&dex_file),
758        catch_type_index_(catch_type_index) {}
759
760  bool IsTryBlock() const { return try_entry_ != nullptr; }
761
762  const HTryBoundary& GetTryEntry() const {
763    DCHECK(IsTryBlock());
764    return *try_entry_;
765  }
766
767  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
768
769  bool IsCatchAllTypeIndex() const {
770    DCHECK(IsCatchBlock());
771    return catch_type_index_ == DexFile::kDexNoIndex16;
772  }
773
774  uint16_t GetCatchTypeIndex() const {
775    DCHECK(IsCatchBlock());
776    return catch_type_index_;
777  }
778
779  const DexFile& GetCatchDexFile() const {
780    DCHECK(IsCatchBlock());
781    return *catch_dex_file_;
782  }
783
784 private:
785  // One of possibly several TryBoundary instructions entering the block's try.
786  // Only set for try blocks.
787  const HTryBoundary* try_entry_;
788
789  // Exception type information. Only set for catch blocks.
790  const DexFile* catch_dex_file_;
791  const uint16_t catch_type_index_;
792};
793
794static constexpr size_t kNoLifetime = -1;
795static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
796
797// A block in a method. Contains the list of instructions represented
798// as a double linked list. Each block knows its predecessors and
799// successors.
800
801class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
802 public:
803  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
804      : graph_(graph),
805        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
806        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
807        loop_information_(nullptr),
808        dominator_(nullptr),
809        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
810        block_id_(kInvalidBlockId),
811        dex_pc_(dex_pc),
812        lifetime_start_(kNoLifetime),
813        lifetime_end_(kNoLifetime),
814        try_catch_information_(nullptr) {
815    predecessors_.reserve(kDefaultNumberOfPredecessors);
816    successors_.reserve(kDefaultNumberOfSuccessors);
817    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
818  }
819
820  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
821    return predecessors_;
822  }
823
824  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
825    return successors_;
826  }
827
828  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
829  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
830
831  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
832    return ContainsElement(successors_, block, start_from);
833  }
834
835  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
836    return dominated_blocks_;
837  }
838
839  bool IsEntryBlock() const {
840    return graph_->GetEntryBlock() == this;
841  }
842
843  bool IsExitBlock() const {
844    return graph_->GetExitBlock() == this;
845  }
846
847  bool IsSingleGoto() const;
848  bool IsSingleTryBoundary() const;
849
850  // Returns true if this block emits nothing but a jump.
851  bool IsSingleJump() const {
852    HLoopInformation* loop_info = GetLoopInformation();
853    return (IsSingleGoto() || IsSingleTryBoundary())
854           // Back edges generate a suspend check.
855           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
856  }
857
  // Registers `back_edge` as a back edge of the loop headed by this block.
  // Lazily allocates the loop information (with `this` as its header) in the
  // graph's arena on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }
865
  // Owning graph, block id, dex pc and dominator-tree accessors.
  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // NOTE(review): takes `int` while block_id_ is uint32_t — relies on the
  // implicit conversion; confirm callers never pass a negative id.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
876
  // Removes `block` from the dominated-blocks list.
  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  // Replaces `existing` with `new_block` in the dominated-blocks list.
  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }
884
  // Clears dominator-tree data for this block; defined out of line.
  void ClearDominanceInformation();

  // Number of back edges of the loop this block heads, or 0 when this block
  // is not a loop header.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  // Accessors for the block's instruction list and phi list.
  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  // Presumably skips leading parallel-move instructions — defined out of
  // line; TODO confirm against the .cc file.
  HInstruction* GetFirstInstructionDisregardMoves() const;
899
  // Creates an edge this->block: appends `block` to the successor list and
  // `this` to `block`'s predecessor list, keeping the two lists consistent.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }
904
  // Redirects the edge this->existing to this->new_block, keeping the
  // successor index of the edge stable.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    // Look up the index before unlinking, then rewrite the slot in place.
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }
911
  // Redirects the edge existing->this to new_block->this, keeping the
  // predecessor index of the edge stable.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    // Look up the index before unlinking, then rewrite the slot in place.
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }
918
  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    // Rewrite the existing edge slots to point at `this`...
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    // ...then link `this` to both ends.
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }
930
  // Erases `block` from the predecessor list. Does NOT touch `block`'s
  // successor list; callers are responsible for the other direction.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Erases `block` from the successor list (one-directional, see above).
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  // Drops all predecessor edges from this side only.
  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Creates an edge block->this, updating both ends.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }
947
  // Swaps the order of the (exactly two) predecessors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the order of the (exactly two) successors.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }
957
  // Index of `predecessor` in the predecessor list.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Index of `successor` in the successor list.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }
965
  // The unique predecessor; only valid when there is exactly one.
  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  // The unique successor; only valid when there is exactly one.
  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`. Useful when `predecessor` may appear
  // more than once (e.g. multiple edges from the same block).
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }
982
983  // Create a new block between this block and its predecessors. The new block
984  // is added to the graph, all predecessor edges are relinked to it and an edge
985  // is created to `this`. Returns the new empty block. Reverse post order or
986  // loop and try/catch information are not updated.
987  HBasicBlock* CreateImmediateDominator();
988
989  // Split the block into two blocks just before `cursor`. Returns the newly
990  // created, latter block. Note that this method will add the block to the
991  // graph, create a Goto at the end of the former block and will create an edge
992  // between the blocks. It will not, however, update the reverse post order or
993  // loop and try/catch information.
994  HBasicBlock* SplitBefore(HInstruction* cursor);
995
996  // Split the block into two blocks just before `cursor`. Returns the newly
997  // created block. Note that this method just updates raw block information,
998  // like predecessors, successors, dominators, and instruction list. It does not
999  // update the graph, reverse post order, loop information, nor make sure the
1000  // blocks are consistent (for example ending with a control flow instruction).
1001  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1002
1003  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1004  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1005
1006  // Merge `other` at the end of `this`. Successors and dominated blocks of
1007  // `other` are changed to be successors and dominated blocks of `this`. Note
1008  // that this method does not update the graph, reverse post order, loop
1009  // information, nor make sure the blocks are consistent (for example ending
1010  // with a control flow instruction).
1011  void MergeWithInlined(HBasicBlock* other);
1012
1013  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1014  // of `this` are moved to `other`.
1015  // Note that this method does not update the graph, reverse post order, loop
1016  // information, nor make sure the blocks are consistent (for example ending
1017  // with a control flow instruction).
1018  void ReplaceWith(HBasicBlock* other);
1019
1020  // Merge `other` at the end of `this`. This method updates loops, reverse post
1021  // order, links to predecessors, successors, dominators and deletes the block
1022  // from the graph. The two blocks must be successive, i.e. `this` the only
1023  // predecessor of `other` and vice versa.
1024  void MergeWith(HBasicBlock* other);
1025
1026  // Disconnects `this` from all its predecessors, successors and dominator,
1027  // removes it from all loops it is included in and eventually from the graph.
1028  // The block must not dominate any other block. Predecessors and successors
1029  // are safely updated.
1030  void DisconnectAndDelete();
1031
1032  void AddInstruction(HInstruction* instruction);
1033  // Insert `instruction` before/after an existing instruction `cursor`.
1034  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1035  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1036  // Replace instruction `initial` with `replacement` within this block.
1037  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1038                                       HInstruction* replacement);
1039  void MoveInstructionBefore(HInstruction* insn, HInstruction* cursor);
1040  void AddPhi(HPhi* phi);
1041  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1042  // RemoveInstruction and RemovePhi delete a given instruction from the respective
1043  // instruction list. With 'ensure_safety' set to true, it verifies that the
1044  // instruction is not in use and removes it from the use lists of its inputs.
1045  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1046  void RemovePhi(HPhi* phi, bool ensure_safety = true);
1047  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1048
  // A block is a loop header if it owns loop information naming it as header.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Only valid on loop headers: whether predecessor 0 is the pre-header.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Only valid on loop headers: whether predecessor 0 is a back edge.
  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  // Innermost loop containing this block, or null when not in any loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }
1066
  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      // First loop seen for this block: adopt it.
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }
1086
  // Raw update of the loop information, with none of SetInLoop's checks.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  // Whether this block belongs to any loop.
  bool IsInLoop() const { return loop_information_ != nullptr; }

  // Try/catch metadata; null when the block is in neither a try nor a catch.
  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  // Whether this block lies inside a try region.
  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  // Whether this block is a catch handler entry.
  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }
1107
1108  // Returns the try entry that this block's successors should have. They will
1109  // be in the same try, unless the block ends in a try boundary. In that case,
1110  // the appropriate try entry will be returned.
1111  const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1112
1113  bool HasThrowingInstructions() const;
1114
  // Returns whether this block dominates the block passed as parameter.
1116  bool Dominates(HBasicBlock* block) const;
1117
1118  size_t GetLifetimeStart() const { return lifetime_start_; }
1119  size_t GetLifetimeEnd() const { return lifetime_end_; }
1120
1121  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
1122  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1123
1124  bool EndsWithControlFlowInstruction() const;
1125  bool EndsWithIf() const;
1126  bool EndsWithTryBoundary() const;
1127  bool HasSinglePhi() const;
1128
 private:
  HGraph* graph_;                              // The graph this block belongs to.
  ArenaVector<HBasicBlock*> predecessors_;     // Incoming edges, in order.
  ArenaVector<HBasicBlock*> successors_;       // Outgoing edges, in order.
  HInstructionList instructions_;              // Non-phi instructions of the block.
  HInstructionList phis_;                      // Phi instructions of the block.
  HLoopInformation* loop_information_;         // Innermost enclosing loop, or null.
  HBasicBlock* dominator_;                     // Immediate dominator, or null.
  ArenaVector<HBasicBlock*> dominated_blocks_; // Immediately dominated blocks.
  uint32_t block_id_;                          // kInvalidBlockId until assigned.
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;                      // kNoLifetime until liveness runs.
  size_t lifetime_end_;                        // kNoLifetime until liveness runs.
  TryCatchInformation* try_catch_information_; // Null outside try/catch regions.

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1150
// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  // Starts at the innermost loop of `block` (null => already Done()).
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  // Steps to the next enclosing loop by following the current loop's
  // pre-header, whose loop information is the next loop outwards.
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};
1175
1176#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
1177  M(Above, Condition)                                                   \
1178  M(AboveOrEqual, Condition)                                            \
1179  M(Add, BinaryOperation)                                               \
1180  M(And, BinaryOperation)                                               \
1181  M(ArrayGet, Instruction)                                              \
1182  M(ArrayLength, Instruction)                                           \
1183  M(ArraySet, Instruction)                                              \
1184  M(Below, Condition)                                                   \
1185  M(BelowOrEqual, Condition)                                            \
1186  M(BooleanNot, UnaryOperation)                                         \
1187  M(BoundsCheck, Instruction)                                           \
1188  M(BoundType, Instruction)                                             \
1189  M(CheckCast, Instruction)                                             \
1190  M(ClassTableGet, Instruction)                                         \
1191  M(ClearException, Instruction)                                        \
1192  M(ClinitCheck, Instruction)                                           \
1193  M(Compare, BinaryOperation)                                           \
1194  M(CurrentMethod, Instruction)                                         \
1195  M(Deoptimize, Instruction)                                            \
1196  M(Div, BinaryOperation)                                               \
1197  M(DivZeroCheck, Instruction)                                          \
1198  M(DoubleConstant, Constant)                                           \
1199  M(Equal, Condition)                                                   \
1200  M(Exit, Instruction)                                                  \
1201  M(FloatConstant, Constant)                                            \
1202  M(Goto, Instruction)                                                  \
1203  M(GreaterThan, Condition)                                             \
1204  M(GreaterThanOrEqual, Condition)                                      \
1205  M(If, Instruction)                                                    \
1206  M(InstanceFieldGet, Instruction)                                      \
1207  M(InstanceFieldSet, Instruction)                                      \
1208  M(InstanceOf, Instruction)                                            \
1209  M(IntConstant, Constant)                                              \
1210  M(InvokeUnresolved, Invoke)                                           \
1211  M(InvokeInterface, Invoke)                                            \
1212  M(InvokeStaticOrDirect, Invoke)                                       \
1213  M(InvokeVirtual, Invoke)                                              \
1214  M(LessThan, Condition)                                                \
1215  M(LessThanOrEqual, Condition)                                         \
1216  M(LoadClass, Instruction)                                             \
1217  M(LoadException, Instruction)                                         \
1218  M(LoadString, Instruction)                                            \
1219  M(LongConstant, Constant)                                             \
1220  M(MemoryBarrier, Instruction)                                         \
1221  M(MonitorOperation, Instruction)                                      \
1222  M(Mul, BinaryOperation)                                               \
1223  M(NativeDebugInfo, Instruction)                                       \
1224  M(Neg, UnaryOperation)                                                \
1225  M(NewArray, Instruction)                                              \
1226  M(NewInstance, Instruction)                                           \
1227  M(Not, UnaryOperation)                                                \
1228  M(NotEqual, Condition)                                                \
1229  M(NullConstant, Instruction)                                          \
1230  M(NullCheck, Instruction)                                             \
1231  M(Or, BinaryOperation)                                                \
1232  M(PackedSwitch, Instruction)                                          \
1233  M(ParallelMove, Instruction)                                          \
1234  M(ParameterValue, Instruction)                                        \
1235  M(Phi, Instruction)                                                   \
1236  M(Rem, BinaryOperation)                                               \
1237  M(Return, Instruction)                                                \
1238  M(ReturnVoid, Instruction)                                            \
1239  M(Ror, BinaryOperation)                                               \
1240  M(Shl, BinaryOperation)                                               \
1241  M(Shr, BinaryOperation)                                               \
1242  M(StaticFieldGet, Instruction)                                        \
1243  M(StaticFieldSet, Instruction)                                        \
1244  M(UnresolvedInstanceFieldGet, Instruction)                            \
1245  M(UnresolvedInstanceFieldSet, Instruction)                            \
1246  M(UnresolvedStaticFieldGet, Instruction)                              \
1247  M(UnresolvedStaticFieldSet, Instruction)                              \
1248  M(Select, Instruction)                                                \
1249  M(Sub, BinaryOperation)                                               \
1250  M(SuspendCheck, Instruction)                                          \
1251  M(Throw, Instruction)                                                 \
1252  M(TryBoundary, Instruction)                                           \
1253  M(TypeConversion, Instruction)                                        \
1254  M(UShr, BinaryOperation)                                              \
1255  M(Xor, BinaryOperation)                                               \
1256
1257/*
1258 * Instructions, shared across several (not all) architectures.
1259 */
1260#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1261#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1262#else
1263#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1264  M(BitwiseNegatedRight, Instruction)                                   \
1265  M(MultiplyAccumulate, Instruction)
1266#endif
1267
1268#ifndef ART_ENABLE_CODEGEN_arm
1269#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1270#else
1271#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1272  M(ArmDexCacheArraysBase, Instruction)
1273#endif
1274
1275#ifndef ART_ENABLE_CODEGEN_arm64
1276#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1277#else
1278#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
1279  M(Arm64DataProcWithShifterOp, Instruction)                            \
1280  M(Arm64IntermediateAddress, Instruction)
1281#endif
1282
1283#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1284
1285#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1286
1287#ifndef ART_ENABLE_CODEGEN_x86
1288#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1289#else
1290#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1291  M(X86ComputeBaseMethodAddress, Instruction)                           \
1292  M(X86LoadFromConstantTable, Instruction)                              \
1293  M(X86FPNeg, Instruction)                                              \
1294  M(X86PackedSwitch, Instruction)
1295#endif
1296
1297#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1298
1299#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1300  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1301  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1302  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1303  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1304  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1305  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1306  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1307  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1308
1309#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
1310  M(Condition, BinaryOperation)                                         \
1311  M(Constant, Instruction)                                              \
1312  M(UnaryOperation, Instruction)                                        \
1313  M(BinaryOperation, Instruction)                                       \
1314  M(Invoke, Instruction)
1315
1316#define FOR_EACH_INSTRUCTION(M)                                         \
1317  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
1318  FOR_EACH_ABSTRACT_INSTRUCTION(M)
1319
1320#define FORWARD_DECLARATION(type, super) class H##type;
1321FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1322#undef FORWARD_DECLARATION
1323
// Boilerplate placed in every concrete HInstruction subclass: kind tag,
// debug name, same-type check and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }  \
  const char* DebugName() const OVERRIDE { return #type; }              \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate for abstract instruction classes: Is/As helpers resolved
// statically (the As casts simply return `this`).
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1336
1337template <typename T> class HUseList;
1338
// One entry of a use list: records the user `T` (an instruction or an
// environment) and the input index at which the use occurs. Nodes are
// doubly linked and arena-allocated; only HUseList may create them.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  HUseListNode* GetPrevious() const { return prev_; }
  HUseListNode* GetNext() const { return next_; }
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Private: nodes are created exclusively through HUseList<T>::AddUse.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index), prev_(nullptr), next_(nullptr) {}

  T const user_;            // The user; immutable once created.
  size_t index_;            // Input index of the use within the user.
  HUseListNode<T>* prev_;
  HUseListNode<T>* next_;

  friend class HUseList<T>;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1361
// Intrusive doubly-linked list of uses, owned by the used instruction.
// Nodes live in the arena, so Remove/Clear only unlink — nothing is freed.
template <typename T>
class HUseList : public ValueObject {
 public:
  HUseList() : first_(nullptr) {}

  // Drops all entries by forgetting the head (arena-backed, no deallocation).
  void Clear() {
    first_ = nullptr;
  }

  // Adds a new entry at the beginning of the use list and returns
  // the newly created node.
  HUseListNode<T>* AddUse(T user, size_t index, ArenaAllocator* arena) {
    HUseListNode<T>* new_node = new (arena) HUseListNode<T>(user, index);
    if (IsEmpty()) {
      first_ = new_node;
    } else {
      // Prepend: link the old head behind the new node.
      first_->prev_ = new_node;
      new_node->next_ = first_;
      first_ = new_node;
    }
    return new_node;
  }

  HUseListNode<T>* GetFirst() const {
    return first_;
  }

  // Unlinks `node` from the list. `node` must be a member of this list
  // (checked in debug builds; the Contains() check makes this O(n) there).
  void Remove(HUseListNode<T>* node) {
    DCHECK(node != nullptr);
    DCHECK(Contains(node));

    if (node->prev_ != nullptr) {
      node->prev_->next_ = node->next_;
    }
    if (node->next_ != nullptr) {
      node->next_->prev_ = node->prev_;
    }
    if (node == first_) {
      first_ = node->next_;
    }
  }

  // Linear scan for `node`; null is never contained.
  bool Contains(const HUseListNode<T>* node) const {
    if (node == nullptr) {
      return false;
    }
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      if (current == node) {
        return true;
      }
    }
    return false;
  }

  bool IsEmpty() const {
    return first_ == nullptr;
  }

  // Exactly one entry: a head with no successor.
  bool HasOnlyOneUse() const {
    return first_ != nullptr && first_->next_ == nullptr;
  }

  // O(n) count — the "Slow" suffix warns against use on hot paths.
  size_t SizeSlow() const {
    size_t count = 0;
    for (HUseListNode<T>* current = first_; current != nullptr; current = current->GetNext()) {
      ++count;
    }
    return count;
  }

 private:
  HUseListNode<T>* first_;
};
1435
// Forward iterator over an HUseList, in list (most-recently-added-first)
// order. Do not add/remove uses while iterating.
template<typename T>
class HUseIterator : public ValueObject {
 public:
  explicit HUseIterator(const HUseList<T>& uses) : current_(uses.GetFirst()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetNext();
  }

  HUseListNode<T>* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HUseListNode<T>* current_;

  friend class HValue;
};
1458
// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  // Empty record, and a record not yet linked into the used instruction's
  // use list (use_node_ still null).
  HUserRecord() : instruction_(nullptr), use_node_(nullptr) {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), use_node_(nullptr) {}

  // Completes `old_record` (which must not be linked yet) with its use-list
  // node `use_node`.
  HUserRecord(const HUserRecord<T>& old_record, HUseListNode<T>* use_node)
    : instruction_(old_record.instruction_), use_node_(use_node) {
    DCHECK(instruction_ != nullptr);
    DCHECK(use_node_ != nullptr);
    DCHECK(old_record.use_node_ == nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  HUseListNode<T>* GetUseNode() const { return use_node_; }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Corresponding entry in the use list kept by 'instruction_'.
  HUseListNode<T>* use_node_;
};
1485
1486/**
1487 * Side-effects representation.
1488 *
1489 * For write/read dependences on fields/arrays, the dependence analysis uses
1490 * type disambiguation (e.g. a float field write cannot modify the value of an
1491 * integer field read) and the access type (e.g.  a reference array write cannot
1492 * modify the value of a reference field read [although it may modify the
1493 * reference fetch prior to reading the field, which is represented by its own
1494 * write/read dependence]). The analysis makes conservative points-to
1495 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1496 * the same, and any reference read depends on any reference read without
1497 * further regard of its type).
1498 *
 * The internal representation uses 38 bits and is described in the table below.
1500 * The first line indicates the side effect, and for field/array accesses the
1501 * second line indicates the type of the access (in the order of the
1502 * Primitive::Type enum).
1503 * The two numbered lines below indicate the bit position in the bitfield (read
1504 * vertically).
1505 *
1506 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1507 *   +-------------+---------+---------+--------------+---------+---------+
1508 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1509 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1510 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1511 *
1512 * Note that, to ease the implementation, 'changes' bits are least significant
1513 * bits, while 'dependency' bits are most significant bits.
1514 */
1515class SideEffects : public ValueObject {
1516 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every change bit and every depend-on bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All memory writes/reads plus the can-trigger-GC bit; only the
  // depends-on-GC bit is left out.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Field write of `type`. Volatile accesses act as full memory barriers,
  // so they are modeled as all writes and reads.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  // Array write of `type` (arrays cannot be volatile).
  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  // Field read of `type`; volatile reads are modeled like volatile writes.
  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  // Single-bit effects for garbage collection (see bit layout above).
  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }
1578
1579  // Combines the side-effects of this and the other.
1580  SideEffects Union(SideEffects other) const {
1581    return SideEffects(flags_ | other.flags_);
1582  }
1583
1584  SideEffects Exclusion(SideEffects other) const {
1585    return SideEffects(flags_ & ~other.flags_);
1586  }
1587
1588  void Add(SideEffects other) {
1589    flags_ |= other.flags_;
1590  }
1591
1592  bool Includes(SideEffects other) const {
1593    return (other.flags_ & flags_) == other.flags_;
1594  }
1595
1596  bool HasSideEffects() const {
1597    return (flags_ & kAllChangeBits);
1598  }
1599
1600  bool HasDependencies() const {
1601    return (flags_ & kAllDependOnBits);
1602  }
1603
  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }
1608
1609  // Returns true if something is written.
1610  bool DoesAnyWrite() const {
1611    return (flags_ & kAllWrites);
1612  }
1613
1614  // Returns true if something is read.
1615  bool DoesAnyRead() const {
1616    return (flags_ & kAllReads);
1617  }
1618
1619  // Returns true if potentially everything is written and read
1620  // (every type and every kind of access).
1621  bool DoesAllReadWrite() const {
1622    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1623  }
1624
  // Returns true if the set is fully saturated: every 'change' and every
  // 'depend on' bit is set (stronger than DoesAllReadWrite()).
  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }
1628
1629  // Returns true if `this` may read something written by `other`.
1630  bool MayDependOn(SideEffects other) const {
1631    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1632    return (other.flags_ & depends_on_flags);
1633  }
1634
  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  // where 'x' marks the 'depends on GC' bit, 'y' the 'can trigger GC' bit,
  // and the letter groups are (MSB to LSB) array reads, field reads,
  // array writes, field writes, one letter per primitive type.
  std::string ToString() const {
    std::string flags = "|";
    // Walk from the most significant bit down to bit zero, appending one
    // character per set bit and a '|' separator at each group boundary.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        // (That index is handled by the branch above, so '_' never prints.)
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }
1662
1663  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1664
 private:
  // Number of bits in each field/array access group: one bit per primitive
  // type (9 types, excluding void).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half of the layout (least significant bits): field writes
  // (bits 0-8), array writes (bits 9-17), then the 'can trigger GC' bit.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half, mirroring the 'change' half in the next higher bits:
  // field reads, array reads, then the 'depends on GC' bit.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // MayDependOn() relies on the 'depend on' bits being an exact shifted
  // copy of the 'change' bits.
  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  // Aliases: masks covering whole groups of bits.
  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1694
1695  // Translates type to bit flag.
1696  static uint64_t TypeFlag(Primitive::Type type, int offset) {
1697    CHECK_NE(type, Primitive::kPrimVoid);
1698    const uint64_t one = 1;
1699    const int shift = type;  // 0-based consecutive enum
1700    DCHECK_LE(kFieldWriteOffset, shift);
1701    DCHECK_LT(shift, kArrayWriteOffset);
1702    return one << (type + offset);
1703  }
1704
  // Private constructor on direct flags value. Clients must go through the
  // named factory methods above.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  // Raw bit set; see the layout constants above.
  uint64_t flags_;
1709};
1710
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment with `number_of_vregs` empty slots. `holder` is
  // the instruction this environment is attached to; `dex_file`,
  // `method_idx`, `dex_pc` and `invoke_type` identify the code location the
  // virtual register values correspond to.
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Creates an environment with the same metadata and size as `to_copy` but
  // with empty vreg slots (populate them with CopyFrom()) and a new holder.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a deep copy of `parent` (and its own parent chain) below the
  // last environment of this chain, recursing until an environment without
  // a parent is found.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Sets slot `index` without updating the instruction's env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual register slots in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // A non-null parent means this environment belongs to an inlined invoke.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Record instructions' use entries of this environment for constant-time removal.
  // It should only be called by HInstruction when a new environment use is added.
  void RecordEnvUse(HUseListNode<HEnvironment*>* env_use) {
    DCHECK(env_use->GetUser() == this);
    size_t index = env_use->GetIndex();
    vregs_[index] = HUserRecord<HEnvironment*>(vregs_[index], env_use);
  }

  // Per-vreg instruction values, each paired with its env-use list node.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  // Per-vreg locations, filled in via SetLocationAt().
  ArenaVector<Location> locations_;
  // Environment of the caller when this environment comes from an inlined invoke.
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1831
1832class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1833 public:
1834  HInstruction(SideEffects side_effects, uint32_t dex_pc)
1835      : previous_(nullptr),
1836        next_(nullptr),
1837        block_(nullptr),
1838        dex_pc_(dex_pc),
1839        id_(-1),
1840        ssa_index_(-1),
1841        packed_fields_(0u),
1842        environment_(nullptr),
1843        locations_(nullptr),
1844        live_interval_(nullptr),
1845        lifetime_position_(kNoLifetime),
1846        side_effects_(side_effects),
1847        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1848    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1849  }
1850
1851  virtual ~HInstruction() {}
1852
1853#define DECLARE_KIND(type, super) k##type,
1854  enum InstructionKind {
1855    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1856  };
1857#undef DECLARE_KIND
1858
1859  HInstruction* GetNext() const { return next_; }
1860  HInstruction* GetPrevious() const { return previous_; }
1861
1862  HInstruction* GetNextDisregardingMoves() const;
1863  HInstruction* GetPreviousDisregardingMoves() const;
1864
1865  HBasicBlock* GetBlock() const { return block_; }
1866  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1867  void SetBlock(HBasicBlock* block) { block_ = block; }
1868  bool IsInBlock() const { return block_ != nullptr; }
1869  bool IsInLoop() const { return block_->IsInLoop(); }
1870  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1871  bool IsIrreducibleLoopHeaderPhi() const {
1872    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1873  }
1874
1875  virtual size_t InputCount() const = 0;
1876  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1877
1878  virtual void Accept(HGraphVisitor* visitor) = 0;
1879  virtual const char* DebugName() const = 0;
1880
1881  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
1882  void SetRawInputAt(size_t index, HInstruction* input) {
1883    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1884  }
1885
1886  virtual bool NeedsEnvironment() const { return false; }
1887
1888  uint32_t GetDexPc() const { return dex_pc_; }
1889
1890  virtual bool IsControlFlow() const { return false; }
1891
1892  virtual bool CanThrow() const { return false; }
1893  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1894
1895  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1896  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1897
1898  // Does not apply for all instructions, but having this at top level greatly
1899  // simplifies the null check elimination.
1900  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1901  virtual bool CanBeNull() const {
1902    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
1903    return true;
1904  }
1905
1906  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1907    return false;
1908  }
1909
1910  virtual bool IsActualObject() const {
1911    return GetType() == Primitive::kPrimNot;
1912  }
1913
1914  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
1915
1916  ReferenceTypeInfo GetReferenceTypeInfo() const {
1917    DCHECK_EQ(GetType(), Primitive::kPrimNot);
1918    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
1919                                              GetPackedFlag<kFlagReferenceTypeIsExact>());;
1920  }
1921
1922  void AddUseAt(HInstruction* user, size_t index) {
1923    DCHECK(user != nullptr);
1924    HUseListNode<HInstruction*>* use =
1925        uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
1926    user->SetRawInputRecordAt(index, HUserRecord<HInstruction*>(user->InputRecordAt(index), use));
1927  }
1928
1929  void AddEnvUseAt(HEnvironment* user, size_t index) {
1930    DCHECK(user != nullptr);
1931    HUseListNode<HEnvironment*>* env_use =
1932        env_uses_.AddUse(user, index, GetBlock()->GetGraph()->GetArena());
1933    user->RecordEnvUse(env_use);
1934  }
1935
1936  void RemoveAsUserOfInput(size_t input) {
1937    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
1938    input_use.GetInstruction()->uses_.Remove(input_use.GetUseNode());
1939  }
1940
1941  const HUseList<HInstruction*>& GetUses() const { return uses_; }
1942  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
1943
1944  bool HasUses() const { return !uses_.IsEmpty() || !env_uses_.IsEmpty(); }
1945  bool HasEnvironmentUses() const { return !env_uses_.IsEmpty(); }
1946  bool HasNonEnvironmentUses() const { return !uses_.IsEmpty(); }
1947  bool HasOnlyOneNonEnvironmentUse() const {
1948    return !HasEnvironmentUses() && GetUses().HasOnlyOneUse();
1949  }
1950
1951  // Does this instruction strictly dominate `other_instruction`?
1952  // Returns false if this instruction and `other_instruction` are the same.
1953  // Aborts if this instruction and `other_instruction` are both phis.
1954  bool StrictlyDominates(HInstruction* other_instruction) const;
1955
1956  int GetId() const { return id_; }
1957  void SetId(int id) { id_ = id; }
1958
1959  int GetSsaIndex() const { return ssa_index_; }
1960  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
1961  bool HasSsaIndex() const { return ssa_index_ != -1; }
1962
1963  bool HasEnvironment() const { return environment_ != nullptr; }
1964  HEnvironment* GetEnvironment() const { return environment_; }
1965  // Set the `environment_` field. Raw because this method does not
1966  // update the uses lists.
1967  void SetRawEnvironment(HEnvironment* environment) {
1968    DCHECK(environment_ == nullptr);
1969    DCHECK_EQ(environment->GetHolder(), this);
1970    environment_ = environment;
1971  }
1972
1973  void RemoveEnvironment();
1974
1975  // Set the environment of this instruction, copying it from `environment`. While
1976  // copying, the uses lists are being updated.
1977  void CopyEnvironmentFrom(HEnvironment* environment) {
1978    DCHECK(environment_ == nullptr);
1979    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
1980    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
1981    environment_->CopyFrom(environment);
1982    if (environment->GetParent() != nullptr) {
1983      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
1984    }
1985  }
1986
1987  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
1988                                                HBasicBlock* block) {
1989    DCHECK(environment_ == nullptr);
1990    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
1991    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
1992    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
1993    if (environment->GetParent() != nullptr) {
1994      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
1995    }
1996  }
1997
1998  // Returns the number of entries in the environment. Typically, that is the
1999  // number of dex registers in a method. It could be more in case of inlining.
2000  size_t EnvironmentSize() const;
2001
2002  LocationSummary* GetLocations() const { return locations_; }
2003  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2004
2005  void ReplaceWith(HInstruction* instruction);
2006  void ReplaceInput(HInstruction* replacement, size_t index);
2007
2008  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2009  // uses of this instruction by `other` are *not* updated.
2010  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2011    ReplaceWith(other);
2012    other->ReplaceInput(this, use_index);
2013  }
2014
2015  // Move `this` instruction before `cursor`.
2016  void MoveBefore(HInstruction* cursor);
2017
2018  // Move `this` before its first user and out of any loops. If there is no
2019  // out-of-loop user that dominates all other users, move the instruction
2020  // to the end of the out-of-loop common dominator of the user's blocks.
2021  //
2022  // This can be used only on non-throwing instructions with no side effects that
2023  // have at least one use but no environment uses.
2024  void MoveBeforeFirstUserAndOutOfLoops();
2025
2026#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2027  bool Is##type() const;                                                       \
2028  const H##type* As##type() const;                                             \
2029  H##type* As##type();
2030
2031  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2032#undef INSTRUCTION_TYPE_CHECK
2033
2034#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2035  bool Is##type() const { return (As##type() != nullptr); }                    \
2036  virtual const H##type* As##type() const { return nullptr; }                  \
2037  virtual H##type* As##type() { return nullptr; }
2038  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2039#undef INSTRUCTION_TYPE_CHECK
2040
2041  // Returns whether the instruction can be moved within the graph.
2042  virtual bool CanBeMoved() const { return false; }
2043
2044  // Returns whether the two instructions are of the same kind.
2045  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
2046    return false;
2047  }
2048
2049  // Returns whether any data encoded in the two instructions is equal.
2050  // This method does not look at the inputs. Both instructions must be
2051  // of the same type, otherwise the method has undefined behavior.
2052  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
2053    return false;
2054  }
2055
2056  // Returns whether two instructions are equal, that is:
2057  // 1) They have the same type and contain the same data (InstructionDataEquals).
2058  // 2) Their inputs are identical.
2059  bool Equals(HInstruction* other) const;
2060
2061  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2062  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
2063  // the virtual function because the __attribute__((__pure__)) doesn't really
2064  // apply the strong requirement for virtual functions, preventing optimizations.
2065  InstructionKind GetKind() const PURE;
2066  virtual InstructionKind GetKindInternal() const = 0;
2067
2068  virtual size_t ComputeHashCode() const {
2069    size_t result = GetKind();
2070    for (size_t i = 0, e = InputCount(); i < e; ++i) {
2071      result = (result * 31) + InputAt(i)->GetId();
2072    }
2073    return result;
2074  }
2075
2076  SideEffects GetSideEffects() const { return side_effects_; }
2077  void SetSideEffects(SideEffects other) { side_effects_ = other; }
2078  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2079
2080  size_t GetLifetimePosition() const { return lifetime_position_; }
2081  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2082  LiveInterval* GetLiveInterval() const { return live_interval_; }
2083  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2084  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2085
2086  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2087
2088  // Returns whether the code generation of the instruction will require to have access
2089  // to the current method. Such instructions are:
2090  // (1): Instructions that require an environment, as calling the runtime requires
2091  //      to walk the stack and have the current method stored at a specific stack address.
2092  // (2): Object literals like classes and strings, that are loaded from the dex cache
2093  //      fields of the current method.
2094  bool NeedsCurrentMethod() const {
2095    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
2096  }
2097
2098  // Returns whether the code generation of the instruction will require to have access
2099  // to the dex cache of the current method's declaring class via the current method.
2100  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2101
2102  // Does this instruction have any use in an environment before
2103  // control flow hits 'other'?
2104  bool HasAnyEnvironmentUseBefore(HInstruction* other);
2105
2106  // Remove all references to environment uses of this instruction.
2107  // The caller must ensure that this is safe to do.
2108  void RemoveEnvironmentUsers();
2109
2110  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2111  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2112
2113 protected:
2114  // If set, the machine code for this instruction is assumed to be generated by
2115  // its users. Used by liveness analysis to compute use positions accordingly.
2116  static constexpr size_t kFlagEmittedAtUseSite = 0u;
2117  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2118  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2119  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2120
2121  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
2122  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;
2123
2124  uint32_t GetPackedFields() const {
2125    return packed_fields_;
2126  }
2127
2128  template <size_t flag>
2129  bool GetPackedFlag() const {
2130    return (packed_fields_ & (1u << flag)) != 0u;
2131  }
2132
2133  template <size_t flag>
2134  void SetPackedFlag(bool value = true) {
2135    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2136  }
2137
2138  template <typename BitFieldType>
2139  typename BitFieldType::value_type GetPackedField() const {
2140    return BitFieldType::Decode(packed_fields_);
2141  }
2142
2143  template <typename BitFieldType>
2144  void SetPackedField(typename BitFieldType::value_type value) {
2145    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2146    packed_fields_ = BitFieldType::Update(value, packed_fields_);
2147  }
2148
2149 private:
2150  void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); }
2151
2152  HInstruction* previous_;
2153  HInstruction* next_;
2154  HBasicBlock* block_;
2155  const uint32_t dex_pc_;
2156
2157  // An instruction gets an id when it is added to the graph.
2158  // It reflects creation order. A negative id means the instruction
2159  // has not been added to the graph.
2160  int id_;
2161
2162  // When doing liveness analysis, instructions that have uses get an SSA index.
2163  int ssa_index_;
2164
2165  // Packed fields.
2166  uint32_t packed_fields_;
2167
2168  // List of instructions that have this instruction as input.
2169  HUseList<HInstruction*> uses_;
2170
2171  // List of environments that contain this instruction.
2172  HUseList<HEnvironment*> env_uses_;
2173
2174  // The environment associated with this instruction. Not null if the instruction
2175  // might jump out of the method.
2176  HEnvironment* environment_;
2177
2178  // Set by the code generator.
2179  LocationSummary* locations_;
2180
2181  // Set by the liveness analysis.
2182  LiveInterval* live_interval_;
2183
2184  // Set by the liveness analysis, this is the position in a linear
2185  // order of blocks where this instruction's live interval start.
2186  size_t lifetime_position_;
2187
2188  SideEffects side_effects_;
2189
2190  // The reference handle part of the reference type info.
2191  // The IsExact() flag is stored in packed fields.
2192  // TODO: for primitive types this should be marked as invalid.
2193  ReferenceTypeInfo::TypeHandle reference_type_handle_;
2194
2195  friend class GraphChecker;
2196  friend class HBasicBlock;
2197  friend class HEnvironment;
2198  friend class HGraph;
2199  friend class HInstructionList;
2200
2201  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2202};
2203std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2204
2205class HInputIterator : public ValueObject {
2206 public:
2207  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}
2208
2209  bool Done() const { return index_ == instruction_->InputCount(); }
2210  HInstruction* Current() const { return instruction_->InputAt(index_); }
2211  void Advance() { index_++; }
2212
2213 private:
2214  HInstruction* instruction_;
2215  size_t index_;
2216
2217  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
2218};
2219
2220class HInstructionIterator : public ValueObject {
2221 public:
2222  explicit HInstructionIterator(const HInstructionList& instructions)
2223      : instruction_(instructions.first_instruction_) {
2224    next_ = Done() ? nullptr : instruction_->GetNext();
2225  }
2226
2227  bool Done() const { return instruction_ == nullptr; }
2228  HInstruction* Current() const { return instruction_; }
2229  void Advance() {
2230    instruction_ = next_;
2231    next_ = Done() ? nullptr : instruction_->GetNext();
2232  }
2233
2234 private:
2235  HInstruction* instruction_;
2236  HInstruction* next_;
2237
2238  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2239};
2240
2241class HBackwardInstructionIterator : public ValueObject {
2242 public:
2243  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2244      : instruction_(instructions.last_instruction_) {
2245    next_ = Done() ? nullptr : instruction_->GetPrevious();
2246  }
2247
2248  bool Done() const { return instruction_ == nullptr; }
2249  HInstruction* Current() const { return instruction_; }
2250  void Advance() {
2251    instruction_ = next_;
2252    next_ = Done() ? nullptr : instruction_->GetPrevious();
2253  }
2254
2255 private:
2256  HInstruction* instruction_;
2257  HInstruction* next_;
2258
2259  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2260};
2261
// Instruction base for fixed-arity instructions: the N input records are
// stored inline in the object.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  // Inline storage for the N input records.
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2287
// HTemplateInstruction specialization for N=0: no input storage is needed,
// and the input accessors must never be called.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};
2314
// Instruction base for fixed-arity instructions that produce a value of a
// statically-known primitive type, stored in the packed fields.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Packed-field layout: the result type is stored directly after the
  // generic HInstruction flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2337
2338// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2339// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Terminates the block: control flows to the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2352
2353// Represents dex's RETURN opcodes. A HReturn is a control flow
2354// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  // `value` is the single input: the value returned to the caller.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Terminates the block: control flows to the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2369
// SSA phi node: merges values flowing in from the predecessors of its block.
// Input i corresponds to the value coming from the i-th predecessor.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  // True if this phi lives in a catch block (merges values from exceptional edges).
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  // Defined out-of-line: mutate the variable-length input list.
  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi represents.
  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness flag used by SsaBuilder/SsaPhiElimination (see constructor comment).
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi in the same block for the same dex register
  // (i.e. a type-equivalent of this phi).
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  // Packed-field layout: the phi type occupies the bits right after the
  // generic HInstruction bits, followed by the liveness and nullability flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  // Variable-length input list, arena-allocated.
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  // The dex register backing this phi; fixed at construction.
  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2470
2471// The exit instruction is the only instruction of the exit block.
2472// Instructions aborting the method (HThrow and HReturn) must branch to the
2473// exit block.
2474class HExit : public HTemplateInstruction<0> {
2475 public:
2476  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}
2477
2478  bool IsControlFlow() const OVERRIDE { return true; }
2479
2480  DECLARE_INSTRUCTION(Exit);
2481
2482 private:
2483  DISALLOW_COPY_AND_ASSIGN(HExit);
2484};
2485
2486// Jumps from one block to another.
2487class HGoto : public HTemplateInstruction<0> {
2488 public:
2489  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}
2490
2491  bool IsControlFlow() const OVERRIDE { return true; }
2492
2493  HBasicBlock* GetSuccessor() const {
2494    return GetBlock()->GetSingleSuccessor();
2495  }
2496
2497  DECLARE_INSTRUCTION(Goto);
2498
2499 private:
2500  DISALLOW_COPY_AND_ASSIGN(HGoto);
2501};
2502
// Abstract base class of all compile-time constants. Concrete subclasses
// (int, long, float, double, null) override the value predicates below.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  // Constants have no side effects and can be freely moved.
  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // The value's bit pattern widened to 64 bits (see subclass overrides).
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2526
2527class HNullConstant : public HConstant {
2528 public:
2529  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2530    return true;
2531  }
2532
2533  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2534
2535  size_t ComputeHashCode() const OVERRIDE { return 0; }
2536
2537  // The null constant representation is a 0-bit pattern.
2538  virtual bool IsZeroBitPattern() const { return true; }
2539
2540  DECLARE_INSTRUCTION(NullConstant);
2541
2542 private:
2543  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2544
2545  friend class HGraph;
2546  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2547};
2548
2549// Constants of the type int. Those can be from Dex instructions, or
2550// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // Cast through uint32_t first so negative values are zero-extended,
    // not sign-extended, into the 64-bit representation.
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  // Note: implicit int32_t -> size_t conversion of the value.
  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  // For integral constants, arithmetic zero and the 0-bit pattern coincide.
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constructors are private: only HGraph (and the friended tests) create
  // integer constants.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2591
2592class HLongConstant : public HConstant {
2593 public:
2594  int64_t GetValue() const { return value_; }
2595
2596  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }
2597
2598  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2599    DCHECK(other->IsLongConstant()) << other->DebugName();
2600    return other->AsLongConstant()->value_ == value_;
2601  }
2602
2603  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2604
2605  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2606  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2607  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2608  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2609
2610  DECLARE_INSTRUCTION(LongConstant);
2611
2612 private:
2613  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2614      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}
2615
2616  const int64_t value_;
2617
2618  friend class HGraph;
2619  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
2620};
2621
class HFloatConstant : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // The raw 32-bit IEEE-754 representation, zero-extended to 64 bits.
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    // Compare bit patterns rather than using `==`, so NaNs and signed
    // zeroes are distinguished correctly.
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Exact bit comparison with -1.0f.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero: true for both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // True only for +0.0f (all bits clear); -0.0f has the sign bit set.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs the constant from a raw 32-bit pattern.
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2674
class HDoubleConstant : public HConstant {
 public:
  double GetValue() const { return value_; }

  // The raw 64-bit IEEE-754 representation.
  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    // Compare bit patterns rather than using `==`, so NaNs and signed
    // zeroes are distinguished correctly.
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  // Exact bit comparison with -1.0.
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero: true for both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // True only for +0.0 (all bits clear); -0.0 has the sign bit set.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs the constant from a raw 64-bit pattern.
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2725
2726// Conditional branch. A block ending with an HIf instruction must have
2727// two successors.
2728class HIf : public HTemplateInstruction<1> {
2729 public:
2730  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
2731      : HTemplateInstruction(SideEffects::None(), dex_pc) {
2732    SetRawInputAt(0, input);
2733  }
2734
2735  bool IsControlFlow() const OVERRIDE { return true; }
2736
2737  HBasicBlock* IfTrueSuccessor() const {
2738    return GetBlock()->GetSuccessors()[0];
2739  }
2740
2741  HBasicBlock* IfFalseSuccessor() const {
2742    return GetBlock()->GetSuccessors()[1];
2743  }
2744
2745  DECLARE_INSTRUCTION(If);
2746
2747 private:
2748  DISALLOW_COPY_AND_ASSIGN(HIf);
2749};
2750
2751
2752// Abstract instruction which marks the beginning and/or end of a try block and
2753// links it to the respective exception handlers. Behaves the same as a Goto in
2754// non-exceptional control flow.
2755// Normal-flow successor is stored at index zero, exception handlers under
2756// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum class BoundaryKind {
    kEntry,  // Marks the beginning of a try block.
    kExit,   // Marks the end of a try block.
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // All successors except index zero are exception handlers.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  // Defined out-of-line; compares handler lists with another boundary.
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed field storing whether this boundary is a try entry or a try exit.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2813
2814// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Any two deoptimize instructions with equal inputs are interchangeable.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // An environment is required so runtime state can be reconstructed
  // when control transfers to the interpreter.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2836
2837// Represents the ArtMethod that was passed as a first argument to
2838// the method. It is used by instructions that depend on it, like
2839// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  // `type` is the primitive type used to represent the ArtMethod* value
  // (presumably pointer-sized on the target — confirm with callers).
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2850
2851// Fetches an ArtMethod from the virtual table or the interface method table
2852// of a class.
2853class HClassTableGet : public HExpression<1> {
2854 public:
2855  enum class TableKind {
2856    kVTable,
2857    kIMTable,
2858    kLast = kIMTable
2859  };
2860  HClassTableGet(HInstruction* cls,
2861                 Primitive::Type type,
2862                 TableKind kind,
2863                 size_t index,
2864                 uint32_t dex_pc)
2865      : HExpression(type, SideEffects::None(), dex_pc),
2866        index_(index) {
2867    SetPackedField<TableKindField>(kind);
2868    SetRawInputAt(0, cls);
2869  }
2870
2871  bool CanBeMoved() const OVERRIDE { return true; }
2872  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2873    return other->AsClassTableGet()->GetIndex() == index_ &&
2874        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2875  }
2876
2877  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2878  size_t GetIndex() const { return index_; }
2879
2880  DECLARE_INSTRUCTION(ClassTableGet);
2881
2882 private:
2883  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2884  static constexpr size_t kFieldTableKindSize =
2885      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2886  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2887  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2888                "Too many packed fields.");
2889  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
2890
2891  // The index of the ArtMethod in the table.
2892  const size_t index_;
2893
2894  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2895};
2896
2897// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
2898// have one successor for each entry in the switch table, and the final successor
2899// will be the block containing the next Dex opcode.
2900class HPackedSwitch : public HTemplateInstruction<1> {
2901 public:
2902  HPackedSwitch(int32_t start_value,
2903                uint32_t num_entries,
2904                HInstruction* input,
2905                uint32_t dex_pc = kNoDexPc)
2906    : HTemplateInstruction(SideEffects::None(), dex_pc),
2907      start_value_(start_value),
2908      num_entries_(num_entries) {
2909    SetRawInputAt(0, input);
2910  }
2911
2912  bool IsControlFlow() const OVERRIDE { return true; }
2913
2914  int32_t GetStartValue() const { return start_value_; }
2915
2916  uint32_t GetNumEntries() const { return num_entries_; }
2917
2918  HBasicBlock* GetDefaultBlock() const {
2919    // Last entry is the default block.
2920    return GetBlock()->GetSuccessors()[num_entries_];
2921  }
2922  DECLARE_INSTRUCTION(PackedSwitch);
2923
2924 private:
2925  const int32_t start_value_;
2926  const uint32_t num_entries_;
2927
2928  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
2929};
2930
// Abstract base class for operations with a single input that produce a
// value of `result_type` and have no side effects.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Unary operations are pure: movable, and equal whenever inputs are equal.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
2962
// Abstract base class for operations with two inputs. Subclasses provide the
// result type, optional side effects, and the constant-folding Evaluate()
// overloads.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Overridden by subclasses whose inputs may legally be swapped (enables
  // the canonicalization in OrderInputs below).
  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if both inputs are the same, or the only constant is
    // already on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // The (long, int) overload is only meaningful for subclasses that accept
  // mixed-width inputs; others hit the fatal default below.
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3052
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Pretty-printer, e.g. for debug dumps (defined out-of-line).
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3063
// Abstract base class of all comparisons that produce a boolean result.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Two conditions are data-equal when all packed fields (notably the
  // comparison bias) match.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Returns whether a NaN input makes this FP condition evaluate to true.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Returns whether a NaN input makes this FP condition evaluate to false.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way FP comparison: unordered operands resolve per the bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3143
3144// Instruction to check if two inputs are equal to each other.
3145class HEqual : public HCondition {
3146 public:
3147  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3148      : HCondition(first, second, dex_pc) {}
3149
3150  bool IsCommutative() const OVERRIDE { return true; }
3151
3152  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3153                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3154    return MakeConstantCondition(true, GetDexPc());
3155  }
3156  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3157    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3158  }
3159  // In the following Evaluate methods, a HCompare instruction has
3160  // been merged into this HEqual instruction; evaluate it as
3161  // `Compare(x, y) == 0`.
3162  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3163    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3164                                 GetDexPc());
3165  }
3166  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3167    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3168  }
3169  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3170    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3171  }
3172
3173  DECLARE_INSTRUCTION(Equal);
3174
3175  IfCondition GetCondition() const OVERRIDE {
3176    return kCondEQ;
3177  }
3178
3179  IfCondition GetOppositeCondition() const OVERRIDE {
3180    return kCondNE;
3181  }
3182
3183 private:
3184  template <typename T> bool Compute(T x, T y) const { return x == y; }
3185
3186  DISALLOW_COPY_AND_ASSIGN(HEqual);
3187};
3188
3189class HNotEqual : public HCondition {
3190 public:
3191  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3192      : HCondition(first, second, dex_pc) {}
3193
3194  bool IsCommutative() const OVERRIDE { return true; }
3195
3196  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3197                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3198    return MakeConstantCondition(false, GetDexPc());
3199  }
3200  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3201    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3202  }
3203  // In the following Evaluate methods, a HCompare instruction has
3204  // been merged into this HNotEqual instruction; evaluate it as
3205  // `Compare(x, y) != 0`.
3206  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3207    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3208  }
3209  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3210    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3211  }
3212  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3213    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3214  }
3215
3216  DECLARE_INSTRUCTION(NotEqual);
3217
3218  IfCondition GetCondition() const OVERRIDE {
3219    return kCondNE;
3220  }
3221
3222  IfCondition GetOppositeCondition() const OVERRIDE {
3223    return kCondEQ;
3224  }
3225
3226 private:
3227  template <typename T> bool Compute(T x, T y) const { return x != y; }
3228
3229  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3230};
3231
3232class HLessThan : public HCondition {
3233 public:
3234  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3235      : HCondition(first, second, dex_pc) {}
3236
3237  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3238    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3239  }
3240  // In the following Evaluate methods, a HCompare instruction has
3241  // been merged into this HLessThan instruction; evaluate it as
3242  // `Compare(x, y) < 0`.
3243  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3244    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3245  }
3246  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3247    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3248  }
3249  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3250    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3251  }
3252
3253  DECLARE_INSTRUCTION(LessThan);
3254
3255  IfCondition GetCondition() const OVERRIDE {
3256    return kCondLT;
3257  }
3258
3259  IfCondition GetOppositeCondition() const OVERRIDE {
3260    return kCondGE;
3261  }
3262
3263 private:
3264  template <typename T> bool Compute(T x, T y) const { return x < y; }
3265
3266  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3267};
3268
3269class HLessThanOrEqual : public HCondition {
3270 public:
3271  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3272      : HCondition(first, second, dex_pc) {}
3273
3274  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3275    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3276  }
3277  // In the following Evaluate methods, a HCompare instruction has
3278  // been merged into this HLessThanOrEqual instruction; evaluate it as
3279  // `Compare(x, y) <= 0`.
3280  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3281    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3282  }
3283  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3284    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3285  }
3286  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3287    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3288  }
3289
3290  DECLARE_INSTRUCTION(LessThanOrEqual);
3291
3292  IfCondition GetCondition() const OVERRIDE {
3293    return kCondLE;
3294  }
3295
3296  IfCondition GetOppositeCondition() const OVERRIDE {
3297    return kCondGT;
3298  }
3299
3300 private:
3301  template <typename T> bool Compute(T x, T y) const { return x <= y; }
3302
3303  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3304};
3305
3306class HGreaterThan : public HCondition {
3307 public:
3308  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3309      : HCondition(first, second, dex_pc) {}
3310
3311  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3312    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3313  }
3314  // In the following Evaluate methods, a HCompare instruction has
3315  // been merged into this HGreaterThan instruction; evaluate it as
3316  // `Compare(x, y) > 0`.
3317  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3318    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3319  }
3320  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3321    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3322  }
3323  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3324    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3325  }
3326
3327  DECLARE_INSTRUCTION(GreaterThan);
3328
3329  IfCondition GetCondition() const OVERRIDE {
3330    return kCondGT;
3331  }
3332
3333  IfCondition GetOppositeCondition() const OVERRIDE {
3334    return kCondLE;
3335  }
3336
3337 private:
3338  template <typename T> bool Compute(T x, T y) const { return x > y; }
3339
3340  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3341};
3342
3343class HGreaterThanOrEqual : public HCondition {
3344 public:
3345  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3346      : HCondition(first, second, dex_pc) {}
3347
3348  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3349    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3350  }
3351  // In the following Evaluate methods, a HCompare instruction has
3352  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3353  // `Compare(x, y) >= 0`.
3354  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3355    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3356  }
3357  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3358    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3359  }
3360  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3361    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3362  }
3363
3364  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3365
3366  IfCondition GetCondition() const OVERRIDE {
3367    return kCondGE;
3368  }
3369
3370  IfCondition GetOppositeCondition() const OVERRIDE {
3371    return kCondLT;
3372  }
3373
3374 private:
3375  template <typename T> bool Compute(T x, T y) const { return x >= y; }
3376
3377  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3378};
3379
3380class HBelow : public HCondition {
3381 public:
3382  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3383      : HCondition(first, second, dex_pc) {}
3384
3385  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3386    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3387  }
3388  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3389    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3390  }
3391  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3392                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3393    LOG(FATAL) << DebugName() << " is not defined for float values";
3394    UNREACHABLE();
3395  }
3396  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3397                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3398    LOG(FATAL) << DebugName() << " is not defined for double values";
3399    UNREACHABLE();
3400  }
3401
3402  DECLARE_INSTRUCTION(Below);
3403
3404  IfCondition GetCondition() const OVERRIDE {
3405    return kCondB;
3406  }
3407
3408  IfCondition GetOppositeCondition() const OVERRIDE {
3409    return kCondAE;
3410  }
3411
3412 private:
3413  template <typename T> bool Compute(T x, T y) const {
3414    return MakeUnsigned(x) < MakeUnsigned(y);
3415  }
3416
3417  DISALLOW_COPY_AND_ASSIGN(HBelow);
3418};
3419
3420class HBelowOrEqual : public HCondition {
3421 public:
3422  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3423      : HCondition(first, second, dex_pc) {}
3424
3425  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3426    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3427  }
3428  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3429    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3430  }
3431  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3432                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3433    LOG(FATAL) << DebugName() << " is not defined for float values";
3434    UNREACHABLE();
3435  }
3436  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3437                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3438    LOG(FATAL) << DebugName() << " is not defined for double values";
3439    UNREACHABLE();
3440  }
3441
3442  DECLARE_INSTRUCTION(BelowOrEqual);
3443
3444  IfCondition GetCondition() const OVERRIDE {
3445    return kCondBE;
3446  }
3447
3448  IfCondition GetOppositeCondition() const OVERRIDE {
3449    return kCondA;
3450  }
3451
3452 private:
3453  template <typename T> bool Compute(T x, T y) const {
3454    return MakeUnsigned(x) <= MakeUnsigned(y);
3455  }
3456
3457  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3458};
3459
3460class HAbove : public HCondition {
3461 public:
3462  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3463      : HCondition(first, second, dex_pc) {}
3464
3465  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3466    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3467  }
3468  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3469    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3470  }
3471  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3472                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3473    LOG(FATAL) << DebugName() << " is not defined for float values";
3474    UNREACHABLE();
3475  }
3476  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3477                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3478    LOG(FATAL) << DebugName() << " is not defined for double values";
3479    UNREACHABLE();
3480  }
3481
3482  DECLARE_INSTRUCTION(Above);
3483
3484  IfCondition GetCondition() const OVERRIDE {
3485    return kCondA;
3486  }
3487
3488  IfCondition GetOppositeCondition() const OVERRIDE {
3489    return kCondBE;
3490  }
3491
3492 private:
3493  template <typename T> bool Compute(T x, T y) const {
3494    return MakeUnsigned(x) > MakeUnsigned(y);
3495  }
3496
3497  DISALLOW_COPY_AND_ASSIGN(HAbove);
3498};
3499
3500class HAboveOrEqual : public HCondition {
3501 public:
3502  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3503      : HCondition(first, second, dex_pc) {}
3504
3505  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3506    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3507  }
3508  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3509    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3510  }
3511  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3512                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3513    LOG(FATAL) << DebugName() << " is not defined for float values";
3514    UNREACHABLE();
3515  }
3516  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3517                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3518    LOG(FATAL) << DebugName() << " is not defined for double values";
3519    UNREACHABLE();
3520  }
3521
3522  DECLARE_INSTRUCTION(AboveOrEqual);
3523
3524  IfCondition GetCondition() const OVERRIDE {
3525    return kCondAE;
3526  }
3527
3528  IfCondition GetOppositeCondition() const OVERRIDE {
3529    return kCondB;
3530  }
3531
3532 private:
3533  template <typename T> bool Compute(T x, T y) const {
3534    return MakeUnsigned(x) >= MakeUnsigned(y);
3535  }
3536
3537  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3538};
3539
3540// Instruction to check how two inputs compare to each other.
3541// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
3542class HCompare : public HBinaryOperation {
3543 public:
3544  // Note that `comparison_type` is the type of comparison performed
3545  // between the comparison's inputs, not the type of the instantiated
3546  // HCompare instruction (which is always Primitive::kPrimInt).
3547  HCompare(Primitive::Type comparison_type,
3548           HInstruction* first,
3549           HInstruction* second,
3550           ComparisonBias bias,
3551           uint32_t dex_pc)
3552      : HBinaryOperation(Primitive::kPrimInt,
3553                         first,
3554                         second,
3555                         SideEffectsForArchRuntimeCalls(comparison_type),
3556                         dex_pc) {
3557    SetPackedField<ComparisonBiasField>(bias);
3558    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
3559    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
3560  }
3561
3562  template <typename T>
3563  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3564
3565  template <typename T>
3566  int32_t ComputeFP(T x, T y) const {
3567    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3568    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3569    // Handle the bias.
3570    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
3571  }
3572
3573  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3574    // Note that there is no "cmp-int" Dex instruction so we shouldn't
3575    // reach this code path when processing a freshly built HIR
3576    // graph. However HCompare integer instructions can be synthesized
3577    // by the instruction simplifier to implement IntegerCompare and
3578    // IntegerSignum intrinsics, so we have to handle this case.
3579    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3580  }
3581  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3582    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3583  }
3584  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3585    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3586  }
3587  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3588    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3589  }
3590
3591  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
3592    return GetPackedFields() == other->AsCompare()->GetPackedFields();
3593  }
3594
3595  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3596
3597  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
3598  // Only meaningful for floating-point comparisons.
3599  bool IsGtBias() const {
3600    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3601    return GetBias() == ComparisonBias::kGtBias;
3602  }
3603
3604  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
3605    // Comparisons do not require a runtime call in any back end.
3606    return SideEffects::None();
3607  }
3608
3609  DECLARE_INSTRUCTION(Compare);
3610
3611 protected:
3612  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3613  static constexpr size_t kFieldComparisonBiasSize =
3614      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3615  static constexpr size_t kNumberOfComparePackedBits =
3616      kFieldComparisonBias + kFieldComparisonBiasSize;
3617  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3618  using ComparisonBiasField =
3619      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3620
3621  // Return an integer constant containing the result of a comparison evaluated at compile time.
3622  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
3623    DCHECK(value == -1 || value == 0 || value == 1) << value;
3624    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3625  }
3626
3627 private:
3628  DISALLOW_COPY_AND_ASSIGN(HCompare);
3629};
3630
3631class HNewInstance : public HExpression<2> {
3632 public:
3633  HNewInstance(HInstruction* cls,
3634               HCurrentMethod* current_method,
3635               uint32_t dex_pc,
3636               uint16_t type_index,
3637               const DexFile& dex_file,
3638               bool can_throw,
3639               bool finalizable,
3640               QuickEntrypointEnum entrypoint)
3641      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
3642        type_index_(type_index),
3643        dex_file_(dex_file),
3644        entrypoint_(entrypoint) {
3645    SetPackedFlag<kFlagCanThrow>(can_throw);
3646    SetPackedFlag<kFlagFinalizable>(finalizable);
3647    SetRawInputAt(0, cls);
3648    SetRawInputAt(1, current_method);
3649  }
3650
3651  uint16_t GetTypeIndex() const { return type_index_; }
3652  const DexFile& GetDexFile() const { return dex_file_; }
3653
3654  // Calls runtime so needs an environment.
3655  bool NeedsEnvironment() const OVERRIDE { return true; }
3656
3657  // It may throw when called on type that's not instantiable/accessible.
3658  // It can throw OOME.
3659  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
3660  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>() || true; }
3661
3662  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
3663
3664  bool CanBeNull() const OVERRIDE { return false; }
3665
3666  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
3667
3668  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
3669    entrypoint_ = entrypoint;
3670  }
3671
3672  bool IsStringAlloc() const;
3673
3674  DECLARE_INSTRUCTION(NewInstance);
3675
3676 private:
3677  static constexpr size_t kFlagCanThrow = kNumberOfExpressionPackedBits;
3678  static constexpr size_t kFlagFinalizable = kFlagCanThrow + 1;
3679  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
3680  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
3681                "Too many packed fields.");
3682
3683  const uint16_t type_index_;
3684  const DexFile& dex_file_;
3685  QuickEntrypointEnum entrypoint_;
3686
3687  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
3688};
3689
// Enumeration of the intrinsics recognized by the optimizing compiler.
// One kName enumerator is generated per entry of intrinsics_list.h;
// kNone marks an invoke that is not an intrinsic.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
// Pretty-printer for Intrinsics values; defined out-of-line.
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
3700
// Whether an intrinsic needs an environment and/or a dex cache (see SetIntrinsic()).
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3705
// Heap-memory side effects an intrinsic may have (see SetIntrinsic()).
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3712
// Whether an intrinsic may throw (see SetIntrinsic()).
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3717
// Abstract base class for all method-invocation instructions. Holds the
// arguments (plus possibly extra non-argument inputs) in an arena-allocated
// vector and records any intrinsic recognized for the call.
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  // Defined out-of-line.
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  // Retrieved through the environment; the invoke must have an environment
  // attached before this is called.
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const {
    return GetPackedField<OriginalInvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Marks this invoke as the given intrinsic; defined out-of-line.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only invokes recognized as intrinsics are movable.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes carry equal data only when both are the same non-kNone intrinsic.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: original invoke type, then return type, then the
  // can-throw flag, allocated after the generic HInstruction bits.
  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOriginalInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using OriginalInvokeTypeField =
      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
    // Conservative default; can later be cleared through SetCanThrow().
    SetPackedFlag<kFlagCanThrow>(true);
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3832
3833class HInvokeUnresolved : public HInvoke {
3834 public:
3835  HInvokeUnresolved(ArenaAllocator* arena,
3836                    uint32_t number_of_arguments,
3837                    Primitive::Type return_type,
3838                    uint32_t dex_pc,
3839                    uint32_t dex_method_index,
3840                    InvokeType invoke_type)
3841      : HInvoke(arena,
3842                number_of_arguments,
3843                0u /* number_of_other_inputs */,
3844                return_type,
3845                dex_pc,
3846                dex_method_index,
3847                invoke_type) {
3848  }
3849
3850  DECLARE_INSTRUCTION(InvokeUnresolved);
3851
3852 private:
3853  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
3854};
3855
3856class HInvokeStaticOrDirect : public HInvoke {
3857 public:
3858  // Requirements of this method call regarding the class
3859  // initialization (clinit) check of its declaring class.
3860  enum class ClinitCheckRequirement {
3861    kNone,      // Class already initialized.
3862    kExplicit,  // Static call having explicit clinit check as last input.
3863    kImplicit,  // Static call implicitly requiring a clinit check.
3864    kLast = kImplicit
3865  };
3866
3867  // Determines how to load the target ArtMethod*.
3868  enum class MethodLoadKind {
3869    // Use a String init ArtMethod* loaded from Thread entrypoints.
3870    kStringInit,
3871
3872    // Use the method's own ArtMethod* loaded by the register allocator.
3873    kRecursive,
3874
3875    // Use ArtMethod* at a known address, embed the direct address in the code.
3876    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3877    kDirectAddress,
3878
3879    // Use ArtMethod* at an address that will be known at link time, embed the direct
3880    // address in the code. If the image is relocatable, emit .patch_oat entry.
3881    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3882    // the image relocatable or not.
3883    kDirectAddressWithFixup,
3884
3885    // Load from resolved methods array in the dex cache using a PC-relative load.
3886    // Used when we need to use the dex cache, for example for invoke-static that
3887    // may cause class initialization (the entry may point to a resolution method),
3888    // and we know that we can access the dex cache arrays using a PC-relative load.
3889    kDexCachePcRelative,
3890
3891    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3892    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3893    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3894    // or unimplemented or impractical (i.e. slow) on a particular architecture.
3895    kDexCacheViaMethod,
3896  };
3897
3898  // Determines the location of the code pointer.
3899  enum class CodePtrLocation {
3900    // Recursive call, use local PC-relative call instruction.
3901    kCallSelf,
3902
3903    // Use PC-relative call instruction patched at link time.
3904    // Used for calls within an oat file, boot->boot or app->app.
3905    kCallPCRelative,
3906
3907    // Call to a known target address, embed the direct address in code.
3908    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3909    kCallDirect,
3910
3911    // Call to a target address that will be known at link time, embed the direct
3912    // address in code. If the image is relocatable, emit .patch_oat entry.
3913    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3914    // the image relocatable or not.
3915    kCallDirectWithFixup,
3916
3917    // Use code pointer from the ArtMethod*.
3918    // Used when we don't know the target code. This is also the last-resort-kind used when
3919    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3920    kCallArtMethod,
3921  };
3922
3923  struct DispatchInfo {
3924    MethodLoadKind method_load_kind;
3925    CodePtrLocation code_ptr_location;
3926    // The method load data holds
3927    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
3928    //     Note that there are multiple string init methods, each having its own offset.
3929    //   - the method address for kDirectAddress
3930    //   - the dex cache arrays offset for kDexCachePcRel.
3931    uint64_t method_load_data;
3932    uint64_t direct_code_ptr;
3933  };
3934
3935  HInvokeStaticOrDirect(ArenaAllocator* arena,
3936                        uint32_t number_of_arguments,
3937                        Primitive::Type return_type,
3938                        uint32_t dex_pc,
3939                        uint32_t method_index,
3940                        MethodReference target_method,
3941                        DispatchInfo dispatch_info,
3942                        InvokeType original_invoke_type,
3943                        InvokeType optimized_invoke_type,
3944                        ClinitCheckRequirement clinit_check_requirement)
3945      : HInvoke(arena,
3946                number_of_arguments,
3947                // There is potentially one extra argument for the HCurrentMethod node, and
3948                // potentially one other if the clinit check is explicit, and potentially
3949                // one other if the method is a string factory.
3950                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
3951                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
3952                return_type,
3953                dex_pc,
3954                method_index,
3955                original_invoke_type),
3956        target_method_(target_method),
3957        dispatch_info_(dispatch_info) {
3958    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
3959    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
3960  }
3961
3962  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
3963    bool had_current_method_input = HasCurrentMethodInput();
3964    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
3965
3966    // Using the current method is the default and once we find a better
3967    // method load kind, we should not go back to using the current method.
3968    DCHECK(had_current_method_input || !needs_current_method_input);
3969
3970    if (had_current_method_input && !needs_current_method_input) {
3971      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
3972      RemoveInputAt(GetSpecialInputIndex());
3973    }
3974    dispatch_info_ = dispatch_info;
3975  }
3976
3977  void AddSpecialInput(HInstruction* input) {
3978    // We allow only one special input.
3979    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
3980    DCHECK(InputCount() == GetSpecialInputIndex() ||
3981           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
3982    InsertInputAt(GetSpecialInputIndex(), input);
3983  }
3984
3985  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
3986    // We access the method via the dex cache so we can't do an implicit null check.
3987    // TODO: for intrinsics we can generate implicit null checks.
3988    return false;
3989  }
3990
3991  bool CanBeNull() const OVERRIDE {
3992    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
3993  }
3994
  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  InvokeType GetOptimizedInvokeType() const {
    return GetPackedField<OptimizedInvokeTypeField>();
  }

  void SetOptimizedInvokeType(InvokeType invoke_type) {
    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
  }

  // Accessors and predicates over the dispatch info's method load kind and
  // code pointer location.
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  // Whether the special input slot holds the current method pointer; the
  // DCHECKs verify the input list is consistent with the method load kind.
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }
4034
  // The meaning of `dispatch_info_.method_load_data` depends on the method
  // load kind; each accessor below asserts the kind it is valid for.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }
4054
  // The class-initialization check requirement recorded for this invoke.
  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }
4063
  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    // Unlink from the use list before shrinking the input list.
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }
4095
4096  DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4097
4098 protected:
  // Forwards to HInvoke::InputRecordAt; in debug builds additionally verifies
  // that the trailing clinit-check input (when present) has the expected kind.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }
4112
4113  void InsertInputAt(size_t index, HInstruction* input);
4114  void RemoveInputAt(size_t index);
4115
4116 private:
4117  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
4118  static constexpr size_t kFieldOptimizedInvokeTypeSize =
4119      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4120  static constexpr size_t kFieldClinitCheckRequirement =
4121      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
4122  static constexpr size_t kFieldClinitCheckRequirementSize =
4123      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4124  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4125      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4126  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4127                "Too many packed fields.");
4128  using OptimizedInvokeTypeField =
4129      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
4130  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4131                                               kFieldClinitCheckRequirement,
4132                                               kFieldClinitCheckRequirementSize>;
4133
4134  // The target method may refer to different dex file or method index than the original
4135  // invoke. This happens for sharpened calls and for calls where a method was redeclared
4136  // in derived class to increase visibility.
4137  MethodReference target_method_;
4138  DispatchInfo dispatch_info_;
4139
4140  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
4141};
4142std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4143std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4144
4145class HInvokeVirtual : public HInvoke {
4146 public:
4147  HInvokeVirtual(ArenaAllocator* arena,
4148                 uint32_t number_of_arguments,
4149                 Primitive::Type return_type,
4150                 uint32_t dex_pc,
4151                 uint32_t dex_method_index,
4152                 uint32_t vtable_index)
4153      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
4154        vtable_index_(vtable_index) {}
4155
4156  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4157    // TODO: Add implicit null checks in intrinsics.
4158    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4159  }
4160
4161  uint32_t GetVTableIndex() const { return vtable_index_; }
4162
4163  DECLARE_INSTRUCTION(InvokeVirtual);
4164
4165 private:
4166  const uint32_t vtable_index_;
4167
4168  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
4169};
4170
4171class HInvokeInterface : public HInvoke {
4172 public:
4173  HInvokeInterface(ArenaAllocator* arena,
4174                   uint32_t number_of_arguments,
4175                   Primitive::Type return_type,
4176                   uint32_t dex_pc,
4177                   uint32_t dex_method_index,
4178                   uint32_t imt_index)
4179      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
4180        imt_index_(imt_index) {}
4181
4182  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4183    // TODO: Add implicit null checks in intrinsics.
4184    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4185  }
4186
4187  uint32_t GetImtIndex() const { return imt_index_; }
4188  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4189
4190  DECLARE_INSTRUCTION(InvokeInterface);
4191
4192 private:
4193  const uint32_t imt_index_;
4194
4195  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4196};
4197
4198class HNeg : public HUnaryOperation {
4199 public:
4200  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4201      : HUnaryOperation(result_type, input, dex_pc) {
4202    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4203  }
4204
4205  template <typename T> T Compute(T x) const { return -x; }
4206
4207  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4208    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4209  }
4210  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4211    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4212  }
4213  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4214    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4215  }
4216  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4217    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4218  }
4219
4220  DECLARE_INSTRUCTION(Neg);
4221
4222 private:
4223  DISALLOW_COPY_AND_ASSIGN(HNeg);
4224};
4225
4226class HNewArray : public HExpression<2> {
4227 public:
4228  HNewArray(HInstruction* length,
4229            HCurrentMethod* current_method,
4230            uint32_t dex_pc,
4231            uint16_t type_index,
4232            const DexFile& dex_file,
4233            QuickEntrypointEnum entrypoint)
4234      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
4235        type_index_(type_index),
4236        dex_file_(dex_file),
4237        entrypoint_(entrypoint) {
4238    SetRawInputAt(0, length);
4239    SetRawInputAt(1, current_method);
4240  }
4241
4242  uint16_t GetTypeIndex() const { return type_index_; }
4243  const DexFile& GetDexFile() const { return dex_file_; }
4244
4245  // Calls runtime so needs an environment.
4246  bool NeedsEnvironment() const OVERRIDE { return true; }
4247
4248  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4249  bool CanThrow() const OVERRIDE { return true; }
4250
4251  bool CanBeNull() const OVERRIDE { return false; }
4252
4253  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4254
4255  DECLARE_INSTRUCTION(NewArray);
4256
4257 private:
4258  const uint16_t type_index_;
4259  const DexFile& dex_file_;
4260  const QuickEntrypointEnum entrypoint_;
4261
4262  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4263};
4264
4265class HAdd : public HBinaryOperation {
4266 public:
4267  HAdd(Primitive::Type result_type,
4268       HInstruction* left,
4269       HInstruction* right,
4270       uint32_t dex_pc = kNoDexPc)
4271      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4272
4273  bool IsCommutative() const OVERRIDE { return true; }
4274
4275  template <typename T> T Compute(T x, T y) const { return x + y; }
4276
4277  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4278    return GetBlock()->GetGraph()->GetIntConstant(
4279        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4280  }
4281  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4282    return GetBlock()->GetGraph()->GetLongConstant(
4283        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4284  }
4285  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4286    return GetBlock()->GetGraph()->GetFloatConstant(
4287        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4288  }
4289  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4290    return GetBlock()->GetGraph()->GetDoubleConstant(
4291        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4292  }
4293
4294  DECLARE_INSTRUCTION(Add);
4295
4296 private:
4297  DISALLOW_COPY_AND_ASSIGN(HAdd);
4298};
4299
4300class HSub : public HBinaryOperation {
4301 public:
4302  HSub(Primitive::Type result_type,
4303       HInstruction* left,
4304       HInstruction* right,
4305       uint32_t dex_pc = kNoDexPc)
4306      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4307
4308  template <typename T> T Compute(T x, T y) const { return x - y; }
4309
4310  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4311    return GetBlock()->GetGraph()->GetIntConstant(
4312        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4313  }
4314  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4315    return GetBlock()->GetGraph()->GetLongConstant(
4316        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4317  }
4318  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4319    return GetBlock()->GetGraph()->GetFloatConstant(
4320        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4321  }
4322  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4323    return GetBlock()->GetGraph()->GetDoubleConstant(
4324        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4325  }
4326
4327  DECLARE_INSTRUCTION(Sub);
4328
4329 private:
4330  DISALLOW_COPY_AND_ASSIGN(HSub);
4331};
4332
4333class HMul : public HBinaryOperation {
4334 public:
4335  HMul(Primitive::Type result_type,
4336       HInstruction* left,
4337       HInstruction* right,
4338       uint32_t dex_pc = kNoDexPc)
4339      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4340
4341  bool IsCommutative() const OVERRIDE { return true; }
4342
4343  template <typename T> T Compute(T x, T y) const { return x * y; }
4344
4345  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4346    return GetBlock()->GetGraph()->GetIntConstant(
4347        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4348  }
4349  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4350    return GetBlock()->GetGraph()->GetLongConstant(
4351        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4352  }
4353  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4354    return GetBlock()->GetGraph()->GetFloatConstant(
4355        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4356  }
4357  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4358    return GetBlock()->GetGraph()->GetDoubleConstant(
4359        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4360  }
4361
4362  DECLARE_INSTRUCTION(Mul);
4363
4364 private:
4365  DISALLOW_COPY_AND_ASSIGN(HMul);
4366};
4367
4368class HDiv : public HBinaryOperation {
4369 public:
4370  HDiv(Primitive::Type result_type,
4371       HInstruction* left,
4372       HInstruction* right,
4373       uint32_t dex_pc)
4374      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4375
4376  template <typename T>
4377  T ComputeIntegral(T x, T y) const {
4378    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4379    // Our graph structure ensures we never have 0 for `y` during
4380    // constant folding.
4381    DCHECK_NE(y, 0);
4382    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4383    return (y == -1) ? -x : x / y;
4384  }
4385
4386  template <typename T>
4387  T ComputeFP(T x, T y) const {
4388    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4389    return x / y;
4390  }
4391
4392  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4393    return GetBlock()->GetGraph()->GetIntConstant(
4394        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4395  }
4396  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4397    return GetBlock()->GetGraph()->GetLongConstant(
4398        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4399  }
4400  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4401    return GetBlock()->GetGraph()->GetFloatConstant(
4402        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4403  }
4404  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4405    return GetBlock()->GetGraph()->GetDoubleConstant(
4406        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4407  }
4408
4409  static SideEffects SideEffectsForArchRuntimeCalls() {
4410    // The generated code can use a runtime call.
4411    return SideEffects::CanTriggerGC();
4412  }
4413
4414  DECLARE_INSTRUCTION(Div);
4415
4416 private:
4417  DISALLOW_COPY_AND_ASSIGN(HDiv);
4418};
4419
4420class HRem : public HBinaryOperation {
4421 public:
4422  HRem(Primitive::Type result_type,
4423       HInstruction* left,
4424       HInstruction* right,
4425       uint32_t dex_pc)
4426      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4427
4428  template <typename T>
4429  T ComputeIntegral(T x, T y) const {
4430    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4431    // Our graph structure ensures we never have 0 for `y` during
4432    // constant folding.
4433    DCHECK_NE(y, 0);
4434    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4435    return (y == -1) ? 0 : x % y;
4436  }
4437
4438  template <typename T>
4439  T ComputeFP(T x, T y) const {
4440    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4441    return std::fmod(x, y);
4442  }
4443
4444  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4445    return GetBlock()->GetGraph()->GetIntConstant(
4446        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4447  }
4448  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4449    return GetBlock()->GetGraph()->GetLongConstant(
4450        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4451  }
4452  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4453    return GetBlock()->GetGraph()->GetFloatConstant(
4454        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4455  }
4456  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4457    return GetBlock()->GetGraph()->GetDoubleConstant(
4458        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4459  }
4460
4461  static SideEffects SideEffectsForArchRuntimeCalls() {
4462    return SideEffects::CanTriggerGC();
4463  }
4464
4465  DECLARE_INSTRUCTION(Rem);
4466
4467 private:
4468  DISALLOW_COPY_AND_ASSIGN(HRem);
4469};
4470
4471class HDivZeroCheck : public HExpression<1> {
4472 public:
4473  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
4474  // constructor.
4475  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
4476      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4477    SetRawInputAt(0, value);
4478  }
4479
4480  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }
4481
4482  bool CanBeMoved() const OVERRIDE { return true; }
4483
4484  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4485    return true;
4486  }
4487
4488  bool NeedsEnvironment() const OVERRIDE { return true; }
4489  bool CanThrow() const OVERRIDE { return true; }
4490
4491  DECLARE_INSTRUCTION(DivZeroCheck);
4492
4493 private:
4494  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
4495};
4496
4497class HShl : public HBinaryOperation {
4498 public:
4499  HShl(Primitive::Type result_type,
4500       HInstruction* value,
4501       HInstruction* distance,
4502       uint32_t dex_pc = kNoDexPc)
4503      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4504    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4505    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4506  }
4507
4508  template <typename T>
4509  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4510    return value << (distance & max_shift_distance);
4511  }
4512
4513  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4514    return GetBlock()->GetGraph()->GetIntConstant(
4515        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4516  }
4517  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4518    return GetBlock()->GetGraph()->GetLongConstant(
4519        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4520  }
4521  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4522                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4523    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4524    UNREACHABLE();
4525  }
4526  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4527                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4528    LOG(FATAL) << DebugName() << " is not defined for float values";
4529    UNREACHABLE();
4530  }
4531  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4532                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4533    LOG(FATAL) << DebugName() << " is not defined for double values";
4534    UNREACHABLE();
4535  }
4536
4537  DECLARE_INSTRUCTION(Shl);
4538
4539 private:
4540  DISALLOW_COPY_AND_ASSIGN(HShl);
4541};
4542
4543class HShr : public HBinaryOperation {
4544 public:
4545  HShr(Primitive::Type result_type,
4546       HInstruction* value,
4547       HInstruction* distance,
4548       uint32_t dex_pc = kNoDexPc)
4549      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4550    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4551    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4552  }
4553
4554  template <typename T>
4555  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4556    return value >> (distance & max_shift_distance);
4557  }
4558
4559  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4560    return GetBlock()->GetGraph()->GetIntConstant(
4561        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4562  }
4563  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4564    return GetBlock()->GetGraph()->GetLongConstant(
4565        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4566  }
4567  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4568                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4569    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4570    UNREACHABLE();
4571  }
4572  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4573                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4574    LOG(FATAL) << DebugName() << " is not defined for float values";
4575    UNREACHABLE();
4576  }
4577  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4578                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4579    LOG(FATAL) << DebugName() << " is not defined for double values";
4580    UNREACHABLE();
4581  }
4582
4583  DECLARE_INSTRUCTION(Shr);
4584
4585 private:
4586  DISALLOW_COPY_AND_ASSIGN(HShr);
4587};
4588
4589class HUShr : public HBinaryOperation {
4590 public:
4591  HUShr(Primitive::Type result_type,
4592        HInstruction* value,
4593        HInstruction* distance,
4594        uint32_t dex_pc = kNoDexPc)
4595      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4596    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4597    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4598  }
4599
4600  template <typename T>
4601  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4602    typedef typename std::make_unsigned<T>::type V;
4603    V ux = static_cast<V>(value);
4604    return static_cast<T>(ux >> (distance & max_shift_distance));
4605  }
4606
4607  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4608    return GetBlock()->GetGraph()->GetIntConstant(
4609        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4610  }
4611  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4612    return GetBlock()->GetGraph()->GetLongConstant(
4613        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4614  }
4615  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4616                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4617    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4618    UNREACHABLE();
4619  }
4620  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4621                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4622    LOG(FATAL) << DebugName() << " is not defined for float values";
4623    UNREACHABLE();
4624  }
4625  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4626                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4627    LOG(FATAL) << DebugName() << " is not defined for double values";
4628    UNREACHABLE();
4629  }
4630
4631  DECLARE_INSTRUCTION(UShr);
4632
4633 private:
4634  DISALLOW_COPY_AND_ASSIGN(HUShr);
4635};
4636
4637class HAnd : public HBinaryOperation {
4638 public:
4639  HAnd(Primitive::Type result_type,
4640       HInstruction* left,
4641       HInstruction* right,
4642       uint32_t dex_pc = kNoDexPc)
4643      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4644
4645  bool IsCommutative() const OVERRIDE { return true; }
4646
4647  template <typename T> T Compute(T x, T y) const { return x & y; }
4648
4649  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4650    return GetBlock()->GetGraph()->GetIntConstant(
4651        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4652  }
4653  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4654    return GetBlock()->GetGraph()->GetLongConstant(
4655        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4656  }
4657  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4658                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4659    LOG(FATAL) << DebugName() << " is not defined for float values";
4660    UNREACHABLE();
4661  }
4662  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4663                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4664    LOG(FATAL) << DebugName() << " is not defined for double values";
4665    UNREACHABLE();
4666  }
4667
4668  DECLARE_INSTRUCTION(And);
4669
4670 private:
4671  DISALLOW_COPY_AND_ASSIGN(HAnd);
4672};
4673
4674class HOr : public HBinaryOperation {
4675 public:
4676  HOr(Primitive::Type result_type,
4677      HInstruction* left,
4678      HInstruction* right,
4679      uint32_t dex_pc = kNoDexPc)
4680      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4681
4682  bool IsCommutative() const OVERRIDE { return true; }
4683
4684  template <typename T> T Compute(T x, T y) const { return x | y; }
4685
4686  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4687    return GetBlock()->GetGraph()->GetIntConstant(
4688        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4689  }
4690  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4691    return GetBlock()->GetGraph()->GetLongConstant(
4692        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4693  }
4694  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4695                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4696    LOG(FATAL) << DebugName() << " is not defined for float values";
4697    UNREACHABLE();
4698  }
4699  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4700                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4701    LOG(FATAL) << DebugName() << " is not defined for double values";
4702    UNREACHABLE();
4703  }
4704
4705  DECLARE_INSTRUCTION(Or);
4706
4707 private:
4708  DISALLOW_COPY_AND_ASSIGN(HOr);
4709};
4710
4711class HXor : public HBinaryOperation {
4712 public:
4713  HXor(Primitive::Type result_type,
4714       HInstruction* left,
4715       HInstruction* right,
4716       uint32_t dex_pc = kNoDexPc)
4717      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4718
4719  bool IsCommutative() const OVERRIDE { return true; }
4720
4721  template <typename T> T Compute(T x, T y) const { return x ^ y; }
4722
4723  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4724    return GetBlock()->GetGraph()->GetIntConstant(
4725        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4726  }
4727  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4728    return GetBlock()->GetGraph()->GetLongConstant(
4729        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4730  }
4731  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4732                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4733    LOG(FATAL) << DebugName() << " is not defined for float values";
4734    UNREACHABLE();
4735  }
4736  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4737                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4738    LOG(FATAL) << DebugName() << " is not defined for double values";
4739    UNREACHABLE();
4740  }
4741
4742  DECLARE_INSTRUCTION(Xor);
4743
4744 private:
4745  DISALLOW_COPY_AND_ASSIGN(HXor);
4746};
4747
4748class HRor : public HBinaryOperation {
4749 public:
4750  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4751    : HBinaryOperation(result_type, value, distance) {
4752    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4753    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4754  }
4755
4756  template <typename T>
4757  T Compute(T value, int32_t distance, int32_t max_shift_value) const {
4758    typedef typename std::make_unsigned<T>::type V;
4759    V ux = static_cast<V>(value);
4760    if ((distance & max_shift_value) == 0) {
4761      return static_cast<T>(ux);
4762    } else {
4763      const V reg_bits = sizeof(T) * 8;
4764      return static_cast<T>(ux >> (distance & max_shift_value)) |
4765                           (value << (reg_bits - (distance & max_shift_value)));
4766    }
4767  }
4768
4769  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4770    return GetBlock()->GetGraph()->GetIntConstant(
4771        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4772  }
4773  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4774    return GetBlock()->GetGraph()->GetLongConstant(
4775        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4776  }
4777  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4778                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4779    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4780    UNREACHABLE();
4781  }
4782  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4783                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4784    LOG(FATAL) << DebugName() << " is not defined for float values";
4785    UNREACHABLE();
4786  }
4787  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4788                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4789    LOG(FATAL) << DebugName() << " is not defined for double values";
4790    UNREACHABLE();
4791  }
4792
4793  DECLARE_INSTRUCTION(Ror);
4794
4795 private:
4796  DISALLOW_COPY_AND_ASSIGN(HRor);
4797};
4798
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The incoming 'this' reference is never null; any other reference
    // parameter may be (until refined via SetCanBeNull()).
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4842
// Bitwise complement (~) of an int or long value. Not defined for
// floating-point types.
class HNot : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Flips every bit of `x`.
  template <typename T> T Compute(T x) const { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};
4875
4876class HBooleanNot : public HUnaryOperation {
4877 public:
4878  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
4879      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
4880
4881  bool CanBeMoved() const OVERRIDE { return true; }
4882  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4883    return true;
4884  }
4885
4886  template <typename T> bool Compute(T x) const {
4887    DCHECK(IsUint<1>(x)) << x;
4888    return !x;
4889  }
4890
4891  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4892    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4893  }
4894  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4895    LOG(FATAL) << DebugName() << " is not defined for long values";
4896    UNREACHABLE();
4897  }
4898  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4899    LOG(FATAL) << DebugName() << " is not defined for float values";
4900    UNREACHABLE();
4901  }
4902  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4903    LOG(FATAL) << DebugName() << " is not defined for double values";
4904    UNREACHABLE();
4905  }
4906
4907  DECLARE_INSTRUCTION(BooleanNot);
4908
4909 private:
4910  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
4911};
4912
// Primitive type conversion (e.g. int-to-long, float-to-int) of a single
// input value.
class HTypeConversion : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type,
                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
                    dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  // FP-to-integral and long-to-FP conversions may be implemented as runtime
  // calls, hence the conservative GC side effect for those pairs.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
                                                    Primitive::Type result_type) {
    // Some architectures may not require the 'GC' side effects, but at this point
    // in the compilation process we do not know what architecture we will
    // generate code for, so we must be conservative.
    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
      return SideEffects::CanTriggerGC();
    }
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
4953
4954static constexpr uint32_t kNoRegNumber = -1;
4955
// Checks that its input is non-null and throws NullPointerException
// otherwise. The instruction's value is the (now known non-null) input.
class HNullCheck : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment to be able to deoptimize/throw at this dex pc.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // If the check passes, the resulting reference is by definition non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
4982
// Immutable value object bundling the information needed to access a
// resolved field: its offset, type, volatility, dex-file identity and the
// dex cache it was resolved through. Shared by field get/set instructions.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};
5017
// Reads an instance field from the object given as input 0.
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must stay in place.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equal data-wise when they read the same offset; the input
  // object is compared separately by the generic equality machinery.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // A hardware fault can substitute for the null check only if the access
  // lands in the first (protected) page, i.e. offset < kPageSize.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5069
// Writes `value` (input 1) into an instance field of `object` (input 0).
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Conservatively assume the stored value may be null until proven otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // See HInstanceFieldGet::CanDoImplicitNullCheckOn: the faulting access must
  // hit the first page.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5120
// Reads an element of type `type` from `array` (input 0) at `index` (input 1).
class HArrayGet : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array, HInstruction* index, Primitive::Type type, uint32_t dex_pc)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Two gets are "equivalent" when they come from the same dex instruction
  // (same dex pc) but one was typed int/long and the other float/double; the
  // debug checks below verify exactly that int-vs-FP relationship, along with
  // identical block, array and index.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5166
// Stores `value` (input 2) into `array` (input 0) at `index` (input 1).
// Reference stores may require a runtime type check and can therefore throw
// ArrayStoreException.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores may need a type check; conservatively assume all do.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type as recorded at build time, without the floating-point
  // correction applied by GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes the side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  // Reference stores may call into the runtime (type check / write barrier
  // path), which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5266
// Yields the length (int) of the array given as input 0.
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // The length field is at a small fixed offset, so a fault on the load can
  // serve as the null check for `obj`.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5289
// Checks that `index` (input 0) is within `length` (input 1) and throws
// IndexOutOfBoundsException otherwise. Its value is the checked index.
class HBoundsCheck : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment to be able to throw at this dex pc.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};
5317
// Suspend check: a safepoint at which the runtime may interrupt this thread
// (the runtime call it can make may trigger GC, hence the side effect).
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};
5339
// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  // The environment records the dex-register values the debugger needs at
  // this program point.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};
5356
5357/**
5358 * Instruction to load a Class object.
5359 */
5360class HLoadClass : public HExpression<1> {
5361 public:
5362  HLoadClass(HCurrentMethod* current_method,
5363             uint16_t type_index,
5364             const DexFile& dex_file,
5365             bool is_referrers_class,
5366             uint32_t dex_pc,
5367             bool needs_access_check,
5368             bool is_in_dex_cache)
5369      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5370        type_index_(type_index),
5371        dex_file_(dex_file),
5372        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5373    // Referrers class should not need access check. We never inline unverified
5374    // methods so we can't possibly end up in this situation.
5375    DCHECK(!is_referrers_class || !needs_access_check);
5376
5377    SetPackedFlag<kFlagIsReferrersClass>(is_referrers_class);
5378    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5379    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
5380    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5381    SetRawInputAt(0, current_method);
5382  }
5383
5384  bool CanBeMoved() const OVERRIDE { return true; }
5385
5386  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
5387    // Note that we don't need to test for generate_clinit_check_.
5388    // Whether or not we need to generate the clinit check is processed in
5389    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
5390    return other->AsLoadClass()->type_index_ == type_index_ &&
5391        other->AsLoadClass()->GetPackedFields() == GetPackedFields();
5392  }
5393
5394  size_t ComputeHashCode() const OVERRIDE { return type_index_; }
5395
5396  uint16_t GetTypeIndex() const { return type_index_; }
5397  bool CanBeNull() const OVERRIDE { return false; }
5398
5399  bool NeedsEnvironment() const OVERRIDE {
5400    return CanCallRuntime();
5401  }
5402
5403  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5404    // The entrypoint the code generator is going to call does not do
5405    // clinit of the class.
5406    DCHECK(!NeedsAccessCheck());
5407    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5408  }
5409
5410  bool CanCallRuntime() const {
5411    return MustGenerateClinitCheck() ||
5412           (!IsReferrersClass() && !IsInDexCache()) ||
5413           NeedsAccessCheck();
5414  }
5415
5416
5417  bool CanThrow() const OVERRIDE {
5418    return CanCallRuntime();
5419  }
5420
5421  ReferenceTypeInfo GetLoadedClassRTI() {
5422    return loaded_class_rti_;
5423  }
5424
5425  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5426    // Make sure we only set exact types (the loaded class should never be merged).
5427    DCHECK(rti.IsExact());
5428    loaded_class_rti_ = rti;
5429  }
5430
5431  const DexFile& GetDexFile() { return dex_file_; }
5432
5433  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }
5434
5435  static SideEffects SideEffectsForArchRuntimeCalls() {
5436    return SideEffects::CanTriggerGC();
5437  }
5438
5439  bool IsReferrersClass() const { return GetPackedFlag<kFlagIsReferrersClass>(); }
5440  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5441  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5442  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5443
5444  DECLARE_INSTRUCTION(LoadClass);
5445
5446 private:
5447  static constexpr size_t kFlagIsReferrersClass    = kNumberOfExpressionPackedBits;
5448  static constexpr size_t kFlagNeedsAccessCheck    = kFlagIsReferrersClass + 1;
5449  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
5450  // Whether this instruction must generate the initialization check.
5451  // Used for code generation.
5452  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
5453  static constexpr size_t kNumberOfLoadClassPackedBits = kFlagGenerateClInitCheck + 1;
5454  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5455
5456  const uint16_t type_index_;
5457  const DexFile& dex_file_;
5458
5459  ReferenceTypeInfo loaded_class_rti_;
5460
5461  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5462};
5463
5464class HLoadString : public HExpression<1> {
5465 public:
5466  // Determines how to load the String.
5467  enum class LoadKind {
5468    // Use boot image String* address that will be known at link time.
5469    // Used for boot image strings referenced by boot image code in non-PIC mode.
5470    kBootImageLinkTimeAddress,
5471
5472    // Use PC-relative boot image String* address that will be known at link time.
5473    // Used for boot image strings referenced by boot image code in PIC mode.
5474    kBootImageLinkTimePcRelative,
5475
5476    // Use a known boot image String* address, embedded in the code by the codegen.
5477    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
5478    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5479    // GetIncludePatchInformation().
5480    kBootImageAddress,
5481
5482    // Load from the resolved strings array at an absolute address.
5483    // Used for strings outside the boot image referenced by JIT-compiled code.
5484    kDexCacheAddress,
5485
5486    // Load from resolved strings array in the dex cache using a PC-relative load.
5487    // Used for strings outside boot image when we know that we can access
5488    // the dex cache arrays using a PC-relative load.
5489    kDexCachePcRelative,
5490
5491    // Load from resolved strings array accessed through the class loaded from
5492    // the compiled method's own ArtMethod*. This is the default access type when
5493    // all other types are unavailable.
5494    kDexCacheViaMethod,
5495
5496    kLast = kDexCacheViaMethod
5497  };
5498
5499  HLoadString(HCurrentMethod* current_method,
5500              uint32_t string_index,
5501              const DexFile& dex_file,
5502              uint32_t dex_pc)
5503      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5504        string_index_(string_index) {
5505    SetPackedFlag<kFlagIsInDexCache>(false);
5506    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
5507    load_data_.ref.dex_file = &dex_file;
5508    SetRawInputAt(0, current_method);
5509  }
5510
5511  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5512    DCHECK(HasAddress(load_kind));
5513    load_data_.address = address;
5514    SetLoadKindInternal(load_kind);
5515  }
5516
5517  void SetLoadKindWithStringReference(LoadKind load_kind,
5518                                      const DexFile& dex_file,
5519                                      uint32_t string_index) {
5520    DCHECK(HasStringReference(load_kind));
5521    load_data_.ref.dex_file = &dex_file;
5522    string_index_ = string_index;
5523    SetLoadKindInternal(load_kind);
5524  }
5525
5526  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5527                                        const DexFile& dex_file,
5528                                        uint32_t element_index) {
5529    DCHECK(HasDexCacheReference(load_kind));
5530    load_data_.ref.dex_file = &dex_file;
5531    load_data_.ref.dex_cache_element_index = element_index;
5532    SetLoadKindInternal(load_kind);
5533  }
5534
5535  LoadKind GetLoadKind() const {
5536    return GetPackedField<LoadKindField>();
5537  }
5538
5539  const DexFile& GetDexFile() const;
5540
5541  uint32_t GetStringIndex() const {
5542    DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
5543    return string_index_;
5544  }
5545
5546  uint32_t GetDexCacheElementOffset() const;
5547
5548  uint64_t GetAddress() const {
5549    DCHECK(HasAddress(GetLoadKind()));
5550    return load_data_.address;
5551  }
5552
5553  bool CanBeMoved() const OVERRIDE { return true; }
5554
5555  bool InstructionDataEquals(HInstruction* other) const OVERRIDE;
5556
5557  size_t ComputeHashCode() const OVERRIDE { return string_index_; }
5558
5559  // Will call the runtime if we need to load the string through
5560  // the dex cache and the string is not guaranteed to be there yet.
5561  bool NeedsEnvironment() const OVERRIDE {
5562    LoadKind load_kind = GetLoadKind();
5563    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
5564        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5565        load_kind == LoadKind::kBootImageAddress) {
5566      return false;
5567    }
5568    return !IsInDexCache();
5569  }
5570
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  // The result of a load-string is never null.
  bool CanBeNull() const OVERRIDE { return false; }
  // Can only throw when a runtime call is needed (see NeedsEnvironment()).
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }
5577
  static SideEffects SideEffectsForArchRuntimeCalls() {
    // A runtime call may trigger GC.
    return SideEffects::CanTriggerGC();
  }

  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }

  // Marks the string as known to be in the dex cache. After this no
  // environment is needed anymore, so it is removed.
  void MarkInDexCache() {
    SetPackedFlag<kFlagIsInDexCache>(true);
    DCHECK(!NeedsEnvironment());
    RemoveEnvironment();
  }

  size_t InputCount() const OVERRIDE {
    // Input 0 may have been left null, in which case there are no inputs.
    return (InputAt(0) != nullptr) ? 1u : 0u;
  }
5593
5594  void AddSpecialInput(HInstruction* special_input);
5595
5596  DECLARE_INSTRUCTION(LoadString);
5597
 private:
  // Packed-field layout: the IsInDexCache flag, then the load kind.
  static constexpr size_t kFlagIsInDexCache = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5606
5607  static bool HasStringReference(LoadKind load_kind) {
5608    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5609        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5610        load_kind == LoadKind::kDexCacheViaMethod;
5611  }
5612
  // Whether the given load kind locates the string by an absolute address.
  static bool HasAddress(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
  }

  // Whether the given load kind references the string via a dex cache element.
  static bool HasDexCacheReference(LoadKind load_kind) {
    return load_kind == LoadKind::kDexCachePcRelative;
  }
5620
  void SetLoadKindInternal(LoadKind load_kind);

  // String index serves also as the hash code and it's also needed for slow-paths,
  // so it must not be overwritten with other load data.
  uint32_t string_index_;

  // Kind-specific load data; which member is active depends on the load kind.
  union {
    struct {
      const DexFile* dex_file;            // For string reference and dex cache reference.
      uint32_t dex_cache_element_index;   // Only for dex cache reference.
    } ref;
    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
  } load_data_;
5634
5635  DISALLOW_COPY_AND_ASSIGN(HLoadString);
5636};
5637std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5638
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline const DexFile& HLoadString::GetDexFile() const {
  // Valid only when load_data_.ref is the active union member.
  DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
      << GetLoadKind();
  return *load_data_.ref.dex_file;
}
5645
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline uint32_t HLoadString::GetDexCacheElementOffset() const {
  // Valid only for the dex cache reference load kind.
  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
  return load_data_.ref.dex_cache_element_index;
}
5651
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kDexCachePcRelative) << GetLoadKind();
  // Input 0 must still be empty (see InputCount()).
  DCHECK(InputAt(0) == nullptr);
  SetRawInputAt(0u, special_input);
  // Register the use only after the input is wired up.
  special_input->AddUseAt(this, 0);
}
5661
5662/**
5663 * Performs an initialization check on its Class object input.
5664 */
5665class HClinitCheck : public HExpression<1> {
5666 public:
5667  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5668      : HExpression(
5669            Primitive::kPrimNot,
5670            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5671            dex_pc) {
5672    SetRawInputAt(0, constant);
5673  }
5674
5675  bool CanBeMoved() const OVERRIDE { return true; }
5676  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5677    return true;
5678  }
5679
5680  bool NeedsEnvironment() const OVERRIDE {
5681    // May call runtime to initialize the class.
5682    return true;
5683  }
5684
5685  bool CanThrow() const OVERRIDE { return true; }
5686
5687  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5688
5689  DECLARE_INSTRUCTION(ClinitCheck);
5690
5691 private:
5692  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5693};
5694
// Read of a static field; input 0 is the class-providing instruction (`cls`).
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Only non-volatile gets may be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two static field gets are considered equal if they read the same offset.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
5743
// Write of a static field; input 0 is the class, input 1 the value to store.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  // Called when the stored value is known to be non-null.
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  // Packed flag: whether the stored value may be null (defaults to true).
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
5791
// Read of an instance field whose resolution was deferred to run time;
// needs an environment and can throw.
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  // The field type is the instruction's own type.
  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
5816
5817class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
5818 public:
5819  HUnresolvedInstanceFieldSet(HInstruction* obj,
5820                              HInstruction* value,
5821                              Primitive::Type field_type,
5822                              uint32_t field_index,
5823                              uint32_t dex_pc)
5824      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
5825        field_index_(field_index) {
5826    SetPackedField<FieldTypeField>(field_type);
5827    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
5828    SetRawInputAt(0, obj);
5829    SetRawInputAt(1, value);
5830  }
5831
5832  bool NeedsEnvironment() const OVERRIDE { return true; }
5833  bool CanThrow() const OVERRIDE { return true; }
5834
5835  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
5836  uint32_t GetFieldIndex() const { return field_index_; }
5837
5838  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
5839
5840 private:
5841  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
5842  static constexpr size_t kFieldFieldTypeSize =
5843      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5844  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
5845      kFieldFieldType + kFieldFieldTypeSize;
5846  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5847                "Too many packed fields.");
5848  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
5849
5850  const uint32_t field_index_;
5851
5852  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
5853};
5854
// Read of a static field whose resolution was deferred to run time;
// needs an environment and can throw.
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  // The field type is the instruction's own type.
  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
5877
// Write of a static field whose resolution was deferred to run time;
// needs an environment and can throw. The sole input is the value to store.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // Packed field: the field type (kept here since HTemplateInstruction has no type).
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
5913
// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // The loaded exception reference is known to be non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
5927
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      // Modeled as a write (it clears the TLS exception slot).
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
5940
// Throws the exception object given as its sole input.
class HThrow : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // Throw terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};
5960
5961/**
5962 * Implementation strategies for the code generator of a HInstanceOf
5963 * or `HCheckCast`.
5964 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck,            // No optimization yet when checking against a generic array.
  kLast = kArrayCheck     // Used to size the packed TypeCheckKind bit fields.
};
5975
5976std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
5977
// instance-of check: input 0 is the object, input 1 the class constant;
// the result is a boolean.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed-field layout: the type check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6031
// Narrows the reference type information of its input (e.g. after an
// instanceof or a null check); used by reference type propagation.
class HBoundType : public HExpression<1> {
 public:
  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Cannot claim nullability beyond what the upper bound allows.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6075
// check-cast: input 0 is the object, input 1 the class constant; throws if
// the object is not an instance of the class.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed-field layout: the type check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6120
// Explicit memory barrier of the given kind.
class HMemoryBarrier : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // Packed field: the barrier kind.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6145
6146class HMonitorOperation : public HTemplateInstruction<1> {
6147 public:
6148  enum class OperationKind {
6149    kEnter,
6150    kExit,
6151    kLast = kExit
6152  };
6153
6154  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6155    : HTemplateInstruction(
6156          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6157          dex_pc) {
6158    SetPackedField<OperationKindField>(kind);
6159    SetRawInputAt(0, object);
6160  }
6161
6162  // Instruction may go into runtime, so we need an environment.
6163  bool NeedsEnvironment() const OVERRIDE { return true; }
6164
6165  bool CanThrow() const OVERRIDE {
6166    // Verifier guarantees that monitor-exit cannot throw.
6167    // This is important because it allows the HGraphBuilder to remove
6168    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6169    return IsEnter();
6170  }
6171
6172  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6173  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6174
6175  DECLARE_INSTRUCTION(MonitorOperation);
6176
6177 private:
6178  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6179  static constexpr size_t kFieldOperationKindSize =
6180      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6181  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6182      kFieldOperationKind + kFieldOperationKindSize;
6183  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6184                "Too many packed fields.");
6185  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6186
6187 private:
6188  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6189};
6190
// Selects between its true and false values based on the condition input,
// i.e. the equivalent of `condition ? true_value : false_value`.
class HSelect : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }

  // Nullable iff either selectable value is nullable.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6225
6226class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
6227 public:
6228  MoveOperands(Location source,
6229               Location destination,
6230               Primitive::Type type,
6231               HInstruction* instruction)
6232      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
6233
6234  Location GetSource() const { return source_; }
6235  Location GetDestination() const { return destination_; }
6236
6237  void SetSource(Location value) { source_ = value; }
6238  void SetDestination(Location value) { destination_ = value; }
6239
6240  // The parallel move resolver marks moves as "in-progress" by clearing the
6241  // destination (but not the source).
6242  Location MarkPending() {
6243    DCHECK(!IsPending());
6244    Location dest = destination_;
6245    destination_ = Location::NoLocation();
6246    return dest;
6247  }
6248
6249  void ClearPending(Location dest) {
6250    DCHECK(IsPending());
6251    destination_ = dest;
6252  }
6253
6254  bool IsPending() const {
6255    DCHECK(source_.IsValid() || destination_.IsInvalid());
6256    return destination_.IsInvalid() && source_.IsValid();
6257  }
6258
6259  // True if this blocks a move from the given location.
6260  bool Blocks(Location loc) const {
6261    return !IsEliminated() && source_.OverlapsWith(loc);
6262  }
6263
6264  // A move is redundant if it's been eliminated, if its source and
6265  // destination are the same, or if its destination is unneeded.
6266  bool IsRedundant() const {
6267    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
6268  }
6269
6270  // We clear both operands to indicate move that's been eliminated.
6271  void Eliminate() {
6272    source_ = destination_ = Location::NoLocation();
6273  }
6274
6275  bool IsEliminated() const {
6276    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
6277    return source_.IsInvalid();
6278  }
6279
6280  Primitive::Type GetType() const { return type_; }
6281
6282  bool Is64BitMove() const {
6283    return Primitive::Is64BitType(type_);
6284  }
6285
6286  HInstruction* GetInstruction() const { return instruction_; }
6287
6288 private:
6289  Location source_;
6290  Location destination_;
6291  // The type this move is for.
6292  Primitive::Type type_;
6293  // The instruction this move is assocatied with. Null when this move is
6294  // for moving an input in the expected locations of user (including a phi user).
6295  // This is only used in debug mode, to ensure we do not connect interval siblings
6296  // in the same parallel move.
6297  HInstruction* instruction_;
6298};
6299
6300std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6301
6302static constexpr size_t kDefaultNumberOfMoves = 4;
6303
// A set of moves to perform as one "parallel" step: in debug builds, AddMove
// checks that no two moves write overlapping destinations.
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move; in debug builds verifies that it does not conflict with
  // the moves already recorded.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6359
6360}  // namespace art
6361
6362#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6363#include "nodes_shared.h"
6364#endif
6365#ifdef ART_ENABLE_CODEGEN_arm
6366#include "nodes_arm.h"
6367#endif
6368#ifdef ART_ENABLE_CODEGEN_arm64
6369#include "nodes_arm64.h"
6370#endif
6371#ifdef ART_ENABLE_CODEGEN_x86
6372#include "nodes_x86.h"
6373#endif
6374
6375namespace art {
6376
// Base class for visitors over the instructions of an HGraph.
// By default every Visit##name() forwards to VisitInstruction(), which is a
// no-op; subclasses override only the hooks they care about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by the default Visit##name() implementations below.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
  // One virtual Visit##name() per concrete and abstract instruction kind,
  // each defaulting to VisitInstruction().
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6406
// Visitor variant in which each Visit##name() delegates to the visit function
// of the instruction's superclass instead of directly to VisitInstruction().
// This lets a subclass handle a whole family of instructions (e.g. all binary
// operations) by overriding a single intermediate hook.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6423
6424class HInsertionOrderIterator : public ValueObject {
6425 public:
6426  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}
6427
6428  bool Done() const { return index_ == graph_.GetBlocks().size(); }
6429  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
6430  void Advance() { ++index_; }
6431
6432 private:
6433  const HGraph& graph_;
6434  size_t index_;
6435
6436  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
6437};
6438
6439class HReversePostOrderIterator : public ValueObject {
6440 public:
6441  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
6442    // Check that reverse post order of the graph has been built.
6443    DCHECK(!graph.GetReversePostOrder().empty());
6444  }
6445
6446  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
6447  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
6448  void Advance() { ++index_; }
6449
6450 private:
6451  const HGraph& graph_;
6452  size_t index_;
6453
6454  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
6455};
6456
6457class HPostOrderIterator : public ValueObject {
6458 public:
6459  explicit HPostOrderIterator(const HGraph& graph)
6460      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
6461    // Check that reverse post order of the graph has been built.
6462    DCHECK(!graph.GetReversePostOrder().empty());
6463  }
6464
6465  bool Done() const { return index_ == 0; }
6466  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
6467  void Advance() { --index_; }
6468
6469 private:
6470  const HGraph& graph_;
6471  size_t index_;
6472
6473  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
6474};
6475
6476class HLinearPostOrderIterator : public ValueObject {
6477 public:
6478  explicit HLinearPostOrderIterator(const HGraph& graph)
6479      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
6480
6481  bool Done() const { return index_ == 0; }
6482
6483  HBasicBlock* Current() const { return order_[index_ - 1u]; }
6484
6485  void Advance() {
6486    --index_;
6487    DCHECK_GE(index_, 0U);
6488  }
6489
6490 private:
6491  const ArenaVector<HBasicBlock*>& order_;
6492  size_t index_;
6493
6494  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
6495};
6496
6497class HLinearOrderIterator : public ValueObject {
6498 public:
6499  explicit HLinearOrderIterator(const HGraph& graph)
6500      : order_(graph.GetLinearOrder()), index_(0) {}
6501
6502  bool Done() const { return index_ == order_.size(); }
6503  HBasicBlock* Current() const { return order_[index_]; }
6504  void Advance() { ++index_; }
6505
6506 private:
6507  const ArenaVector<HBasicBlock*>& order_;
6508  size_t index_;
6509
6510  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
6511};
6512
// Iterator over the blocks that are part of the loop, including blocks of
// inner loops. The blocks are iterated in increasing order of their
// block id.
6516class HBlocksInLoopIterator : public ValueObject {
6517 public:
6518  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6519      : blocks_in_loop_(info.GetBlocks()),
6520        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6521        index_(0) {
6522    if (!blocks_in_loop_.IsBitSet(index_)) {
6523      Advance();
6524    }
6525  }
6526
6527  bool Done() const { return index_ == blocks_.size(); }
6528  HBasicBlock* Current() const { return blocks_[index_]; }
6529  void Advance() {
6530    ++index_;
6531    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6532      if (blocks_in_loop_.IsBitSet(index_)) {
6533        break;
6534      }
6535    }
6536  }
6537
6538 private:
6539  const BitVector& blocks_in_loop_;
6540  const ArenaVector<HBasicBlock*>& blocks_;
6541  size_t index_;
6542
6543  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6544};
6545
// Iterator over the blocks that are part of the loop, including blocks of
// inner loops. The blocks are iterated in reverse post order.
6549class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6550 public:
6551  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6552      : blocks_in_loop_(info.GetBlocks()),
6553        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
6554        index_(0) {
6555    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6556      Advance();
6557    }
6558  }
6559
6560  bool Done() const { return index_ == blocks_.size(); }
6561  HBasicBlock* Current() const { return blocks_[index_]; }
6562  void Advance() {
6563    ++index_;
6564    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6565      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6566        break;
6567      }
6568    }
6569  }
6570
6571 private:
6572  const BitVector& blocks_in_loop_;
6573  const ArenaVector<HBasicBlock*>& blocks_;
6574  size_t index_;
6575
6576  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
6577};
6578
6579inline int64_t Int64FromConstant(HConstant* constant) {
6580  if (constant->IsIntConstant()) {
6581    return constant->AsIntConstant()->GetValue();
6582  } else if (constant->IsLongConstant()) {
6583    return constant->AsLongConstant()->GetValue();
6584  } else {
6585    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6586    return 0;
6587  }
6588}
6589
6590inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
6591  // For the purposes of the compiler, the dex files must actually be the same object
6592  // if we want to safely treat them as the same. This is especially important for JIT
6593  // as custom class loaders can open the same underlying file (or memory) multiple
6594  // times and provide different class resolution but no two class loaders should ever
6595  // use the same DexFile object - doing so is an unsupported hack that can lead to
6596  // all sorts of weird failures.
6597  return &lhs == &rhs;
6598}
6599
6600#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6601  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6602  inline const H##type* HInstruction::As##type() const {                       \
6603    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6604  }                                                                            \
6605  inline H##type* HInstruction::As##type() {                                   \
6606    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6607  }
6608
6609  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6610#undef INSTRUCTION_TYPE_CHECK
6611
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting right after position `after`. Blocks following `after` are moved
// accordingly. The vacated entries still hold their old (stale) pointers;
// the caller is expected to overwrite them.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift the tail [after + 1, old_size) to the end of the resized vector,
  // copying back-to-front so overlapping ranges are handled correctly.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6623
6624}  // namespace art
6625
6626#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6627