// art/compiler/optimizing/nodes.h (revision 7e589feab1b35203fbb8c431213f1d2b2a4ad530)
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/stl_util.h"
28#include "dex/compiler_enums.h"
29#include "entrypoints/quick/quick_entrypoints_enum.h"
30#include "handle.h"
31#include "handle_scope.h"
32#include "invoke_type.h"
33#include "locations.h"
34#include "method_reference.h"
35#include "mirror/class.h"
36#include "offsets.h"
37#include "primitive.h"
38#include "utils/array_ref.h"
39#include "utils/intrusive_forward_list.h"
40
41namespace art {
42
43class GraphChecker;
44class HBasicBlock;
45class HCurrentMethod;
46class HDoubleConstant;
47class HEnvironment;
48class HFloatConstant;
49class HGraphBuilder;
50class HGraphVisitor;
51class HInstruction;
52class HIntConstant;
53class HInvoke;
54class HLongConstant;
55class HNullConstant;
56class HPhi;
57class HSuspendCheck;
58class HTryBoundary;
59class LiveInterval;
60class LocationSummary;
61class SlowPathCode;
62class SsaBuilder;
63
64namespace mirror {
65class DexCache;
66}  // namespace mirror
67
// Default initial capacities reserved in the per-graph containers below.
// These are sizing hints only; the containers grow beyond them as needed.
// Declared constexpr (not just const) for consistency with the other
// header-level constants in this file and to guarantee compile-time values.
static constexpr int kDefaultNumberOfBlocks = 8;
static constexpr int kDefaultNumberOfSuccessors = 2;
static constexpr int kDefaultNumberOfPredecessors = 2;
static constexpr int kDefaultNumberOfExceptionalPredecessors = 0;
static constexpr int kDefaultNumberOfDominatedBlocks = 1;
static constexpr int kDefaultNumberOfBackEdges = 1;
74
75// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
76static constexpr int32_t kMaxIntShiftDistance = 0x1f;
77// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
78static constexpr int32_t kMaxLongShiftDistance = 0x3f;
79
80static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
81static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
82
83static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
84
85static constexpr uint32_t kNoDexPc = -1;
86
// The kinds of comparison an HCondition-style instruction can perform.
// Enumerator values are spelled out explicitly so reordering cannot silently
// change the encoding.
enum IfCondition {
  // Comparisons valid for all types.
  kCondEQ = 0,  // ==
  kCondNE = 1,  // !=
  // Comparisons for signed integers and floating-point numbers.
  kCondLT = 2,  // <
  kCondLE = 3,  // <=
  kCondGT = 4,  // >
  kCondGE = 5,  // >=
  // Comparisons for unsigned integers (below/above).
  kCondB  = 6,  // <
  kCondBE = 7,  // <=
  kCondA  = 8,  // >
  kCondAE = 9,  // >=
};
102
// Outcome of a whole-graph analysis pass (e.g. BuildDominatorTree,
// AnalyzeLoops). Values are explicit so the encoding is stable.
enum GraphAnalysisResult {
  kAnalysisSkipped = 0,
  kAnalysisInvalidBytecode = 1,
  kAnalysisFailThrowCatchLoop = 2,
  kAnalysisFailAmbiguousArrayOp = 3,
  kAnalysisSuccess = 4,
};
110
// Intrusive double-linked list of instructions, as held by a basic block
// (see HBasicBlock's comment: instructions are "represented as a double
// linked list"). The list nodes are the HInstruction objects themselves;
// this class only tracks the first and last elements.
class HInstructionList : public ValueObject {
 public:
  // An empty list: both endpoints are null.
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append/remove a single instruction (defined out of line).
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops the endpoints only; does not touch the instructions' own links.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice a whole other list into/onto this one.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  // These classes manipulate the endpoints directly.
  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
156
157class ReferenceTypeInfo : ValueObject {
158 public:
159  typedef Handle<mirror::Class> TypeHandle;
160
161  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
162
163  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
164    return ReferenceTypeInfo(type_handle, is_exact);
165  }
166
167  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
168
169  static bool IsValidHandle(TypeHandle handle) {
170    return handle.GetReference() != nullptr;
171  }
172
173  bool IsValid() const {
174    return IsValidHandle(type_handle_);
175  }
176
177  bool IsExact() const { return is_exact_; }
178
179  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
180    DCHECK(IsValid());
181    return GetTypeHandle()->IsObjectClass();
182  }
183
184  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
185    DCHECK(IsValid());
186    return GetTypeHandle()->IsStringClass();
187  }
188
189  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
190    DCHECK(IsValid());
191    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
192  }
193
194  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
195    DCHECK(IsValid());
196    return GetTypeHandle()->IsInterface();
197  }
198
199  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
200    DCHECK(IsValid());
201    return GetTypeHandle()->IsArrayClass();
202  }
203
204  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
205    DCHECK(IsValid());
206    return GetTypeHandle()->IsPrimitiveArray();
207  }
208
209  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
210    DCHECK(IsValid());
211    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
212  }
213
214  bool CanArrayHold(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
215    DCHECK(IsValid());
216    if (!IsExact()) return false;
217    if (!IsArrayClass()) return false;
218    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
219  }
220
221  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const SHARED_REQUIRES(Locks::mutator_lock_) {
222    DCHECK(IsValid());
223    if (!IsExact()) return false;
224    if (!IsArrayClass()) return false;
225    if (!rti.IsArrayClass()) return false;
226    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
227        rti.GetTypeHandle()->GetComponentType());
228  }
229
230  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
231
232  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
233    DCHECK(IsValid());
234    DCHECK(rti.IsValid());
235    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
236  }
237
238  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
239    DCHECK(IsValid());
240    DCHECK(rti.IsValid());
241    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
242        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
243  }
244
245  // Returns true if the type information provide the same amount of details.
246  // Note that it does not mean that the instructions have the same actual type
247  // (because the type can be the result of a merge).
248  bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
249    if (!IsValid() && !rti.IsValid()) {
250      // Invalid types are equal.
251      return true;
252    }
253    if (!IsValid() || !rti.IsValid()) {
254      // One is valid, the other not.
255      return false;
256    }
257    return IsExact() == rti.IsExact()
258        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
259  }
260
261 private:
262  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
263  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
264      : type_handle_(type_handle), is_exact_(is_exact) { }
265
266  // The class of the object.
267  TypeHandle type_handle_;
268  // Whether or not the type is exact or a superclass of the actual type.
269  // Whether or not we have any information about this type.
270  bool is_exact_;
271};
272
// Stream a human-readable description of `rhs` to `os` (defined in nodes.cc).
std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
274
275// Control-flow graph of a method. Contains a list of basic blocks.
// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  // All allocations made through `arena` live as long as the graph itself.
  // The initializer list below must stay in member-declaration order.
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  // One-way transition; there is no way to leave SSA form again.
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  // Dominance, loop and CFG-shape analyses (defined in nodes.cc).
  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Hands out a fresh id and advances the counter; aborts (DCHECK) on overflow.
  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  // The id counter may only move forward, never backwards.
  void SetCurrentInstructionId(int32_t id) {
    DCHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raise (never lower) the recorded maximum.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raise (never lower) the recorded temporary-slot count.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  // Only meaningful before the graph is in SSA form.
  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  // Vregs that are locals, i.e. not parameters. Pre-SSA only.
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  // Float/double constants are cached by their raw bit pattern so that
  // distinct representations (e.g. -0.0 vs 0.0, NaN payloads) stay distinct.
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Shared implementation behind the GetXxxConstant() accessors: look up
  // `value` in `cache`, creating and inserting a new constant on a miss.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Flag whether there are any irreducible loops in the graph.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants. Float/double maps are keyed by the value's raw bits.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on stack replacement: this will
  // make all loops seen as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};
646
// Loop metadata attached to a loop-header block: the set of back edges, a bit
// vector of member blocks, the loop's suspend check, and whether the loop is
// irreducible. Allocated in the graph's arena.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  // Reset membership; back edges and header are left untouched.
  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  ArenaVector<HBasicBlock*> back_edges_;
  // Bit per block id; growable because blocks may be added after construction.
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
743
744// Stores try/catch information for basic blocks.
745// Note that HGraph is constructed so that catch blocks cannot simultaneously
746// be try blocks.
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
// Behaves like a two-state tagged union: exactly one of `try_entry_` (try
// block) or `catch_dex_file_` (catch block) is set, and the accessors DCHECK
// the matching state.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // A catch-all handler is encoded with the no-index sentinel.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};
796
// Sentinel lifetime position for entities without liveness information.
// Explicit cast of -1, matching the style of the other sentinels in this file.
static constexpr size_t kNoLifetime = static_cast<size_t>(-1);
// Sentinel id for blocks that have not (yet) been added to a graph.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
799
800// A block in a method. Contains the list of instructions represented
801// as a double linked list. Each block knows its predecessors and
802// successors.
803
804class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
805 public:
806  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
807      : graph_(graph),
808        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
809        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
810        loop_information_(nullptr),
811        dominator_(nullptr),
812        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
813        block_id_(kInvalidBlockId),
814        dex_pc_(dex_pc),
815        lifetime_start_(kNoLifetime),
816        lifetime_end_(kNoLifetime),
817        try_catch_information_(nullptr) {
818    predecessors_.reserve(kDefaultNumberOfPredecessors);
819    successors_.reserve(kDefaultNumberOfSuccessors);
820    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
821  }
822
823  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
824    return predecessors_;
825  }
826
827  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
828    return successors_;
829  }
830
831  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
832  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
833
834  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
835    return ContainsElement(successors_, block, start_from);
836  }
837
838  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
839    return dominated_blocks_;
840  }
841
842  bool IsEntryBlock() const {
843    return graph_->GetEntryBlock() == this;
844  }
845
846  bool IsExitBlock() const {
847    return graph_->GetExitBlock() == this;
848  }
849
850  bool IsSingleGoto() const;
851  bool IsSingleTryBoundary() const;
852
853  // Returns true if this block emits nothing but a jump.
854  bool IsSingleJump() const {
855    HLoopInformation* loop_info = GetLoopInformation();
856    return (IsSingleGoto() || IsSingleTryBoundary())
857           // Back edges generate a suspend check.
858           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
859  }
860
  // Registers `back_edge` as a back edge of the loop headed by this block,
  // lazily allocating the loop information on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    // This block must be the header of the loop being built.
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }
868
869  HGraph* GetGraph() const { return graph_; }
870  void SetGraph(HGraph* graph) { graph_ = graph; }
871
872  uint32_t GetBlockId() const { return block_id_; }
873  void SetBlockId(int id) { block_id_ = id; }
874  uint32_t GetDexPc() const { return dex_pc_; }
875
876  HBasicBlock* GetDominator() const { return dominator_; }
877  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
878  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
879
  // Removes `block` from the list of blocks dominated by this one.
  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  // Replaces `existing` with `new_block` in the dominated-blocks list.
  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  // Resets dominator and dominated blocks. Defined out-of-line.
  void ClearDominanceInformation();

  // Number of back edges of the loop headed by this block, or 0 if this is
  // not a loop header. NOTE(review): returns int; confirm the underlying
  // HLoopInformation count fits.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }
893
  // Accessors for the block's non-phi instruction list and phi list, which
  // are kept separately.
  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  // First instruction that is not a ParallelMove. Defined out-of-line.
  HInstruction* GetFirstInstructionDisregardMoves() const;
902
  // Adds `block` as a successor and keeps the edge symmetric by registering
  // `this` as a predecessor of `block`.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the first edge to `existing` so it points to `new_block`,
  // preserving the successor index. Updates both sides of the edge.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    // Look up the index before mutating the lists.
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Mirror of ReplaceSuccessor for the predecessor side.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }
921
  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    // Splice `this` into the existing edge slots, then append the new edges.
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }
933
  // Removes the first edge from `block` to `this`. Only this side of the
  // edge is updated.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes the first edge from `this` to `block`. Only this side of the
  // edge is updated.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds `block` as a predecessor and keeps the edge symmetric by
  // registering `this` as a successor of `block`.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors; only valid for blocks with exactly two.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors; only valid for blocks with exactly two.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }
960
  // Index of the first occurrence of `predecessor` in the predecessor list.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Index of the first occurrence of `successor` in the successor list.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  // Returns the unique predecessor; the block must have exactly one.
  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  // Returns the unique successor; the block must have exactly one.
  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }
985
  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  // Appends `instruction` to the non-phi instruction list.
  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void MoveInstructionBefore(HInstruction* insn, HInstruction* cursor);
  // Phi manipulation; phis live in their own list, separate from instructions.
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1051
  // Whether this block is the header of the innermost loop containing it.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Whether the loop pre-header is this header's first predecessor.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Whether this header's first predecessor is a back edge of its loop.
  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  // Innermost loop containing this block, or null if not in a loop.
  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }
1069
1070  // Set the loop_information_ on this block. Overrides the current
1071  // loop_information if it is an outer loop of the passed loop information.
1072  // Note that this method is called while creating the loop information.
1073  void SetInLoop(HLoopInformation* info) {
1074    if (IsLoopHeader()) {
1075      // Nothing to do. This just means `info` is an outer loop.
1076    } else if (!IsInLoop()) {
1077      loop_information_ = info;
1078    } else if (loop_information_->Contains(*info->GetHeader())) {
1079      // Block is currently part of an outer loop. Make it part of this inner loop.
1080      // Note that a non loop header having a loop information means this loop information
1081      // has already been populated
1082      loop_information_ = info;
1083    } else {
1084      // Block is part of an inner loop. Do not update the loop information.
1085      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1086      // at this point, because this method is being called while populating `info`.
1087    }
1088  }
1089
  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  // Whether this block is covered by a try; false when no try/catch
  // information has been attached.
  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  // Whether this block is a catch handler entry.
  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Linear-scan lifetime positions; kNoLifetime until assigned.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  // Queries about the block's last instruction. Defined out-of-line.
  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;
1131
 private:
  HGraph* graph_;
  // Predecessor/successor edges; kept symmetric by the Add/Remove/Replace
  // helpers above.
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  // Non-phi instructions; phis are kept in a separate list.
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop containing this block, or null if not in a loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  // kInvalidBlockId until assigned via SetBlockId().
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Lifetime positions for register allocation; kNoLifetime until set.
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Null when the block is not part of any try/catch construct.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1153
1154// Iterates over the LoopInformation of all loops which contain 'block'
1155// from the innermost to the outermost.
1156class HLoopInformationOutwardIterator : public ValueObject {
1157 public:
1158  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1159      : current_(block.GetLoopInformation()) {}
1160
1161  bool Done() const { return current_ == nullptr; }
1162
1163  void Advance() {
1164    DCHECK(!Done());
1165    current_ = current_->GetPreHeader()->GetLoopInformation();
1166  }
1167
1168  HLoopInformation* Current() const {
1169    DCHECK(!Done());
1170    return current_;
1171  }
1172
1173 private:
1174  HLoopInformation* current_;
1175
1176  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1177};
1178
// X-macro listing every concrete (instantiable) instruction class shared by
// all architectures, as M(Name, BaseClassSuffix). Expanded below to generate
// forward declarations and, elsewhere, visitor and Is/As helpers.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(MultiplyAccumulate, Instruction)
#endif

// ARM-specific instructions; empty when the ARM backend is not built.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

// ARM64-specific instructions; empty when the ARM64 backend is not built.
#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64DataProcWithShifterOp, Instruction)                            \
  M(Arm64IntermediateAddress, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

// x86-specific instructions; empty when the x86 backend is not built.
#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions: the common list plus per-architecture extras.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// Abstract (non-instantiable) instruction base classes.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<Name> instruction class.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate emitted in each concrete instruction class body: kind, debug
// name, type equality and visitor dispatch. Note: no trailing semicolon, so
// the use site supplies one after Accept().
#define DECLARE_INSTRUCTION(type)                                       \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }  \
  const char* DebugName() const OVERRIDE { return #type; }              \
  bool InstructionTypeEquals(HInstruction* other) const OVERRIDE {      \
    return other->Is##type();                                           \
  }                                                                     \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate for abstract base classes: trivial Is/As casts.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1339
// A node in an instruction's intrusive use list. T is the user type
// (presumably HInstruction* or HEnvironment* — see HUseList users).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  // The user that holds this use.
  T GetUser() const { return user_; }
  // Index of this use within the user (mutable so uses can be renumbered).
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Construction restricted to HInstruction (friend below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1362
// Intrusive singly-linked list of use nodes.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Re-targets an existing record at a new position in the use list.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // The iterator *before* the node is stored so the node can be erased from
  // a singly-linked (forward) list; GetUseNode() advances by one.
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1393
1394/**
1395 * Side-effects representation.
1396 *
1397 * For write/read dependences on fields/arrays, the dependence analysis uses
1398 * type disambiguation (e.g. a float field write cannot modify the value of an
1399 * integer field read) and the access type (e.g.  a reference array write cannot
1400 * modify the value of a reference field read [although it may modify the
1401 * reference fetch prior to reading the field, which is represented by its own
1402 * write/read dependence]). The analysis makes conservative points-to
1403 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1404 * the same, and any reference read depends on any reference read without
1405 * further regard of its type).
1406 *
 * The internal representation uses 38 bits and is described in the table below.
1408 * The first line indicates the side effect, and for field/array accesses the
1409 * second line indicates the type of the access (in the order of the
1410 * Primitive::Type enum).
1411 * The two numbered lines below indicate the bit position in the bitfield (read
1412 * vertically).
1413 *
1414 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1415 *   +-------------+---------+---------+--------------+---------+---------+
1416 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1417 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1418 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1419 *
1420 * Note that, to ease the implementation, 'changes' bits are least significant
1421 * bits, while 'dependency' bits are most significant bits.
1422 */
class SideEffects : public ValueObject {
 public:
  // Default: no side effects and no dependencies.
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  // Every change bit and every dependency bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All writes and reads, plus 'can trigger GC' — everything except the
  // 'depends on GC' bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  // All field/array write and read bits, without the GC bits.
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }
1458
  // Typed field write. Volatile accesses are modeled as all writes and reads
  // so that no memory operation is reordered across them.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayWriteOffset));
  }

  // Typed field read; volatile reads are widened like volatile writes above.
  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlagWithAlias(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlagWithAlias(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }
1486
  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Removes from this the bits present in `other`.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Whether every bit of `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift the 'depends on' bits down so they line up with 'change' bits.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }
1542
1543  // Returns string representation of flags (for debugging only).
1544  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1545  std::string ToString() const {
1546    std::string flags = "|";
1547    for (int s = kLastBit; s >= 0; s--) {
1548      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1549      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1550        // This is a bit for the GC side effect.
1551        if (current_bit_is_set) {
1552          flags += "GC";
1553        }
1554        flags += "|";
1555      } else {
1556        // This is a bit for the array/field analysis.
1557        // The underscore character stands for the 'can trigger GC' bit.
1558        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1559        if (current_bit_is_set) {
1560          flags += kDebug[s];
1561        }
1562        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1563            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1564          flags += "|";
1565        }
1566      }
1567    }
1568    return flags;
1569  }
1570
1571  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1572
 private:
  // Number of typed bits per access kind (one per Primitive::Type entry
  // tracked: L, Z, B, C, S, I, J, F, D).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // Bit layout: writes first, then 'can trigger GC', then reads, then
  // 'depends on GC' (see the class comment above).
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  // Number of 'change' bits (writes plus 'can trigger GC').
  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  // Masks derived from the layout above.
  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1602
1603  // Work around the fact that HIR aliases I/F and J/D.
1604  // TODO: remove this interceptor once HIR types are clean
1605  static uint64_t TypeFlagWithAlias(Primitive::Type type, int offset) {
1606    switch (type) {
1607      case Primitive::kPrimInt:
1608      case Primitive::kPrimFloat:
1609        return TypeFlag(Primitive::kPrimInt, offset) |
1610               TypeFlag(Primitive::kPrimFloat, offset);
1611      case Primitive::kPrimLong:
1612      case Primitive::kPrimDouble:
1613        return TypeFlag(Primitive::kPrimLong, offset) |
1614               TypeFlag(Primitive::kPrimDouble, offset);
1615      default:
1616        return TypeFlag(type, offset);
1617    }
1618  }
1619
1620  // Translates type to bit flag.
1621  static uint64_t TypeFlag(Primitive::Type type, int offset) {
1622    CHECK_NE(type, Primitive::kPrimVoid);
1623    const uint64_t one = 1;
1624    const int shift = type;  // 0-based consecutive enum
1625    DCHECK_LE(kFieldWriteOffset, shift);
1626    DCHECK_LT(shift, kArrayWriteOffset);
1627    return one << (type + offset);
1628  }
1629
1630  // Private constructor on direct flags value.
1631  explicit SideEffects(uint64_t flags) : flags_(flags) {}
1632
1633  uint64_t flags_;
1634};
1635
// A HEnvironment object contains the values of virtual registers at a given location.
// Environments form a chain through parent_ when a method has been inlined:
// the parent environment describes the caller's registers.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment of `number_of_vregs` empty slots; slots are
  // filled in later via SetRawEnvAt()/CopyFrom().
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Copy constructor variant: copies the metadata and sizes of `to_copy`
  // but not its values (use CopyFrom() for that) nor its parent chain.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a copy of `parent` (and, recursively, its own parents) at the
  // end of this environment's parent chain. The copies are allocated in
  // `allocator` and are held by the same holder as `this`.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      // Not at the end of the chain yet; recurse towards the end.
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Sets slot `index` without updating the instruction's env-use list
  // (hence "raw"); the use list is fixed up separately.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual-register slots in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // An environment with a parent comes from an inlined invoke.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Per-slot instruction records; written directly by the friend
  // HInstruction when fixing up env-use lists.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1748
// Abstract base class of all HIR instructions. An instruction lives in a
// doubly-linked list inside a basic block (previous_/next_), records its
// side effects, tracks its users (both regular inputs and environment
// slots) through intrusive use lists, and carries data filled in by later
// phases: locations (code generation), live interval (liveness analysis)
// and reference type info.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// One enumerator (kFoo) per instruction in the FOR_EACH_INSTRUCTION list.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual size_t InputCount() const = 0;
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
  // Sets input `index` without updating the input's use list (hence "raw").
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` uses this instruction as input number `index` by
  // pushing a new node on the intrusive use list and fixing up the user's
  // input record to point back at it.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Same as AddUseAt() but for a use from an environment slot.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction from the use list of its input number `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom(), but loop phis for `block` are replaced by
  // their first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declares Is##type()/As##type() for every concrete instruction; the
// definitions are generated further down the file.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions the As##type() defaults return nullptr and are
// overridden by the matching abstract subclass.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash over the kind and the ids of the inputs; used for value numbering.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (size_t i = 0, e = InputCount(); i < e; ++i) {
      result = (result * 31) + InputAt(i)->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses may pack their own flags/fields starting at this bit.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Reads a single boolean flag out of packed_fields_.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  // Writes a single boolean flag into packed_fields_.
  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After nodes in [uses_.begin(), fixup_end) were inserted, rewrites the
  // users' input records so each points at the list node *before* its use
  // (needed for O(1) erase_after on the singly-linked use list).
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After the node following `before_use_node` was erased, repoints the new
  // successor's input record at `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion();
  // writes the HEnvironment's vregs_ directly (HInstruction is a friend).
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
2166std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2167
2168class HInputIterator : public ValueObject {
2169 public:
2170  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}
2171
2172  bool Done() const { return index_ == instruction_->InputCount(); }
2173  HInstruction* Current() const { return instruction_->InputAt(index_); }
2174  void Advance() { index_++; }
2175
2176 private:
2177  HInstruction* instruction_;
2178  size_t index_;
2179
2180  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
2181};
2182
2183class HInstructionIterator : public ValueObject {
2184 public:
2185  explicit HInstructionIterator(const HInstructionList& instructions)
2186      : instruction_(instructions.first_instruction_) {
2187    next_ = Done() ? nullptr : instruction_->GetNext();
2188  }
2189
2190  bool Done() const { return instruction_ == nullptr; }
2191  HInstruction* Current() const { return instruction_; }
2192  void Advance() {
2193    instruction_ = next_;
2194    next_ = Done() ? nullptr : instruction_->GetNext();
2195  }
2196
2197 private:
2198  HInstruction* instruction_;
2199  HInstruction* next_;
2200
2201  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2202};
2203
2204class HBackwardInstructionIterator : public ValueObject {
2205 public:
2206  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2207      : instruction_(instructions.last_instruction_) {
2208    next_ = Done() ? nullptr : instruction_->GetPrevious();
2209  }
2210
2211  bool Done() const { return instruction_ == nullptr; }
2212  HInstruction* Current() const { return instruction_; }
2213  void Advance() {
2214    instruction_ = next_;
2215    next_ = Done() ? nullptr : instruction_->GetPrevious();
2216  }
2217
2218 private:
2219  HInstruction* instruction_;
2220  HInstruction* next_;
2221
2222  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2223};
2224
// Instruction with a compile-time fixed number of inputs (N), stored inline
// in a std::array. Most concrete instructions derive from this.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return N; }

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    DCHECK_LT(i, N);
    return inputs_[i];
  }

  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
    DCHECK_LT(i, N);
    inputs_[i] = input;
  }

 private:
  // Fixed-size input records; also accessed by the friend SsaBuilder.
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2250
// HTemplateInstruction specialization for N=0.
// Needed because std::array<T, 0> would otherwise be instantiated; an
// instruction with no inputs must never have its input records accessed.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  size_t InputCount() const OVERRIDE { return 0; }

 protected:
  // There are no inputs; any access is a bug.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  friend class SsaBuilder;
};
2277
// An HTemplateInstruction that produces a value: its result type is stored
// in the packed fields, right after HInstruction's generic bits.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Packed-field layout: the type occupies just enough bits to represent
  // every Primitive::Type value. Subclasses pack further fields starting
  // at kNumberOfExpressionPackedBits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2300
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2315
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn : public HTemplateInstruction<1> {
 public:
  // Input 0 is the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2332
// SSA phi node: selects one of its inputs depending on the predecessor
// through which its block was entered. Also used for catch phis. Inputs are
// stored in an arena-allocated vector (variable arity), unlike the fixed-size
// HTemplateInstruction nodes.
class HPhi : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // Dex register this phi was created for (see IsVRegEquivalentOf).
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi of the same block created for the same dex
  // register (possibly with a different type).
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 protected:
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

 private:
  // Packed field layout: [type | is-live flag | can-be-null flag].
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2433
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2448
// Jumps from one block to another. The target is the block's single
// successor.
class HGoto : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2465
// Abstract base class of all constant nodes (int/long/float/double/null).
// Constants are side-effect free and movable.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern? (Stricter than IsArithmeticZero for
  // floating point: -0.0 is arithmetic zero but not a 0-bit pattern.)
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit pattern of the constant, widened to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2489
2490class HNullConstant : public HConstant {
2491 public:
2492  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2493    return true;
2494  }
2495
2496  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2497
2498  size_t ComputeHashCode() const OVERRIDE { return 0; }
2499
2500  // The null constant representation is a 0-bit pattern.
2501  virtual bool IsZeroBitPattern() const { return true; }
2502
2503  DECLARE_INSTRUCTION(NullConstant);
2504
2505 private:
2506  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2507
2508  friend class HGraph;
2509  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2510};
2511
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // Zero-extend (not sign-extend) so negative values do not set high bits.
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Only HGraph creates int constants, allowing them to be cached.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2554
// Constants of the type long.
class HLongConstant : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Only HGraph creates long constants, allowing them to be cached.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2584
// Constants of the type float. Equality and the Is* predicates compare bit
// patterns (via bit_cast), not arithmetic values, so that e.g. -0.0f and 0.0f
// remain distinguishable and NaNs compare consistently.
class HFloatConstant : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  // NOTE(review): float -> size_t conversion is undefined for NaN/negative
  // values; presumably acceptable for a hash, but confirm.
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Bit-pattern zero matches +0.0f only.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2637
// Constants of the type double. Like HFloatConstant, equality and the Is*
// predicates compare bit patterns, not arithmetic values.
class HDoubleConstant : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  // NOTE(review): double -> size_t conversion is undefined for NaN/negative
  // values; presumably acceptable for a hash, but confirm.
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Bit-pattern zero matches +0.0 only.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2688
// Conditional branch. A block ending with an HIf instruction must have
// two successors: the "true" target at index 0 and the "false" target at
// index 1.
class HIf : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2713
2714
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
 public:
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Non-owning view of the handler successors (all successors except index 0).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed field storing the BoundaryKind (entry/exit).
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2776
// Deoptimize to interpreter, upon checking a condition (the single input).
// Needs an environment to reconstruct interpreter state.
class HDeoptimize : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2799
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2813
2814// Fetches an ArtMethod from the virtual table or the interface method table
2815// of a class.
2816class HClassTableGet : public HExpression<1> {
2817 public:
2818  enum class TableKind {
2819    kVTable,
2820    kIMTable,
2821    kLast = kIMTable
2822  };
2823  HClassTableGet(HInstruction* cls,
2824                 Primitive::Type type,
2825                 TableKind kind,
2826                 size_t index,
2827                 uint32_t dex_pc)
2828      : HExpression(type, SideEffects::None(), dex_pc),
2829        index_(index) {
2830    SetPackedField<TableKindField>(kind);
2831    SetRawInputAt(0, cls);
2832  }
2833
2834  bool CanBeMoved() const OVERRIDE { return true; }
2835  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2836    return other->AsClassTableGet()->GetIndex() == index_ &&
2837        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2838  }
2839
2840  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2841  size_t GetIndex() const { return index_; }
2842
2843  DECLARE_INSTRUCTION(ClassTableGet);
2844
2845 private:
2846  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2847  static constexpr size_t kFieldTableKindSize =
2848      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2849  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2850  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2851                "Too many packed fields.");
2852  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
2853
2854  // The index of the ArtMethod in the table.
2855  const size_t index_;
2856
2857  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2858};
2859
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Case value of the first table entry; entry i matches start_value_ + i.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
2893
// Abstract base class for side-effect-free operations with a single input
// (e.g. neg, not). Subclasses provide constant-folding via Evaluate.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
2925
// Abstract base class for operations with two inputs. Subclasses provide
// constant-folding via the Evaluate overloads; input ordering helpers below
// canonicalize commutative operations for GVN.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or the only constant is on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3015
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias  // sentinel used to size the packed field (MinimumBitsToStore)
};

std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3026
// Abstract base class for boolean-valued comparisons (==, !=, <, ...).
// Stores a ComparisonBias so a merged HCompare's NaN handling is preserved.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Conditions with the same kind and bias are interchangeable.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // True when a NaN operand makes this floating-point condition evaluate true.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // True when a NaN operand makes this floating-point condition evaluate false.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1, -1 or 0 for x > y, x < y, x == y respectively.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating point, applying the gt/lt bias for
  // unordered (NaN) operands.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3106
// Instruction to check if two inputs are equal to each other.
class HEqual : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null == null is always true.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(true, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HEqual instruction; evaluate it as
  // `Compare(x, y) == 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
                                 GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};
3151
// Instruction to check if two inputs are not equal to each other.
class HNotEqual : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null != null is always false.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(false, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HNotEqual instruction; evaluate it as
  // `Compare(x, y) != 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> bool Compute(T x, T y) const { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};
3194
3195class HLessThan : public HCondition {
3196 public:
3197  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3198      : HCondition(first, second, dex_pc) {}
3199
3200  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3201    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3202  }
3203  // In the following Evaluate methods, a HCompare instruction has
3204  // been merged into this HLessThan instruction; evaluate it as
3205  // `Compare(x, y) < 0`.
3206  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3207    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3208  }
3209  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3210    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3211  }
3212  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3213    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3214  }
3215
3216  DECLARE_INSTRUCTION(LessThan);
3217
3218  IfCondition GetCondition() const OVERRIDE {
3219    return kCondLT;
3220  }
3221
3222  IfCondition GetOppositeCondition() const OVERRIDE {
3223    return kCondGE;
3224  }
3225
3226 private:
3227  template <typename T> bool Compute(T x, T y) const { return x < y; }
3228
3229  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3230};
3231
3232class HLessThanOrEqual : public HCondition {
3233 public:
3234  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3235      : HCondition(first, second, dex_pc) {}
3236
3237  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3238    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3239  }
3240  // In the following Evaluate methods, a HCompare instruction has
3241  // been merged into this HLessThanOrEqual instruction; evaluate it as
3242  // `Compare(x, y) <= 0`.
3243  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3244    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3245  }
3246  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3247    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3248  }
3249  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3250    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3251  }
3252
3253  DECLARE_INSTRUCTION(LessThanOrEqual);
3254
3255  IfCondition GetCondition() const OVERRIDE {
3256    return kCondLE;
3257  }
3258
3259  IfCondition GetOppositeCondition() const OVERRIDE {
3260    return kCondGT;
3261  }
3262
3263 private:
3264  template <typename T> bool Compute(T x, T y) const { return x <= y; }
3265
3266  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3267};
3268
3269class HGreaterThan : public HCondition {
3270 public:
3271  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3272      : HCondition(first, second, dex_pc) {}
3273
3274  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3275    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3276  }
3277  // In the following Evaluate methods, a HCompare instruction has
3278  // been merged into this HGreaterThan instruction; evaluate it as
3279  // `Compare(x, y) > 0`.
3280  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3281    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3282  }
3283  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3284    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3285  }
3286  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3287    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3288  }
3289
3290  DECLARE_INSTRUCTION(GreaterThan);
3291
3292  IfCondition GetCondition() const OVERRIDE {
3293    return kCondGT;
3294  }
3295
3296  IfCondition GetOppositeCondition() const OVERRIDE {
3297    return kCondLE;
3298  }
3299
3300 private:
3301  template <typename T> bool Compute(T x, T y) const { return x > y; }
3302
3303  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3304};
3305
3306class HGreaterThanOrEqual : public HCondition {
3307 public:
3308  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3309      : HCondition(first, second, dex_pc) {}
3310
3311  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3312    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3313  }
3314  // In the following Evaluate methods, a HCompare instruction has
3315  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3316  // `Compare(x, y) >= 0`.
3317  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3318    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3319  }
3320  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3321    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3322  }
3323  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3324    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3325  }
3326
3327  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3328
3329  IfCondition GetCondition() const OVERRIDE {
3330    return kCondGE;
3331  }
3332
3333  IfCondition GetOppositeCondition() const OVERRIDE {
3334    return kCondLT;
3335  }
3336
3337 private:
3338  template <typename T> bool Compute(T x, T y) const { return x >= y; }
3339
3340  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3341};
3342
3343class HBelow : public HCondition {
3344 public:
3345  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3346      : HCondition(first, second, dex_pc) {}
3347
3348  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3349    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3350  }
3351  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3352    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3353  }
3354  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3355                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3356    LOG(FATAL) << DebugName() << " is not defined for float values";
3357    UNREACHABLE();
3358  }
3359  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3360                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3361    LOG(FATAL) << DebugName() << " is not defined for double values";
3362    UNREACHABLE();
3363  }
3364
3365  DECLARE_INSTRUCTION(Below);
3366
3367  IfCondition GetCondition() const OVERRIDE {
3368    return kCondB;
3369  }
3370
3371  IfCondition GetOppositeCondition() const OVERRIDE {
3372    return kCondAE;
3373  }
3374
3375 private:
3376  template <typename T> bool Compute(T x, T y) const {
3377    return MakeUnsigned(x) < MakeUnsigned(y);
3378  }
3379
3380  DISALLOW_COPY_AND_ASSIGN(HBelow);
3381};
3382
3383class HBelowOrEqual : public HCondition {
3384 public:
3385  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3386      : HCondition(first, second, dex_pc) {}
3387
3388  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3389    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3390  }
3391  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3392    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3393  }
3394  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3395                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3396    LOG(FATAL) << DebugName() << " is not defined for float values";
3397    UNREACHABLE();
3398  }
3399  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3400                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3401    LOG(FATAL) << DebugName() << " is not defined for double values";
3402    UNREACHABLE();
3403  }
3404
3405  DECLARE_INSTRUCTION(BelowOrEqual);
3406
3407  IfCondition GetCondition() const OVERRIDE {
3408    return kCondBE;
3409  }
3410
3411  IfCondition GetOppositeCondition() const OVERRIDE {
3412    return kCondA;
3413  }
3414
3415 private:
3416  template <typename T> bool Compute(T x, T y) const {
3417    return MakeUnsigned(x) <= MakeUnsigned(y);
3418  }
3419
3420  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3421};
3422
3423class HAbove : public HCondition {
3424 public:
3425  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3426      : HCondition(first, second, dex_pc) {}
3427
3428  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3429    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3430  }
3431  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3432    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3433  }
3434  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3435                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3436    LOG(FATAL) << DebugName() << " is not defined for float values";
3437    UNREACHABLE();
3438  }
3439  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3440                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3441    LOG(FATAL) << DebugName() << " is not defined for double values";
3442    UNREACHABLE();
3443  }
3444
3445  DECLARE_INSTRUCTION(Above);
3446
3447  IfCondition GetCondition() const OVERRIDE {
3448    return kCondA;
3449  }
3450
3451  IfCondition GetOppositeCondition() const OVERRIDE {
3452    return kCondBE;
3453  }
3454
3455 private:
3456  template <typename T> bool Compute(T x, T y) const {
3457    return MakeUnsigned(x) > MakeUnsigned(y);
3458  }
3459
3460  DISALLOW_COPY_AND_ASSIGN(HAbove);
3461};
3462
3463class HAboveOrEqual : public HCondition {
3464 public:
3465  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3466      : HCondition(first, second, dex_pc) {}
3467
3468  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3469    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3470  }
3471  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3472    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3473  }
3474  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3475                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3476    LOG(FATAL) << DebugName() << " is not defined for float values";
3477    UNREACHABLE();
3478  }
3479  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3480                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3481    LOG(FATAL) << DebugName() << " is not defined for double values";
3482    UNREACHABLE();
3483  }
3484
3485  DECLARE_INSTRUCTION(AboveOrEqual);
3486
3487  IfCondition GetCondition() const OVERRIDE {
3488    return kCondAE;
3489  }
3490
3491  IfCondition GetOppositeCondition() const OVERRIDE {
3492    return kCondB;
3493  }
3494
3495 private:
3496  template <typename T> bool Compute(T x, T y) const {
3497    return MakeUnsigned(x) >= MakeUnsigned(y);
3498  }
3499
3500  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3501};
3502
3503// Instruction to check how two inputs compare to each other.
3504// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must match the declared comparison type.
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way comparison of two values: 1, -1 or 0.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison of two floating-point values. An unordered
  // result (i.e. a NaN operand) resolves to 1 or -1 according to the
  // instruction's comparison bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are interchangeable only if they agree on the bias.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // The comparison bias is stored in the packed-fields area, right
  // after the bits used by HExpression.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3593
// Allocates a new object of the class referenced by `cls`.
class HNewInstance : public HExpression<2> {
 public:
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool can_throw,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagCanThrow>(can_throw);
    SetPackedFlag<kFlagFinalizable>(finalizable);
    // Input 0 is the class to instantiate, input 1 the current method.
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // It may throw when called on type that's not instantiable/accessible.
  // It can throw OOME.
  // TODO: distinguish between the two cases so we can for example allow allocation elimination.
  // NOTE: the `|| true` deliberately makes this unconditionally true until
  // the TODO above is resolved; the packed flag is kept for that future use.
  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>() || true; }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // Packed flags follow the bits used by HExpression.
  static constexpr size_t kFlagCanThrow = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagFinalizable = kFlagCanThrow + 1;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const uint16_t type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3652
// One kName enumerator per intrinsic recognized by the optimizing compiler
// (generated from intrinsics_list.h), plus kNone for non-intrinsic invokes.
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  // Expand the INTRINSICS_LIST macro defined by the include above.
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
3662std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
3663
// Whether an intrinsic needs an environment or a dex cache (see HInvoke::SetIntrinsic).
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3668
// Heap side effects an intrinsic may have (see HInvoke::SetIntrinsic).
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3675
// Whether an intrinsic may throw (see HInvoke::SetIntrinsic).
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3680
// Abstract base class for all method-invocation instructions.
class HInvoke : public HInstruction {
 public:
  size_t InputCount() const OVERRIDE { return inputs_.size(); }

  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  // The invoke type as it appears in the dex code, before any devirtualization.
  InvokeType GetOriginalInvokeType() const {
    return GetPackedField<OriginalInvokeTypeField>();
  }

  // Returns Intrinsics::kNone unless this invoke has been recognized as an intrinsic.
  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only recognized intrinsics are safe to move; arbitrary calls are not.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes are interchangeable only if both are the same intrinsic.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: original invoke type, then return type, then
  // the can-throw flag, following the generic HInstruction bits.
  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOriginalInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using OriginalInvokeTypeField =
      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
    // Conservatively assume a call can throw; SetCanThrow() may refine this.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3795
3796class HInvokeUnresolved : public HInvoke {
3797 public:
3798  HInvokeUnresolved(ArenaAllocator* arena,
3799                    uint32_t number_of_arguments,
3800                    Primitive::Type return_type,
3801                    uint32_t dex_pc,
3802                    uint32_t dex_method_index,
3803                    InvokeType invoke_type)
3804      : HInvoke(arena,
3805                number_of_arguments,
3806                0u /* number_of_other_inputs */,
3807                return_type,
3808                dex_pc,
3809                dex_method_index,
3810                invoke_type) {
3811  }
3812
3813  DECLARE_INSTRUCTION(InvokeUnresolved);
3814
3815 private:
3816  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
3817};
3818
3819class HInvokeStaticOrDirect : public HInvoke {
3820 public:
3821  // Requirements of this method call regarding the class
3822  // initialization (clinit) check of its declaring class.
3823  enum class ClinitCheckRequirement {
3824    kNone,      // Class already initialized.
3825    kExplicit,  // Static call having explicit clinit check as last input.
3826    kImplicit,  // Static call implicitly requiring a clinit check.
3827    kLast = kImplicit
3828  };
3829
3830  // Determines how to load the target ArtMethod*.
3831  enum class MethodLoadKind {
3832    // Use a String init ArtMethod* loaded from Thread entrypoints.
3833    kStringInit,
3834
3835    // Use the method's own ArtMethod* loaded by the register allocator.
3836    kRecursive,
3837
3838    // Use ArtMethod* at a known address, embed the direct address in the code.
3839    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3840    kDirectAddress,
3841
3842    // Use ArtMethod* at an address that will be known at link time, embed the direct
3843    // address in the code. If the image is relocatable, emit .patch_oat entry.
3844    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3845    // the image relocatable or not.
3846    kDirectAddressWithFixup,
3847
3848    // Load from resolved methods array in the dex cache using a PC-relative load.
3849    // Used when we need to use the dex cache, for example for invoke-static that
3850    // may cause class initialization (the entry may point to a resolution method),
3851    // and we know that we can access the dex cache arrays using a PC-relative load.
3852    kDexCachePcRelative,
3853
3854    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3855    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3856    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3857    // or unimplemented or impractical (i.e. slow) on a particular architecture.
3858    kDexCacheViaMethod,
3859  };
3860
3861  // Determines the location of the code pointer.
3862  enum class CodePtrLocation {
3863    // Recursive call, use local PC-relative call instruction.
3864    kCallSelf,
3865
3866    // Use PC-relative call instruction patched at link time.
3867    // Used for calls within an oat file, boot->boot or app->app.
3868    kCallPCRelative,
3869
3870    // Call to a known target address, embed the direct address in code.
3871    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3872    kCallDirect,
3873
3874    // Call to a target address that will be known at link time, embed the direct
3875    // address in code. If the image is relocatable, emit .patch_oat entry.
3876    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3877    // the image relocatable or not.
3878    kCallDirectWithFixup,
3879
3880    // Use code pointer from the ArtMethod*.
3881    // Used when we don't know the target code. This is also the last-resort-kind used when
3882    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3883    kCallArtMethod,
3884  };
3885
3886  struct DispatchInfo {
3887    MethodLoadKind method_load_kind;
3888    CodePtrLocation code_ptr_location;
3889    // The method load data holds
3890    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
3891    //     Note that there are multiple string init methods, each having its own offset.
3892    //   - the method address for kDirectAddress
3893    //   - the dex cache arrays offset for kDexCachePcRel.
3894    uint64_t method_load_data;
3895    uint64_t direct_code_ptr;
3896  };
3897
3898  HInvokeStaticOrDirect(ArenaAllocator* arena,
3899                        uint32_t number_of_arguments,
3900                        Primitive::Type return_type,
3901                        uint32_t dex_pc,
3902                        uint32_t method_index,
3903                        MethodReference target_method,
3904                        DispatchInfo dispatch_info,
3905                        InvokeType original_invoke_type,
3906                        InvokeType optimized_invoke_type,
3907                        ClinitCheckRequirement clinit_check_requirement)
3908      : HInvoke(arena,
3909                number_of_arguments,
3910                // There is potentially one extra argument for the HCurrentMethod node, and
3911                // potentially one other if the clinit check is explicit, and potentially
3912                // one other if the method is a string factory.
3913                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
3914                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
3915                return_type,
3916                dex_pc,
3917                method_index,
3918                original_invoke_type),
3919        target_method_(target_method),
3920        dispatch_info_(dispatch_info) {
3921    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
3922    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
3923  }
3924
  // Install a new dispatch strategy. If the previous strategy used the
  // current-method input but the new one does not, that input is removed.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }
3939
  // Insert the single allowed special input right after the argument inputs
  // (and before an explicit clinit check input, if present).
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }
3947
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  // The result can be null only for reference-returning calls; string-init
  // invokes are treated as never producing null.
  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  // True when there is any input beyond the plain arguments (current method,
  // clinit check, or a platform-specific special input).
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
3965
  // The invoke type after sharpening (may differ from the original invoke
  // type stored in the base HInvoke).
  InvokeType GetOptimizedInvokeType() const {
    return GetPackedField<OptimizedInvokeTypeField>();
  }

  void SetOptimizedInvokeType(InvokeType invoke_type) {
    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
  }

  // Accessors and predicates over the sharpened dispatch information.
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  // Whether the special input is the HCurrentMethod node, as implied by the
  // method load kind; DCHECKs that the inputs are consistent with that.
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }
3997
  // The following accessors interpret `dispatch_info_.method_load_data`
  // according to the method load kind; each DCHECKs the matching kind.
  int32_t GetStringInitOffset() const {
    DCHECK(IsStringInit());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }
4026
  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  // The packed requirement field is downgraded to `new_requirement`.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = InputCount() - 1;
    HInstruction* last_input = InputAt(last_input_index);
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }
4058
4059  DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4060
4061 protected:
  // Forwards to HInvoke::InputRecordAt(), with a debug-build sanity check on
  // the last input of a static invoke carrying an explicit clinit check.
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
    const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
      HInstruction* input = input_record.GetInstruction();
      // `input` is the last input of a static invoke marked as having
      // an explicit clinit check. It must either be:
      // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
      // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
      DCHECK(input != nullptr);
      DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
    }
    return input_record;
  }
4075
4076  void InsertInputAt(size_t index, HInstruction* input);
4077  void RemoveInputAt(size_t index);
4078
4079 private:
4080  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
4081  static constexpr size_t kFieldOptimizedInvokeTypeSize =
4082      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4083  static constexpr size_t kFieldClinitCheckRequirement =
4084      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
4085  static constexpr size_t kFieldClinitCheckRequirementSize =
4086      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4087  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4088      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4089  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4090                "Too many packed fields.");
4091  using OptimizedInvokeTypeField =
4092      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
4093  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4094                                               kFieldClinitCheckRequirement,
4095                                               kFieldClinitCheckRequirementSize>;
4096
4097  // The target method may refer to different dex file or method index than the original
4098  // invoke. This happens for sharpened calls and for calls where a method was redeclared
4099  // in derived class to increase visibility.
4100  MethodReference target_method_;
4101  DispatchInfo dispatch_info_;
4102
4103  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
4104};
4105std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4106std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4107
// Invocation of a virtual method, dispatched through the receiver's vtable
// at `vtable_index_`.
class HInvokeVirtual : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 uint32_t vtable_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
        vtable_index_(vtable_index) {}

  // An implicit null check can be folded into the call only when `obj` is the
  // receiver (input 0) and the call has not been intrinsified.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Index into the receiver class's vtable.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4133
// Invocation of an interface method, dispatched through the interface method
// table (IMT) at `imt_index_`.
class HInvokeInterface : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   uint32_t imt_index)
      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
        imt_index_(imt_index) {}

  // An implicit null check can be folded into the call only when `obj` is the
  // receiver (input 0) and the call has not been intrinsified.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  // Exposes the dex method index stored in the base HInvoke.
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  // Index into the interface method table.
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};
4160
4161class HNeg : public HUnaryOperation {
4162 public:
4163  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4164      : HUnaryOperation(result_type, input, dex_pc) {
4165    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4166  }
4167
4168  template <typename T> T Compute(T x) const { return -x; }
4169
4170  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4171    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4172  }
4173  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4174    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4175  }
4176  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4177    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4178  }
4179  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4180    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4181  }
4182
4183  DECLARE_INSTRUCTION(Neg);
4184
4185 private:
4186  DISALLOW_COPY_AND_ASSIGN(HNeg);
4187};
4188
// Allocation of a new array. Input 0 is the runtime length, input 1 the
// current method (passed to the allocation entrypoint).
class HNewArray : public HExpression<2> {
 public:
  HNewArray(HInstruction* length,
            HCurrentMethod* current_method,
            uint32_t dex_pc,
            uint16_t type_index,
            const DexFile& dex_file,
            QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetRawInputAt(0, length);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  DECLARE_INSTRUCTION(NewArray);

 private:
  // Dex type index of the array class, relative to `dex_file_`.
  const uint16_t type_index_;
  const DexFile& dex_file_;
  // Runtime entrypoint used to perform the allocation.
  const QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};
4227
4228class HAdd : public HBinaryOperation {
4229 public:
4230  HAdd(Primitive::Type result_type,
4231       HInstruction* left,
4232       HInstruction* right,
4233       uint32_t dex_pc = kNoDexPc)
4234      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4235
4236  bool IsCommutative() const OVERRIDE { return true; }
4237
4238  template <typename T> T Compute(T x, T y) const { return x + y; }
4239
4240  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4241    return GetBlock()->GetGraph()->GetIntConstant(
4242        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4243  }
4244  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4245    return GetBlock()->GetGraph()->GetLongConstant(
4246        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4247  }
4248  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4249    return GetBlock()->GetGraph()->GetFloatConstant(
4250        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4251  }
4252  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4253    return GetBlock()->GetGraph()->GetDoubleConstant(
4254        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4255  }
4256
4257  DECLARE_INSTRUCTION(Add);
4258
4259 private:
4260  DISALLOW_COPY_AND_ASSIGN(HAdd);
4261};
4262
4263class HSub : public HBinaryOperation {
4264 public:
4265  HSub(Primitive::Type result_type,
4266       HInstruction* left,
4267       HInstruction* right,
4268       uint32_t dex_pc = kNoDexPc)
4269      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4270
4271  template <typename T> T Compute(T x, T y) const { return x - y; }
4272
4273  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4274    return GetBlock()->GetGraph()->GetIntConstant(
4275        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4276  }
4277  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4278    return GetBlock()->GetGraph()->GetLongConstant(
4279        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4280  }
4281  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4282    return GetBlock()->GetGraph()->GetFloatConstant(
4283        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4284  }
4285  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4286    return GetBlock()->GetGraph()->GetDoubleConstant(
4287        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4288  }
4289
4290  DECLARE_INSTRUCTION(Sub);
4291
4292 private:
4293  DISALLOW_COPY_AND_ASSIGN(HSub);
4294};
4295
4296class HMul : public HBinaryOperation {
4297 public:
4298  HMul(Primitive::Type result_type,
4299       HInstruction* left,
4300       HInstruction* right,
4301       uint32_t dex_pc = kNoDexPc)
4302      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4303
4304  bool IsCommutative() const OVERRIDE { return true; }
4305
4306  template <typename T> T Compute(T x, T y) const { return x * y; }
4307
4308  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4309    return GetBlock()->GetGraph()->GetIntConstant(
4310        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4311  }
4312  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4313    return GetBlock()->GetGraph()->GetLongConstant(
4314        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4315  }
4316  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4317    return GetBlock()->GetGraph()->GetFloatConstant(
4318        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4319  }
4320  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4321    return GetBlock()->GetGraph()->GetDoubleConstant(
4322        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4323  }
4324
4325  DECLARE_INSTRUCTION(Mul);
4326
4327 private:
4328  DISALLOW_COPY_AND_ASSIGN(HMul);
4329};
4330
// Division of two operands of the same type. Integral division by zero is
// guarded elsewhere (HDivZeroCheck); floating-point division follows IEEE.
class HDiv : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    // NOTE(review): for x == min<T> the negation overflows the signed type,
    // which is undefined behavior in standard C++ — presumably the build
    // relies on wrapping semantics; confirm compiler flags.
    return (y == -1) ? -x : x / y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};
4382
// Remainder of two operands of the same type. Integral division by zero is
// guarded elsewhere (HDivZeroCheck); floating-point uses fmod semantics.
class HRem : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    // (x % -1 is 0 for every x, so the result is exact.)
    return (y == -1) ? 0 : x % y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    // The generated code can use a runtime call.
    return SideEffects::CanTriggerGC();
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};
4433
// Guard that throws when the divisor of a following Div/Rem is zero.
class HDivZeroCheck : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check is transparent: its type is the type of the checked value.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Any two checks on the same input are interchangeable, so GVN may
  // deduplicate them.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4459
4460class HShl : public HBinaryOperation {
4461 public:
4462  HShl(Primitive::Type result_type,
4463       HInstruction* value,
4464       HInstruction* distance,
4465       uint32_t dex_pc = kNoDexPc)
4466      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4467    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4468    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4469  }
4470
4471  template <typename T>
4472  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4473    return value << (distance & max_shift_distance);
4474  }
4475
4476  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4477    return GetBlock()->GetGraph()->GetIntConstant(
4478        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4479  }
4480  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4481    return GetBlock()->GetGraph()->GetLongConstant(
4482        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4483  }
4484  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4485                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4486    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4487    UNREACHABLE();
4488  }
4489  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4490                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4491    LOG(FATAL) << DebugName() << " is not defined for float values";
4492    UNREACHABLE();
4493  }
4494  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4495                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4496    LOG(FATAL) << DebugName() << " is not defined for double values";
4497    UNREACHABLE();
4498  }
4499
4500  DECLARE_INSTRUCTION(Shl);
4501
4502 private:
4503  DISALLOW_COPY_AND_ASSIGN(HShl);
4504};
4505
// Arithmetic (signed) right shift. The shift distance is masked as the dex
// format specifies.
class HShr : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // NOTE(review): `>>` on a negative signed value is implementation-defined in
  // C++; an arithmetic (sign-extending) shift is expected here — confirm the
  // toolchain guarantees it.
  template <typename T>
  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4551
4552class HUShr : public HBinaryOperation {
4553 public:
4554  HUShr(Primitive::Type result_type,
4555        HInstruction* value,
4556        HInstruction* distance,
4557        uint32_t dex_pc = kNoDexPc)
4558      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4559    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4560    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4561  }
4562
4563  template <typename T>
4564  T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4565    typedef typename std::make_unsigned<T>::type V;
4566    V ux = static_cast<V>(value);
4567    return static_cast<T>(ux >> (distance & max_shift_distance));
4568  }
4569
4570  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4571    return GetBlock()->GetGraph()->GetIntConstant(
4572        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4573  }
4574  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4575    return GetBlock()->GetGraph()->GetLongConstant(
4576        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4577  }
4578  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4579                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4580    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4581    UNREACHABLE();
4582  }
4583  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4584                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4585    LOG(FATAL) << DebugName() << " is not defined for float values";
4586    UNREACHABLE();
4587  }
4588  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4589                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4590    LOG(FATAL) << DebugName() << " is not defined for double values";
4591    UNREACHABLE();
4592  }
4593
4594  DECLARE_INSTRUCTION(UShr);
4595
4596 private:
4597  DISALLOW_COPY_AND_ASSIGN(HUShr);
4598};
4599
4600class HAnd : public HBinaryOperation {
4601 public:
4602  HAnd(Primitive::Type result_type,
4603       HInstruction* left,
4604       HInstruction* right,
4605       uint32_t dex_pc = kNoDexPc)
4606      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4607
4608  bool IsCommutative() const OVERRIDE { return true; }
4609
4610  template <typename T> T Compute(T x, T y) const { return x & y; }
4611
4612  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4613    return GetBlock()->GetGraph()->GetIntConstant(
4614        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4615  }
4616  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4617    return GetBlock()->GetGraph()->GetLongConstant(
4618        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4619  }
4620  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4621                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4622    LOG(FATAL) << DebugName() << " is not defined for float values";
4623    UNREACHABLE();
4624  }
4625  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4626                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4627    LOG(FATAL) << DebugName() << " is not defined for double values";
4628    UNREACHABLE();
4629  }
4630
4631  DECLARE_INSTRUCTION(And);
4632
4633 private:
4634  DISALLOW_COPY_AND_ASSIGN(HAnd);
4635};
4636
4637class HOr : public HBinaryOperation {
4638 public:
4639  HOr(Primitive::Type result_type,
4640      HInstruction* left,
4641      HInstruction* right,
4642      uint32_t dex_pc = kNoDexPc)
4643      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4644
4645  bool IsCommutative() const OVERRIDE { return true; }
4646
4647  template <typename T> T Compute(T x, T y) const { return x | y; }
4648
4649  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4650    return GetBlock()->GetGraph()->GetIntConstant(
4651        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4652  }
4653  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4654    return GetBlock()->GetGraph()->GetLongConstant(
4655        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4656  }
4657  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4658                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4659    LOG(FATAL) << DebugName() << " is not defined for float values";
4660    UNREACHABLE();
4661  }
4662  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4663                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4664    LOG(FATAL) << DebugName() << " is not defined for double values";
4665    UNREACHABLE();
4666  }
4667
4668  DECLARE_INSTRUCTION(Or);
4669
4670 private:
4671  DISALLOW_COPY_AND_ASSIGN(HOr);
4672};
4673
4674class HXor : public HBinaryOperation {
4675 public:
4676  HXor(Primitive::Type result_type,
4677       HInstruction* left,
4678       HInstruction* right,
4679       uint32_t dex_pc = kNoDexPc)
4680      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4681
4682  bool IsCommutative() const OVERRIDE { return true; }
4683
4684  template <typename T> T Compute(T x, T y) const { return x ^ y; }
4685
4686  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4687    return GetBlock()->GetGraph()->GetIntConstant(
4688        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4689  }
4690  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4691    return GetBlock()->GetGraph()->GetLongConstant(
4692        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4693  }
4694  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4695                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4696    LOG(FATAL) << DebugName() << " is not defined for float values";
4697    UNREACHABLE();
4698  }
4699  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4700                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4701    LOG(FATAL) << DebugName() << " is not defined for double values";
4702    UNREACHABLE();
4703  }
4704
4705  DECLARE_INSTRUCTION(Xor);
4706
4707 private:
4708  DISALLOW_COPY_AND_ASSIGN(HXor);
4709};
4710
4711class HRor : public HBinaryOperation {
4712 public:
4713  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4714    : HBinaryOperation(result_type, value, distance) {
4715    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4716    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4717  }
4718
4719  template <typename T>
4720  T Compute(T value, int32_t distance, int32_t max_shift_value) const {
4721    typedef typename std::make_unsigned<T>::type V;
4722    V ux = static_cast<V>(value);
4723    if ((distance & max_shift_value) == 0) {
4724      return static_cast<T>(ux);
4725    } else {
4726      const V reg_bits = sizeof(T) * 8;
4727      return static_cast<T>(ux >> (distance & max_shift_value)) |
4728                           (value << (reg_bits - (distance & max_shift_value)));
4729    }
4730  }
4731
4732  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4733    return GetBlock()->GetGraph()->GetIntConstant(
4734        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4735  }
4736  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4737    return GetBlock()->GetGraph()->GetLongConstant(
4738        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4739  }
4740  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4741                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4742    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4743    UNREACHABLE();
4744  }
4745  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4746                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4747    LOG(FATAL) << DebugName() << " is not defined for float values";
4748    UNREACHABLE();
4749  }
4750  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4751                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4752    LOG(FATAL) << DebugName() << " is not defined for double values";
4753    UNREACHABLE();
4754  }
4755
4756  DECLARE_INSTRUCTION(Ror);
4757
4758 private:
4759  DISALLOW_COPY_AND_ASSIGN(HRor);
4760};
4761
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument can never be null; all other parameters
    // are conservatively assumed nullable (refined later via SetCanBeNull()).
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  // Whether the parameter may hold a null reference.
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4805
4806class HNot : public HUnaryOperation {
4807 public:
4808  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4809      : HUnaryOperation(result_type, input, dex_pc) {}
4810
4811  bool CanBeMoved() const OVERRIDE { return true; }
4812  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4813    return true;
4814  }
4815
4816  template <typename T> T Compute(T x) const { return ~x; }
4817
4818  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4819    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4820  }
4821  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4822    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4823  }
4824  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4825    LOG(FATAL) << DebugName() << " is not defined for float values";
4826    UNREACHABLE();
4827  }
4828  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4829    LOG(FATAL) << DebugName() << " is not defined for double values";
4830    UNREACHABLE();
4831  }
4832
4833  DECLARE_INSTRUCTION(Not);
4834
4835 private:
4836  DISALLOW_COPY_AND_ASSIGN(HNot);
4837};
4838
4839class HBooleanNot : public HUnaryOperation {
4840 public:
4841  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
4842      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
4843
4844  bool CanBeMoved() const OVERRIDE { return true; }
4845  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4846    return true;
4847  }
4848
4849  template <typename T> bool Compute(T x) const {
4850    DCHECK(IsUint<1>(x)) << x;
4851    return !x;
4852  }
4853
4854  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4855    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4856  }
4857  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4858    LOG(FATAL) << DebugName() << " is not defined for long values";
4859    UNREACHABLE();
4860  }
4861  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4862    LOG(FATAL) << DebugName() << " is not defined for float values";
4863    UNREACHABLE();
4864  }
4865  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4866    LOG(FATAL) << DebugName() << " is not defined for double values";
4867    UNREACHABLE();
4868  }
4869
4870  DECLARE_INSTRUCTION(BooleanNot);
4871
4872 private:
4873  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
4874};
4875
4876class HTypeConversion : public HExpression<1> {
4877 public:
4878  // Instantiate a type conversion of `input` to `result_type`.
4879  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
4880      : HExpression(result_type,
4881                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
4882                    dex_pc) {
4883    SetRawInputAt(0, input);
4884    // Invariant: We should never generate a conversion to a Boolean value.
4885    DCHECK_NE(Primitive::kPrimBoolean, result_type);
4886  }
4887
4888  HInstruction* GetInput() const { return InputAt(0); }
4889  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
4890  Primitive::Type GetResultType() const { return GetType(); }
4891
4892  bool CanBeMoved() const OVERRIDE { return true; }
4893  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
4894
4895  // Try to statically evaluate the conversion and return a HConstant
4896  // containing the result.  If the input cannot be converted, return nullptr.
4897  HConstant* TryStaticEvaluation() const;
4898
4899  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
4900                                                    Primitive::Type result_type) {
4901    // Some architectures may not require the 'GC' side effects, but at this point
4902    // in the compilation process we do not know what architecture we will
4903    // generate code for, so we must be conservative.
4904    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
4905        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
4906      return SideEffects::CanTriggerGC();
4907    }
4908    return SideEffects::None();
4909  }
4910
4911  DECLARE_INSTRUCTION(TypeConversion);
4912
4913 private:
4914  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
4915};
4916
// Sentinel register number; -1 intentionally wraps to the maximum uint32_t.
static constexpr uint32_t kNoRegNumber = -1;

class HNullCheck : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Two null checks on the same value are interchangeable, so the check can
  // be moved and deduplicated.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment recording the state at this point.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // The result of a successful null check is, by definition, non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
4945
// Immutable description of a field access, shared by the field get/set
// instructions (e.g. HInstanceFieldGet/HInstanceFieldSet).
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  // Byte offset of the field within the object (or class, for statics).
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // Field index (passed in as `field_idx` by the field instructions).
  const uint32_t index_;
  // Class-def index of the class declaring this field.
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};
4980
// Reads an instance field of the object `value` (input 0).
class HInstanceFieldGet : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering constraints and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets match when they read the same field offset; the receiver is
  // compared separately through the instruction inputs.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An access at an offset below kPageSize from a null receiver faults in the
  // unmapped first page, so the access itself can serve as the null check.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  // Mix the field offset into the base instruction hash.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5032
// Writes `value` (input 1) into an instance field of `object` (input 0).
class HInstanceFieldSet : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Conservatively assume the stored value may be null until an analysis
    // clears the flag via ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // An access at an offset below kPageSize from a null receiver faults in the
  // unmapped first page, so the access itself can serve as the null check.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  // Packed flag layout; starts right after the generic HInstruction bits.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5083
5084class HArrayGet : public HExpression<2> {
5085 public:
5086  HArrayGet(HInstruction* array,
5087            HInstruction* index,
5088            Primitive::Type type,
5089            uint32_t dex_pc,
5090            SideEffects additional_side_effects = SideEffects::None())
5091      : HExpression(type,
5092                    SideEffects::ArrayReadOfType(type).Union(additional_side_effects),
5093                    dex_pc) {
5094    SetRawInputAt(0, array);
5095    SetRawInputAt(1, index);
5096  }
5097
5098  bool CanBeMoved() const OVERRIDE { return true; }
5099  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5100    return true;
5101  }
5102  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5103    // TODO: We can be smarter here.
5104    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
5105    // which generates the implicit null check. There are cases when these can be removed
5106    // to produce better code. If we ever add optimizations to do so we should allow an
5107    // implicit check here (as long as the address falls in the first page).
5108    return false;
5109  }
5110
5111  bool IsEquivalentOf(HArrayGet* other) const {
5112    bool result = (GetDexPc() == other->GetDexPc());
5113    if (kIsDebugBuild && result) {
5114      DCHECK_EQ(GetBlock(), other->GetBlock());
5115      DCHECK_EQ(GetArray(), other->GetArray());
5116      DCHECK_EQ(GetIndex(), other->GetIndex());
5117      if (Primitive::IsIntOrLongType(GetType())) {
5118        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
5119      } else {
5120        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
5121        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
5122      }
5123    }
5124    return result;
5125  }
5126
5127  HInstruction* GetArray() const { return InputAt(0); }
5128  HInstruction* GetIndex() const { return InputAt(1); }
5129
5130  DECLARE_INSTRUCTION(ArrayGet);
5131
5132 private:
5133  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
5134};
5135
// Writes `value` into `array` at `index`: array[index] = value. A reference
// store may require a runtime type check and can then throw
// ArrayStoreException.
class HArraySet : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc,
            SideEffects additional_side_effects = SideEffects::None())
      : HTemplateInstruction(
            SideEffects::ArrayWriteOfType(expected_component_type).Union(
                SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                    additional_side_effects),
            dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores may need a runtime type check.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    // Conservatively assume the stored value may be null.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called when the store is proven safe, e.g. the value's type is known to
  // be compatible with the array.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  // Called when the stored value is proven non-null.
  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type recorded at graph-building time; see GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // A reference store may call a runtime helper, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed field/flag layout, starting after the generic instruction bits.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5232
// Returns the length of the array given as input 0.
class HArrayLength : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  // The length is immutable, so the read can be freely moved and deduplicated.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Reading the length dereferences the array object, so the read itself can
  // serve as the implicit null check for that array.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5255
// Checks that `index` (input 0) is a valid index for an array of the given
// `length` (input 1); otherwise IndexOutOfBoundsException is thrown.
class HBoundsCheck : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
  HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  // Identical checks are interchangeable and may be moved/deduplicated.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Throwing requires an environment recording the state at this point.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};
5283
// Safepoint at which the runtime may suspend the executing thread; the check
// can trigger GC, hence the CanTriggerGC side effect.
class HSuspendCheck : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  // An environment is required so the runtime can inspect the state of the
  // suspended frame.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};
5305
// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  // An environment is required so local variable values can be reconstructed
  // at this point (see class comment).
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};
5322
5323/**
5324 * Instruction to load a Class object.
5325 */
5326class HLoadClass : public HExpression<1> {
5327 public:
5328  HLoadClass(HCurrentMethod* current_method,
5329             uint16_t type_index,
5330             const DexFile& dex_file,
5331             bool is_referrers_class,
5332             uint32_t dex_pc,
5333             bool needs_access_check,
5334             bool is_in_dex_cache)
5335      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5336        type_index_(type_index),
5337        dex_file_(dex_file),
5338        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5339    // Referrers class should not need access check. We never inline unverified
5340    // methods so we can't possibly end up in this situation.
5341    DCHECK(!is_referrers_class || !needs_access_check);
5342
5343    SetPackedFlag<kFlagIsReferrersClass>(is_referrers_class);
5344    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5345    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
5346    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5347    SetRawInputAt(0, current_method);
5348  }
5349
5350  bool CanBeMoved() const OVERRIDE { return true; }
5351
5352  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
5353    // Note that we don't need to test for generate_clinit_check_.
5354    // Whether or not we need to generate the clinit check is processed in
5355    // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
5356    return other->AsLoadClass()->type_index_ == type_index_ &&
5357        other->AsLoadClass()->GetPackedFields() == GetPackedFields();
5358  }
5359
5360  size_t ComputeHashCode() const OVERRIDE { return type_index_; }
5361
5362  uint16_t GetTypeIndex() const { return type_index_; }
5363  bool CanBeNull() const OVERRIDE { return false; }
5364
5365  bool NeedsEnvironment() const OVERRIDE {
5366    return CanCallRuntime();
5367  }
5368
5369  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5370    // The entrypoint the code generator is going to call does not do
5371    // clinit of the class.
5372    DCHECK(!NeedsAccessCheck());
5373    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5374  }
5375
5376  bool CanCallRuntime() const {
5377    return MustGenerateClinitCheck() ||
5378           (!IsReferrersClass() && !IsInDexCache()) ||
5379           NeedsAccessCheck();
5380  }
5381
5382
5383  bool CanThrow() const OVERRIDE {
5384    return CanCallRuntime();
5385  }
5386
5387  ReferenceTypeInfo GetLoadedClassRTI() {
5388    return loaded_class_rti_;
5389  }
5390
5391  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5392    // Make sure we only set exact types (the loaded class should never be merged).
5393    DCHECK(rti.IsExact());
5394    loaded_class_rti_ = rti;
5395  }
5396
5397  const DexFile& GetDexFile() { return dex_file_; }
5398
5399  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }
5400
5401  static SideEffects SideEffectsForArchRuntimeCalls() {
5402    return SideEffects::CanTriggerGC();
5403  }
5404
5405  bool IsReferrersClass() const { return GetPackedFlag<kFlagIsReferrersClass>(); }
5406  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5407  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5408  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5409
5410  DECLARE_INSTRUCTION(LoadClass);
5411
5412 private:
5413  static constexpr size_t kFlagIsReferrersClass    = kNumberOfExpressionPackedBits;
5414  static constexpr size_t kFlagNeedsAccessCheck    = kFlagIsReferrersClass + 1;
5415  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
5416  // Whether this instruction must generate the initialization check.
5417  // Used for code generation.
5418  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
5419  static constexpr size_t kNumberOfLoadClassPackedBits = kFlagGenerateClInitCheck + 1;
5420  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5421
5422  const uint16_t type_index_;
5423  const DexFile& dex_file_;
5424
5425  ReferenceTypeInfo loaded_class_rti_;
5426
5427  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5428};
5429
5430class HLoadString : public HExpression<1> {
5431 public:
5432  // Determines how to load the String.
5433  enum class LoadKind {
5434    // Use boot image String* address that will be known at link time.
5435    // Used for boot image strings referenced by boot image code in non-PIC mode.
5436    kBootImageLinkTimeAddress,
5437
5438    // Use PC-relative boot image String* address that will be known at link time.
5439    // Used for boot image strings referenced by boot image code in PIC mode.
5440    kBootImageLinkTimePcRelative,
5441
5442    // Use a known boot image String* address, embedded in the code by the codegen.
5443    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
5444    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5445    // GetIncludePatchInformation().
5446    kBootImageAddress,
5447
5448    // Load from the resolved strings array at an absolute address.
5449    // Used for strings outside the boot image referenced by JIT-compiled code.
5450    kDexCacheAddress,
5451
5452    // Load from resolved strings array in the dex cache using a PC-relative load.
5453    // Used for strings outside boot image when we know that we can access
5454    // the dex cache arrays using a PC-relative load.
5455    kDexCachePcRelative,
5456
5457    // Load from resolved strings array accessed through the class loaded from
5458    // the compiled method's own ArtMethod*. This is the default access type when
5459    // all other types are unavailable.
5460    kDexCacheViaMethod,
5461
5462    kLast = kDexCacheViaMethod
5463  };
5464
5465  HLoadString(HCurrentMethod* current_method,
5466              uint32_t string_index,
5467              const DexFile& dex_file,
5468              uint32_t dex_pc)
5469      : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5470        string_index_(string_index) {
5471    SetPackedFlag<kFlagIsInDexCache>(false);
5472    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
5473    load_data_.ref.dex_file = &dex_file;
5474    SetRawInputAt(0, current_method);
5475  }
5476
5477  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5478    DCHECK(HasAddress(load_kind));
5479    load_data_.address = address;
5480    SetLoadKindInternal(load_kind);
5481  }
5482
5483  void SetLoadKindWithStringReference(LoadKind load_kind,
5484                                      const DexFile& dex_file,
5485                                      uint32_t string_index) {
5486    DCHECK(HasStringReference(load_kind));
5487    load_data_.ref.dex_file = &dex_file;
5488    string_index_ = string_index;
5489    SetLoadKindInternal(load_kind);
5490  }
5491
5492  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5493                                        const DexFile& dex_file,
5494                                        uint32_t element_index) {
5495    DCHECK(HasDexCacheReference(load_kind));
5496    load_data_.ref.dex_file = &dex_file;
5497    load_data_.ref.dex_cache_element_index = element_index;
5498    SetLoadKindInternal(load_kind);
5499  }
5500
  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  // Defined after the class (its DCHECK needs operator<<(., LoadKind)).
  const DexFile& GetDexFile() const;

  uint32_t GetStringIndex() const {
    DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
    return string_index_;
  }

  // Defined after the class (its DCHECK needs operator<<(., LoadKind)).
  uint32_t GetDexCacheElementOffset() const;

  uint64_t GetAddress() const {
    DCHECK(HasAddress(GetLoadKind()));
    return load_data_.address;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other) const OVERRIDE;

  // The string index is stable across load kinds, so it can serve as the hash.
  size_t ComputeHashCode() const OVERRIDE { return string_index_; }
5524
  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress) {
      // Boot image strings never require a runtime call.
      return false;
    }
    return !IsInDexCache();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  // A successfully loaded string literal is never null.
  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }

  // Marks the string as known to be resolved; the load can no longer call
  // into the runtime, so the environment is dropped.
  void MarkInDexCache() {
    SetPackedFlag<kFlagIsInDexCache>(true);
    DCHECK(!NeedsEnvironment());
    RemoveEnvironment();
  }

  // Input 0 may be null (see the DCHECK in AddSpecialInput()), in which case
  // the instruction has no inputs at all.
  size_t InputCount() const OVERRIDE {
    return (InputAt(0) != nullptr) ? 1u : 0u;
  }

  // Adds a PC-relative base input; defined after the class.
  void AddSpecialInput(HInstruction* special_input);

  DECLARE_INSTRUCTION(LoadString);
5563
 private:
  // Packed field layout: the is-in-dex-cache flag, then the load kind.
  static constexpr size_t kFlagIsInDexCache = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  // Load kinds that identify the string by dex file + string index.
  static bool HasStringReference(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kDexCacheViaMethod;
  }

  // Load kinds that load directly from a recorded address.
  static bool HasAddress(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
  }

  // Load kinds that load from a dex cache array element.
  static bool HasDexCacheReference(LoadKind load_kind) {
    return load_kind == LoadKind::kDexCachePcRelative;
  }
5586
  void SetLoadKindInternal(LoadKind load_kind);

  // String index serves also as the hash code and it's also needed for slow-paths,
  // so it must not be overwritten with other load data.
  uint32_t string_index_;

  // Kind-specific payload; the active member depends on GetLoadKind()
  // (see HasStringReference()/HasAddress()/HasDexCacheReference()).
  union {
    struct {
      const DexFile* dex_file;            // For string reference and dex cache reference.
      uint32_t dex_cache_element_index;   // Only for dex cache reference.
    } ref;
    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
  } load_data_;
5600
5601  DISALLOW_COPY_AND_ASSIGN(HLoadString);
5602};
5603std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5604
5605// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5606inline const DexFile& HLoadString::GetDexFile() const {
5607  DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
5608      << GetLoadKind();
5609  return *load_data_.ref.dex_file;
5610}
5611
5612// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5613inline uint32_t HLoadString::GetDexCacheElementOffset() const {
5614  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5615  return load_data_.ref.dex_cache_element_index;
5616}
5617
5618// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5619inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5620  // The special input is used for PC-relative loads on some architectures.
5621  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5622         GetLoadKind() == LoadKind::kDexCachePcRelative) << GetLoadKind();
5623  DCHECK(InputAt(0) == nullptr);
5624  SetRawInputAt(0u, special_input);
5625  special_input->AddUseAt(this, 0);
5626}
5627
5628/**
5629 * Performs an initialization check on its Class object input.
5630 */
5631class HClinitCheck : public HExpression<1> {
5632 public:
5633  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5634      : HExpression(
5635            Primitive::kPrimNot,
5636            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5637            dex_pc) {
5638    SetRawInputAt(0, constant);
5639  }
5640
5641  bool CanBeMoved() const OVERRIDE { return true; }
5642  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5643    return true;
5644  }
5645
5646  bool NeedsEnvironment() const OVERRIDE {
5647    // May call runtime to initialize the class.
5648    return true;
5649  }
5650
5651  bool CanThrow() const OVERRIDE { return true; }
5652
5653  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5654
5655  DECLARE_INSTRUCTION(ClinitCheck);
5656
5657 private:
5658  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5659};
5660
// Reads a static field. Input 0 is the class (holder) reference.
class HStaticFieldGet : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Volatile reads must keep their ordering semantics, so they cannot be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // NOTE(review): `other` is presumably guaranteed to be a HStaticFieldGet by
  // the caller before this is invoked — confirm against GVN usage.
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
5709
// Writes a static field. Input 0 is the class (holder), input 1 the value.
class HStaticFieldSet : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Conservatively assume the stored value may be null until an analysis
    // clears the flag via ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
5757
// Reads an instance field that could not be resolved at compile time; needs
// an environment and may throw (the access presumably goes through the
// runtime, hence the broad side effects).
class HUnresolvedInstanceFieldGet : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
5782
5783class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
5784 public:
5785  HUnresolvedInstanceFieldSet(HInstruction* obj,
5786                              HInstruction* value,
5787                              Primitive::Type field_type,
5788                              uint32_t field_index,
5789                              uint32_t dex_pc)
5790      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
5791        field_index_(field_index) {
5792    SetPackedField<FieldTypeField>(field_type);
5793    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
5794    SetRawInputAt(0, obj);
5795    SetRawInputAt(1, value);
5796  }
5797
5798  bool NeedsEnvironment() const OVERRIDE { return true; }
5799  bool CanThrow() const OVERRIDE { return true; }
5800
5801  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
5802  uint32_t GetFieldIndex() const { return field_index_; }
5803
5804  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
5805
5806 private:
5807  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
5808  static constexpr size_t kFieldFieldTypeSize =
5809      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5810  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
5811      kFieldFieldType + kFieldFieldTypeSize;
5812  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5813                "Too many packed fields.");
5814  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
5815
5816  const uint32_t field_index_;
5817
5818  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
5819};
5820
// Reads a static field that could not be resolved at compile time; needs an
// environment and may throw. Takes no inputs (the field is identified solely
// by its index).
class HUnresolvedStaticFieldGet : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
5843
// Writes `value` to a static field that could not be resolved at compile
// time; needs an environment and may throw.
class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // The field type is stored as a packed field rather than a member.
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
5879
5880// Implement the move-exception DEX instruction.
5881class HLoadException : public HExpression<0> {
5882 public:
5883  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
5884      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}
5885
5886  bool CanBeNull() const OVERRIDE { return false; }
5887
5888  DECLARE_INSTRUCTION(LoadException);
5889
5890 private:
5891  DISALLOW_COPY_AND_ASSIGN(HLoadException);
5892};
5893
5894// Implicit part of move-exception which clears thread-local exception storage.
5895// Must not be removed because the runtime expects the TLS to get cleared.
5896class HClearException : public HTemplateInstruction<0> {
5897 public:
5898  explicit HClearException(uint32_t dex_pc = kNoDexPc)
5899      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}
5900
5901  DECLARE_INSTRUCTION(ClearException);
5902
5903 private:
5904  DISALLOW_COPY_AND_ASSIGN(HClearException);
5905};
5906
5907class HThrow : public HTemplateInstruction<1> {
5908 public:
5909  HThrow(HInstruction* exception, uint32_t dex_pc)
5910      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
5911    SetRawInputAt(0, exception);
5912  }
5913
5914  bool IsControlFlow() const OVERRIDE { return true; }
5915
5916  bool NeedsEnvironment() const OVERRIDE { return true; }
5917
5918  bool CanThrow() const OVERRIDE { return true; }
5919
5920
5921  DECLARE_INSTRUCTION(Throw);
5922
5923 private:
5924  DISALLOW_COPY_AND_ASSIGN(HThrow);
5925};
5926
5927/**
5928 * Implementation strategies for the code generator of a HInstanceOf
5929 * or `HCheckCast`.
5930 */
5931enum class TypeCheckKind {
5932  kUnresolvedCheck,       // Check against an unresolved type.
5933  kExactCheck,            // Can do a single class compare.
5934  kClassHierarchyCheck,   // Can just walk the super class chain.
5935  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
5936  kInterfaceCheck,        // No optimization yet when checking against an interface.
5937  kArrayObjectCheck,      // Can just check if the array is not primitive.
5938  kArrayCheck,            // No optimization yet when checking against a generic array.
5939  kLast = kArrayCheck
5940};
5941
5942std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
5943
// Implements instance-of: evaluates to a boolean telling whether `object`
// is an instance of the class loaded by `constant`.
class HInstanceOf : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment exactly when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  // Only an exact check is guaranteed never to enter the runtime.
  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed layout: type-check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
5997
// Narrows the reference type information of its reference-typed input; used
// by reference type propagation (see the upper_bound_ comment below).
class HBoundType : public HExpression<1> {
 public:
  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  // The nullability may only be narrowed within the upper-bound constraint.
  void SetCanBeNull(bool can_be_null) {
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6041
// Checks that `object` can be cast to the class loaded by `constant`,
// throwing a CheckCastError otherwise.
class HCheckCast : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed layout: type-check kind, then the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6086
6087class HMemoryBarrier : public HTemplateInstruction<0> {
6088 public:
6089  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6090      : HTemplateInstruction(
6091            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6092    SetPackedField<BarrierKindField>(barrier_kind);
6093  }
6094
6095  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6096
6097  DECLARE_INSTRUCTION(MemoryBarrier);
6098
6099 private:
6100  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6101  static constexpr size_t kFieldBarrierKindSize =
6102      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6103  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6104      kFieldBarrierKind + kFieldBarrierKindSize;
6105  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6106                "Too many packed fields.");
6107  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6108
6109  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6110};
6111
6112class HMonitorOperation : public HTemplateInstruction<1> {
6113 public:
6114  enum class OperationKind {
6115    kEnter,
6116    kExit,
6117    kLast = kExit
6118  };
6119
6120  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6121    : HTemplateInstruction(
6122          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6123          dex_pc) {
6124    SetPackedField<OperationKindField>(kind);
6125    SetRawInputAt(0, object);
6126  }
6127
6128  // Instruction may go into runtime, so we need an environment.
6129  bool NeedsEnvironment() const OVERRIDE { return true; }
6130
6131  bool CanThrow() const OVERRIDE {
6132    // Verifier guarantees that monitor-exit cannot throw.
6133    // This is important because it allows the HGraphBuilder to remove
6134    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6135    return IsEnter();
6136  }
6137
6138  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6139  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6140
6141  DECLARE_INSTRUCTION(MonitorOperation);
6142
6143 private:
6144  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6145  static constexpr size_t kFieldOperationKindSize =
6146      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6147  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6148      kFieldOperationKind + kFieldOperationKindSize;
6149  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6150                "Too many packed fields.");
6151  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6152
6153 private:
6154  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6155};
6156
6157class HSelect : public HExpression<3> {
6158 public:
6159  HSelect(HInstruction* condition,
6160          HInstruction* true_value,
6161          HInstruction* false_value,
6162          uint32_t dex_pc)
6163      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
6164    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
6165
6166    // First input must be `true_value` or `false_value` to allow codegens to
6167    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
6168    // that architectures which implement HSelect as a conditional move also
6169    // will not need to invert the condition.
6170    SetRawInputAt(0, false_value);
6171    SetRawInputAt(1, true_value);
6172    SetRawInputAt(2, condition);
6173  }
6174
6175  HInstruction* GetFalseValue() const { return InputAt(0); }
6176  HInstruction* GetTrueValue() const { return InputAt(1); }
6177  HInstruction* GetCondition() const { return InputAt(2); }
6178
6179  bool CanBeMoved() const OVERRIDE { return true; }
6180  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
6181
6182  bool CanBeNull() const OVERRIDE {
6183    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
6184  }
6185
6186  DECLARE_INSTRUCTION(Select);
6187
6188 private:
6189  DISALLOW_COPY_AND_ASSIGN(HSelect);
6190};
6191
// A single source -> destination transfer within a parallel move, manipulated
// by the parallel move resolver (see the pending/eliminated protocol below).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source). Returns the destination so it can be
  // restored later via ClearPending().
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination saved by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // Pending = destination cleared while the source is still set (a cleared
  // source would instead mean the move was eliminated).
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6265
6266std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6267
6268static constexpr size_t kDefaultNumberOfMoves = 4;
6269
// A set of moves that must behave as if performed simultaneously; the
// parallel move resolver later sequences them.
class HParallelMove : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that we do not emit two moves
  // for the same instruction (except the spill-slot special case below) and
  // that no two moves write overlapping destinations.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6325
6326}  // namespace art
6327
6328#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6329#include "nodes_shared.h"
6330#endif
6331#ifdef ART_ENABLE_CODEGEN_arm
6332#include "nodes_arm.h"
6333#endif
6334#ifdef ART_ENABLE_CODEGEN_arm64
6335#include "nodes_arm64.h"
6336#endif
6337#ifdef ART_ENABLE_CODEGEN_x86
6338#include "nodes_x86.h"
6339#endif
6340
6341namespace art {
6342
// Base class for visitors over the instructions of a graph. The
// FOR_EACH_INSTRUCTION macro generates one virtual Visit##name method per
// instruction class; each defaults to calling VisitInstruction, so a
// subclass only overrides the cases it cares about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback invoked by every generated Visit##name method that is not
  // overridden. Does nothing by default.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6372
// Visitor whose generated Visit##name methods delegate to the Visit method
// of the instruction's super class instead of directly to VisitInstruction.
// This lets a subclass handle a whole family of instructions (e.g. all
// binary operations) by overriding a single Visit method.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6389
6390class HInsertionOrderIterator : public ValueObject {
6391 public:
6392  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}
6393
6394  bool Done() const { return index_ == graph_.GetBlocks().size(); }
6395  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
6396  void Advance() { ++index_; }
6397
6398 private:
6399  const HGraph& graph_;
6400  size_t index_;
6401
6402  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
6403};
6404
6405class HReversePostOrderIterator : public ValueObject {
6406 public:
6407  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
6408    // Check that reverse post order of the graph has been built.
6409    DCHECK(!graph.GetReversePostOrder().empty());
6410  }
6411
6412  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
6413  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
6414  void Advance() { ++index_; }
6415
6416 private:
6417  const HGraph& graph_;
6418  size_t index_;
6419
6420  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
6421};
6422
6423class HPostOrderIterator : public ValueObject {
6424 public:
6425  explicit HPostOrderIterator(const HGraph& graph)
6426      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
6427    // Check that reverse post order of the graph has been built.
6428    DCHECK(!graph.GetReversePostOrder().empty());
6429  }
6430
6431  bool Done() const { return index_ == 0; }
6432  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
6433  void Advance() { --index_; }
6434
6435 private:
6436  const HGraph& graph_;
6437  size_t index_;
6438
6439  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
6440};
6441
6442class HLinearPostOrderIterator : public ValueObject {
6443 public:
6444  explicit HLinearPostOrderIterator(const HGraph& graph)
6445      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
6446
6447  bool Done() const { return index_ == 0; }
6448
6449  HBasicBlock* Current() const { return order_[index_ - 1u]; }
6450
6451  void Advance() {
6452    --index_;
6453    DCHECK_GE(index_, 0U);
6454  }
6455
6456 private:
6457  const ArenaVector<HBasicBlock*>& order_;
6458  size_t index_;
6459
6460  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
6461};
6462
6463class HLinearOrderIterator : public ValueObject {
6464 public:
6465  explicit HLinearOrderIterator(const HGraph& graph)
6466      : order_(graph.GetLinearOrder()), index_(0) {}
6467
6468  bool Done() const { return index_ == order_.size(); }
6469  HBasicBlock* Current() const { return order_[index_]; }
6470  void Advance() { ++index_; }
6471
6472 private:
6473  const ArenaVector<HBasicBlock*>& order_;
6474  size_t index_;
6475
6476  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
6477};
6478
// Iterator over the blocks that are part of the loop. Includes blocks that
// are part of an inner loop. The blocks are iterated in increasing order of
// their block id.
6482class HBlocksInLoopIterator : public ValueObject {
6483 public:
6484  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6485      : blocks_in_loop_(info.GetBlocks()),
6486        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6487        index_(0) {
6488    if (!blocks_in_loop_.IsBitSet(index_)) {
6489      Advance();
6490    }
6491  }
6492
6493  bool Done() const { return index_ == blocks_.size(); }
6494  HBasicBlock* Current() const { return blocks_[index_]; }
6495  void Advance() {
6496    ++index_;
6497    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6498      if (blocks_in_loop_.IsBitSet(index_)) {
6499        break;
6500      }
6501    }
6502  }
6503
6504 private:
6505  const BitVector& blocks_in_loop_;
6506  const ArenaVector<HBasicBlock*>& blocks_;
6507  size_t index_;
6508
6509  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6510};
6511
// Iterator over the blocks that are part of the loop. Includes blocks that
// are part of an inner loop. The blocks are iterated in reverse post order.
6515class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6516 public:
6517  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6518      : blocks_in_loop_(info.GetBlocks()),
6519        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
6520        index_(0) {
6521    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6522      Advance();
6523    }
6524  }
6525
6526  bool Done() const { return index_ == blocks_.size(); }
6527  HBasicBlock* Current() const { return blocks_[index_]; }
6528  void Advance() {
6529    ++index_;
6530    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6531      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6532        break;
6533      }
6534    }
6535  }
6536
6537 private:
6538  const BitVector& blocks_in_loop_;
6539  const ArenaVector<HBasicBlock*>& blocks_;
6540  size_t index_;
6541
6542  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
6543};
6544
6545inline int64_t Int64FromConstant(HConstant* constant) {
6546  if (constant->IsIntConstant()) {
6547    return constant->AsIntConstant()->GetValue();
6548  } else if (constant->IsLongConstant()) {
6549    return constant->AsLongConstant()->GetValue();
6550  } else {
6551    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6552    return 0;
6553  }
6554}
6555
// Returns whether `lhs` and `rhs` are the same dex file, by object identity.
inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}
6565
6566#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6567  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6568  inline const H##type* HInstruction::As##type() const {                       \
6569    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6570  }                                                                            \
6571  inline H##type* HInstruction::As##type() {                                   \
6572    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6573  }
6574
6575  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6576#undef INSTRUCTION_TYPE_CHECK
6577
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting after location `after`. Blocks following `after` are moved
// accordingly. Note that the vacated slots keep their previous values
// (std::copy_backward copies, it does not erase); the caller is expected
// to overwrite them.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift the tail [after + 1, old_size) to the end of the enlarged vector,
  // leaving `number_of_new_blocks` reusable slots right after `after`.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6589
6590}  // namespace art
6591
6592#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6593