// nodes.h revision 8de5916666ab5d146ac1bdac7d7748e197ae347e
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "dex_file.h"
32#include "dex_file_types.h"
33#include "entrypoints/quick/quick_entrypoints_enum.h"
34#include "handle.h"
35#include "handle_scope.h"
36#include "invoke_type.h"
37#include "intrinsics_enum.h"
38#include "locations.h"
39#include "method_reference.h"
40#include "mirror/class.h"
41#include "offsets.h"
42#include "primitive.h"
43#include "utils/intrusive_forward_list.h"
44
45namespace art {
46
47class GraphChecker;
48class HBasicBlock;
49class HCurrentMethod;
50class HDoubleConstant;
51class HEnvironment;
52class HFloatConstant;
53class HGraphBuilder;
54class HGraphVisitor;
55class HInstruction;
56class HIntConstant;
57class HInvoke;
58class HLongConstant;
59class HNullConstant;
60class HPhi;
61class HSuspendCheck;
62class HTryBoundary;
63class LiveInterval;
64class LocationSummary;
65class SlowPathCode;
66class SsaBuilder;
67
68namespace mirror {
69class DexCache;
70}  // namespace mirror
71
72static const int kDefaultNumberOfBlocks = 8;
73static const int kDefaultNumberOfSuccessors = 2;
74static const int kDefaultNumberOfPredecessors = 2;
75static const int kDefaultNumberOfExceptionalPredecessors = 0;
76static const int kDefaultNumberOfDominatedBlocks = 1;
77static const int kDefaultNumberOfBackEdges = 1;
78
79// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
80static constexpr int32_t kMaxIntShiftDistance = 0x1f;
81// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
82static constexpr int32_t kMaxLongShiftDistance = 0x3f;
83
84static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
85static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
86
87static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
88
89static constexpr uint32_t kNoDexPc = -1;
90
91inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
92  // For the purposes of the compiler, the dex files must actually be the same object
93  // if we want to safely treat them as the same. This is especially important for JIT
94  // as custom class loaders can open the same underlying file (or memory) multiple
95  // times and provide different class resolution but no two class loaders should ever
96  // use the same DexFile object - doing so is an unsupported hack that can lead to
97  // all sorts of weird failures.
98  return &lhs == &rhs;
99}
100
101enum IfCondition {
102  // All types.
103  kCondEQ,  // ==
104  kCondNE,  // !=
105  // Signed integers and floating-point numbers.
106  kCondLT,  // <
107  kCondLE,  // <=
108  kCondGT,  // >
109  kCondGE,  // >=
110  // Unsigned integers.
111  kCondB,   // <
112  kCondBE,  // <=
113  kCondA,   // >
114  kCondAE,  // >=
115  // First and last aliases.
116  kCondFirst = kCondEQ,
117  kCondLast = kCondAE,
118};
119
120enum GraphAnalysisResult {
121  kAnalysisSkipped,
122  kAnalysisInvalidBytecode,
123  kAnalysisFailThrowCatchLoop,
124  kAnalysisFailAmbiguousArrayOp,
125  kAnalysisSuccess,
126};
127
128template <typename T>
129static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
130  return static_cast<typename std::make_unsigned<T>::type>(x);
131}
132
133class HInstructionList : public ValueObject {
134 public:
135  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}
136
137  void AddInstruction(HInstruction* instruction);
138  void RemoveInstruction(HInstruction* instruction);
139
140  // Insert `instruction` before/after an existing instruction `cursor`.
141  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
142  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
143
144  // Return true if this list contains `instruction`.
145  bool Contains(HInstruction* instruction) const;
146
147  // Return true if `instruction1` is found before `instruction2` in
148  // this instruction list and false otherwise.  Abort if none
149  // of these instructions is found.
150  bool FoundBefore(const HInstruction* instruction1,
151                   const HInstruction* instruction2) const;
152
153  bool IsEmpty() const { return first_instruction_ == nullptr; }
154  void Clear() { first_instruction_ = last_instruction_ = nullptr; }
155
156  // Update the block of all instructions to be `block`.
157  void SetBlockOfInstructions(HBasicBlock* block) const;
158
159  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
160  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
161  void Add(const HInstructionList& instruction_list);
162
163  // Return the number of instructions in the list. This is an expensive operation.
164  size_t CountSize() const;
165
166 private:
167  HInstruction* first_instruction_;
168  HInstruction* last_instruction_;
169
170  friend class HBasicBlock;
171  friend class HGraph;
172  friend class HInstruction;
173  friend class HInstructionIterator;
174  friend class HInstructionIteratorHandleChanges;
175  friend class HBackwardInstructionIterator;
176
177  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
178};
179
180class ReferenceTypeInfo : ValueObject {
181 public:
182  typedef Handle<mirror::Class> TypeHandle;
183
184  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
185
186  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
187    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
188  }
189
190  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
191    return ReferenceTypeInfo(type_handle, is_exact);
192  }
193
194  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
195
196  static bool IsValidHandle(TypeHandle handle) {
197    return handle.GetReference() != nullptr;
198  }
199
200  bool IsValid() const {
201    return IsValidHandle(type_handle_);
202  }
203
204  bool IsExact() const { return is_exact_; }
205
206  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
207    DCHECK(IsValid());
208    return GetTypeHandle()->IsObjectClass();
209  }
210
211  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
212    DCHECK(IsValid());
213    return GetTypeHandle()->IsStringClass();
214  }
215
216  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
217    DCHECK(IsValid());
218    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
219  }
220
221  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
222    DCHECK(IsValid());
223    return GetTypeHandle()->IsInterface();
224  }
225
226  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
227    DCHECK(IsValid());
228    return GetTypeHandle()->IsArrayClass();
229  }
230
231  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
232    DCHECK(IsValid());
233    return GetTypeHandle()->IsPrimitiveArray();
234  }
235
236  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
237    DCHECK(IsValid());
238    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
239  }
240
241  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
242    DCHECK(IsValid());
243    if (!IsExact()) return false;
244    if (!IsArrayClass()) return false;
245    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
246  }
247
248  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
249    DCHECK(IsValid());
250    if (!IsExact()) return false;
251    if (!IsArrayClass()) return false;
252    if (!rti.IsArrayClass()) return false;
253    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
254        rti.GetTypeHandle()->GetComponentType());
255  }
256
257  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
258
259  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
260    DCHECK(IsValid());
261    DCHECK(rti.IsValid());
262    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
263  }
264
265  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
266    DCHECK(IsValid());
267    DCHECK(rti.IsValid());
268    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
269        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
270  }
271
272  // Returns true if the type information provide the same amount of details.
273  // Note that it does not mean that the instructions have the same actual type
274  // (because the type can be the result of a merge).
275  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
276    if (!IsValid() && !rti.IsValid()) {
277      // Invalid types are equal.
278      return true;
279    }
280    if (!IsValid() || !rti.IsValid()) {
281      // One is valid, the other not.
282      return false;
283    }
284    return IsExact() == rti.IsExact()
285        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
286  }
287
288 private:
289  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
290  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
291      : type_handle_(type_handle), is_exact_(is_exact) { }
292
293  // The class of the object.
294  TypeHandle type_handle_;
295  // Whether or not the type is exact or a superclass of the actual type.
296  // Whether or not we have any information about this type.
297  bool is_exact_;
298};
299
300std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
301
302// Control-flow graph of a method. Contains a list of basic blocks.
303class HGraph : public ArenaObject<kArenaAllocGraph> {
304 public:
305  HGraph(ArenaAllocator* arena,
306         const DexFile& dex_file,
307         uint32_t method_idx,
308         InstructionSet instruction_set,
309         InvokeType invoke_type = kInvalidInvokeType,
310         bool debuggable = false,
311         bool osr = false,
312         int start_instruction_id = 0)
313      : arena_(arena),
314        blocks_(arena->Adapter(kArenaAllocBlockList)),
315        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
316        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
317        entry_block_(nullptr),
318        exit_block_(nullptr),
319        maximum_number_of_out_vregs_(0),
320        number_of_vregs_(0),
321        number_of_in_vregs_(0),
322        temporaries_vreg_slots_(0),
323        has_bounds_checks_(false),
324        has_try_catch_(false),
325        has_simd_(false),
326        has_loops_(false),
327        has_irreducible_loops_(false),
328        debuggable_(debuggable),
329        current_instruction_id_(start_instruction_id),
330        dex_file_(dex_file),
331        method_idx_(method_idx),
332        invoke_type_(invoke_type),
333        in_ssa_form_(false),
334        number_of_cha_guards_(0),
335        instruction_set_(instruction_set),
336        cached_null_constant_(nullptr),
337        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
338        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
339        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
340        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_current_method_(nullptr),
342        art_method_(nullptr),
343        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
344        osr_(osr),
345        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
346    blocks_.reserve(kDefaultNumberOfBlocks);
347  }
348
349  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
350  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
351
352  ArenaAllocator* GetArena() const { return arena_; }
353  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
354
355  bool IsInSsaForm() const { return in_ssa_form_; }
356  void SetInSsaForm() { in_ssa_form_ = true; }
357
358  HBasicBlock* GetEntryBlock() const { return entry_block_; }
359  HBasicBlock* GetExitBlock() const { return exit_block_; }
360  bool HasExitBlock() const { return exit_block_ != nullptr; }
361
362  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
363  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
364
365  void AddBlock(HBasicBlock* block);
366
367  void ComputeDominanceInformation();
368  void ClearDominanceInformation();
369  void ClearLoopInformation();
370  void FindBackEdges(ArenaBitVector* visited);
371  GraphAnalysisResult BuildDominatorTree();
372  void SimplifyCFG();
373  void SimplifyCatchBlocks();
374
375  // Analyze all natural loops in this graph. Returns a code specifying that it
376  // was successful or the reason for failure. The method will fail if a loop
377  // is a throw-catch loop, i.e. the header is a catch block.
378  GraphAnalysisResult AnalyzeLoops() const;
379
380  // Iterate over blocks to compute try block membership. Needs reverse post
381  // order and loop information.
382  void ComputeTryBlockInformation();
383
384  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
385  // Returns the instruction to replace the invoke expression or null if the
386  // invoke is for a void method. Note that the caller is responsible for replacing
387  // and removing the invoke instruction.
388  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
389
390  // Update the loop and try membership of `block`, which was spawned from `reference`.
391  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
392  // should be the new back edge.
393  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
394                                             HBasicBlock* reference,
395                                             bool replace_if_back_edge);
396
397  // Need to add a couple of blocks to test if the loop body is entered and
398  // put deoptimization instructions, etc.
399  void TransformLoopHeaderForBCE(HBasicBlock* header);
400
401  // Adds a new loop directly after the loop with the given header and exit.
402  // Returns the new preheader.
403  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
404                                             HBasicBlock* body,
405                                             HBasicBlock* exit);
406
407  // Removes `block` from the graph. Assumes `block` has been disconnected from
408  // other blocks and has no instructions or phis.
409  void DeleteDeadEmptyBlock(HBasicBlock* block);
410
411  // Splits the edge between `block` and `successor` while preserving the
412  // indices in the predecessor/successor lists. If there are multiple edges
413  // between the blocks, the lowest indices are used.
414  // Returns the new block which is empty and has the same dex pc as `successor`.
415  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
416
417  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
418  void SimplifyLoop(HBasicBlock* header);
419
420  int32_t GetNextInstructionId() {
421    DCHECK_NE(current_instruction_id_, INT32_MAX);
422    return current_instruction_id_++;
423  }
424
425  int32_t GetCurrentInstructionId() const {
426    return current_instruction_id_;
427  }
428
429  void SetCurrentInstructionId(int32_t id) {
430    DCHECK_GE(id, current_instruction_id_);
431    current_instruction_id_ = id;
432  }
433
434  uint16_t GetMaximumNumberOfOutVRegs() const {
435    return maximum_number_of_out_vregs_;
436  }
437
438  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
439    maximum_number_of_out_vregs_ = new_value;
440  }
441
442  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
443    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
444  }
445
446  void UpdateTemporariesVRegSlots(size_t slots) {
447    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
448  }
449
450  size_t GetTemporariesVRegSlots() const {
451    DCHECK(!in_ssa_form_);
452    return temporaries_vreg_slots_;
453  }
454
455  void SetNumberOfVRegs(uint16_t number_of_vregs) {
456    number_of_vregs_ = number_of_vregs;
457  }
458
459  uint16_t GetNumberOfVRegs() const {
460    return number_of_vregs_;
461  }
462
463  void SetNumberOfInVRegs(uint16_t value) {
464    number_of_in_vregs_ = value;
465  }
466
467  uint16_t GetNumberOfInVRegs() const {
468    return number_of_in_vregs_;
469  }
470
471  uint16_t GetNumberOfLocalVRegs() const {
472    DCHECK(!in_ssa_form_);
473    return number_of_vregs_ - number_of_in_vregs_;
474  }
475
476  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
477    return reverse_post_order_;
478  }
479
480  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
481    DCHECK(GetReversePostOrder()[0] == entry_block_);
482    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
483  }
484
485  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
486    return ReverseRange(GetReversePostOrder());
487  }
488
489  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
490    return linear_order_;
491  }
492
493  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
494    return ReverseRange(GetLinearOrder());
495  }
496
497  bool HasBoundsChecks() const {
498    return has_bounds_checks_;
499  }
500
501  void SetHasBoundsChecks(bool value) {
502    has_bounds_checks_ = value;
503  }
504
505  bool IsDebuggable() const { return debuggable_; }
506
507  // Returns a constant of the given type and value. If it does not exist
508  // already, it is created and inserted into the graph. This method is only for
509  // integral types.
510  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
511
512  // TODO: This is problematic for the consistency of reference type propagation
513  // because it can be created anytime after the pass and thus it will be left
514  // with an invalid type.
515  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
516
517  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
518    return CreateConstant(value, &cached_int_constants_, dex_pc);
519  }
520  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
521    return CreateConstant(value, &cached_long_constants_, dex_pc);
522  }
523  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
524    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
525  }
526  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
527    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
528  }
529
530  HCurrentMethod* GetCurrentMethod();
531
532  const DexFile& GetDexFile() const {
533    return dex_file_;
534  }
535
536  uint32_t GetMethodIdx() const {
537    return method_idx_;
538  }
539
540  InvokeType GetInvokeType() const {
541    return invoke_type_;
542  }
543
544  InstructionSet GetInstructionSet() const {
545    return instruction_set_;
546  }
547
548  bool IsCompilingOsr() const { return osr_; }
549
550  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
551    return cha_single_implementation_list_;
552  }
553
554  void AddCHASingleImplementationDependency(ArtMethod* method) {
555    cha_single_implementation_list_.insert(method);
556  }
557
558  bool HasShouldDeoptimizeFlag() const {
559    return number_of_cha_guards_ != 0;
560  }
561
562  bool HasTryCatch() const { return has_try_catch_; }
563  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
564
565  bool HasSIMD() const { return has_simd_; }
566  void SetHasSIMD(bool value) { has_simd_ = value; }
567
568  bool HasLoops() const { return has_loops_; }
569  void SetHasLoops(bool value) { has_loops_ = value; }
570
571  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
572  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
573
574  ArtMethod* GetArtMethod() const { return art_method_; }
575  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
576
577  // Returns an instruction with the opposite Boolean value from 'cond'.
578  // The instruction has been inserted into the graph, either as a constant, or
579  // before cursor.
580  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
581
582  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
583
584  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
585  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
586  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
587
588 private:
589  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
590  void RemoveDeadBlocks(const ArenaBitVector& visited);
591
592  template <class InstructionType, typename ValueType>
593  InstructionType* CreateConstant(ValueType value,
594                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
595                                  uint32_t dex_pc = kNoDexPc) {
596    // Try to find an existing constant of the given value.
597    InstructionType* constant = nullptr;
598    auto cached_constant = cache->find(value);
599    if (cached_constant != cache->end()) {
600      constant = cached_constant->second;
601    }
602
603    // If not found or previously deleted, create and cache a new instruction.
604    // Don't bother reviving a previously deleted instruction, for simplicity.
605    if (constant == nullptr || constant->GetBlock() == nullptr) {
606      constant = new (arena_) InstructionType(value, dex_pc);
607      cache->Overwrite(value, constant);
608      InsertConstant(constant);
609    }
610    return constant;
611  }
612
613  void InsertConstant(HConstant* instruction);
614
615  // Cache a float constant into the graph. This method should only be
616  // called by the SsaBuilder when creating "equivalent" instructions.
617  void CacheFloatConstant(HFloatConstant* constant);
618
619  // See CacheFloatConstant comment.
620  void CacheDoubleConstant(HDoubleConstant* constant);
621
622  ArenaAllocator* const arena_;
623
624  // List of blocks in insertion order.
625  ArenaVector<HBasicBlock*> blocks_;
626
627  // List of blocks to perform a reverse post order tree traversal.
628  ArenaVector<HBasicBlock*> reverse_post_order_;
629
630  // List of blocks to perform a linear order tree traversal. Unlike the reverse
631  // post order, this order is not incrementally kept up-to-date.
632  ArenaVector<HBasicBlock*> linear_order_;
633
634  HBasicBlock* entry_block_;
635  HBasicBlock* exit_block_;
636
637  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
638  uint16_t maximum_number_of_out_vregs_;
639
640  // The number of virtual registers in this method. Contains the parameters.
641  uint16_t number_of_vregs_;
642
643  // The number of virtual registers used by parameters of this method.
644  uint16_t number_of_in_vregs_;
645
646  // Number of vreg size slots that the temporaries use (used in baseline compiler).
647  size_t temporaries_vreg_slots_;
648
649  // Flag whether there are bounds checks in the graph. We can skip
650  // BCE if it's false. It's only best effort to keep it up to date in
651  // the presence of code elimination so there might be false positives.
652  bool has_bounds_checks_;
653
654  // Flag whether there are try/catch blocks in the graph. We will skip
655  // try/catch-related passes if it's false. It's only best effort to keep
656  // it up to date in the presence of code elimination so there might be
657  // false positives.
658  bool has_try_catch_;
659
660  // Flag whether SIMD instructions appear in the graph. If true, the
661  // code generators may have to be more careful spilling the wider
662  // contents of SIMD registers.
663  bool has_simd_;
664
665  // Flag whether there are any loops in the graph. We can skip loop
666  // optimization if it's false. It's only best effort to keep it up
667  // to date in the presence of code elimination so there might be false
668  // positives.
669  bool has_loops_;
670
671  // Flag whether there are any irreducible loops in the graph. It's only
672  // best effort to keep it up to date in the presence of code elimination
673  // so there might be false positives.
674  bool has_irreducible_loops_;
675
676  // Indicates whether the graph should be compiled in a way that
677  // ensures full debuggability. If false, we can apply more
678  // aggressive optimizations that may limit the level of debugging.
679  const bool debuggable_;
680
681  // The current id to assign to a newly added instruction. See HInstruction.id_.
682  int32_t current_instruction_id_;
683
684  // The dex file from which the method is from.
685  const DexFile& dex_file_;
686
687  // The method index in the dex file.
688  const uint32_t method_idx_;
689
690  // If inlined, this encodes how the callee is being invoked.
691  const InvokeType invoke_type_;
692
693  // Whether the graph has been transformed to SSA form. Only used
694  // in debug mode to ensure we are not using properties only valid
695  // for non-SSA form (like the number of temporaries).
696  bool in_ssa_form_;
697
698  // Number of CHA guards in the graph. Used to short-circuit the
699  // CHA guard optimization pass when there is no CHA guard left.
700  uint32_t number_of_cha_guards_;
701
702  const InstructionSet instruction_set_;
703
704  // Cached constants.
705  HNullConstant* cached_null_constant_;
706  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
707  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
708  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
709  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
710
711  HCurrentMethod* cached_current_method_;
712
713  // The ArtMethod this graph is for. Note that for AOT, it may be null,
714  // for example for methods whose declaring class could not be resolved
715  // (such as when the superclass could not be found).
716  ArtMethod* art_method_;
717
718  // Keep the RTI of inexact Object to avoid having to pass stack handle
719  // collection pointer to passes which may create NullConstant.
720  ReferenceTypeInfo inexact_object_rti_;
721
722  // Whether we are compiling this graph for on stack replacement: this will
723  // make all loops seen as irreducible and emit special stack maps to mark
724  // compiled code entries which the interpreter can directly jump to.
725  const bool osr_;
726
727  // List of methods that are assumed to have single implementation.
728  ArenaSet<ArtMethod*> cha_single_implementation_list_;
729
730  friend class SsaBuilder;           // For caching constants.
731  friend class SsaLivenessAnalysis;  // For the linear order.
732  friend class HInliner;             // For the reverse post order.
733  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
734  DISALLOW_COPY_AND_ASSIGN(HGraph);
735};
736
737class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
738 public:
739  HLoopInformation(HBasicBlock* header, HGraph* graph)
740      : header_(header),
741        suspend_check_(nullptr),
742        irreducible_(false),
743        contains_irreducible_loop_(false),
744        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
745        // Make bit vector growable, as the number of blocks may change.
746        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
747    back_edges_.reserve(kDefaultNumberOfBackEdges);
748  }
749
750  bool IsIrreducible() const { return irreducible_; }
751  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }
752
753  void Dump(std::ostream& os);
754
755  HBasicBlock* GetHeader() const {
756    return header_;
757  }
758
759  void SetHeader(HBasicBlock* block) {
760    header_ = block;
761  }
762
763  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
764  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
765  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
766
767  void AddBackEdge(HBasicBlock* back_edge) {
768    back_edges_.push_back(back_edge);
769  }
770
771  void RemoveBackEdge(HBasicBlock* back_edge) {
772    RemoveElement(back_edges_, back_edge);
773  }
774
775  bool IsBackEdge(const HBasicBlock& block) const {
776    return ContainsElement(back_edges_, &block);
777  }
778
779  size_t NumberOfBackEdges() const {
780    return back_edges_.size();
781  }
782
783  HBasicBlock* GetPreHeader() const;
784
785  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
786    return back_edges_;
787  }
788
789  // Returns the lifetime position of the back edge that has the
790  // greatest lifetime position.
791  size_t GetLifetimeEnd() const;
792
793  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
794    ReplaceElement(back_edges_, existing, new_back_edge);
795  }
796
797  // Finds blocks that are part of this loop.
798  void Populate();
799
800  // Returns whether this loop information contains `block`.
801  // Note that this loop information *must* be populated before entering this function.
802  bool Contains(const HBasicBlock& block) const;
803
804  // Returns whether this loop information is an inner loop of `other`.
805  // Note that `other` *must* be populated before entering this function.
806  bool IsIn(const HLoopInformation& other) const;
807
808  // Returns true if instruction is not defined within this loop.
809  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;
810
811  const ArenaBitVector& GetBlocks() const { return blocks_; }
812
813  void Add(HBasicBlock* block);
814  void Remove(HBasicBlock* block);
815
816  void ClearAllBlocks() {
817    blocks_.ClearAllBits();
818  }
819
820  bool HasBackEdgeNotDominatedByHeader() const;
821
822  bool IsPopulated() const {
823    return blocks_.GetHighestBitSet() != -1;
824  }
825
826  bool DominatesAllBackEdges(HBasicBlock* block);
827
828  bool HasExitEdge() const;
829
830 private:
831  // Internal recursive implementation of `Populate`.
832  void PopulateRecursive(HBasicBlock* block);
833  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);
834
835  HBasicBlock* header_;
836  HSuspendCheck* suspend_check_;
837  bool irreducible_;
838  bool contains_irreducible_loop_;
839  ArenaVector<HBasicBlock*> back_edges_;
840  ArenaBitVector blocks_;
841
842  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
843};
844
845// Stores try/catch information for basic blocks.
846// Note that HGraph is constructed so that catch blocks cannot simultaneously
847// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  // The catch type index is left invalid (kDexNoIndex16); only `try_entry_`
  // identifies the block's try.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  // Returns whether this information describes a try block.
  bool IsTryBlock() const { return try_entry_ != nullptr; }

  // Returns a TryBoundary entering the block's try.
  // Must only be called on try blocks.
  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  // Returns whether this information describes a catch block.
  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // Returns whether the catch block handles every exception type; an invalid
  // type index denotes a catch-all handler. Must only be called on catch blocks.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  // Returns the type index of the caught exception.
  // Must only be called on catch blocks.
  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  // Returns the dex file that `GetCatchTypeIndex` resolves against.
  // Must only be called on catch blocks.
  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};
897
// Sentinel lifetime position meaning "no liveness information yet".
static constexpr size_t kNoLifetime = -1;
// Sentinel block id assigned before a real id is set (see HBasicBlock's
// constructor and SetBlockId).
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
900
901// A block in a method. Contains the list of instructions represented
902// as a double linked list. Each block knows its predecessors and
903// successors.
904
905class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
906 public:
907  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
908      : graph_(graph),
909        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
910        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
911        loop_information_(nullptr),
912        dominator_(nullptr),
913        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
914        block_id_(kInvalidBlockId),
915        dex_pc_(dex_pc),
916        lifetime_start_(kNoLifetime),
917        lifetime_end_(kNoLifetime),
918        try_catch_information_(nullptr) {
919    predecessors_.reserve(kDefaultNumberOfPredecessors);
920    successors_.reserve(kDefaultNumberOfSuccessors);
921    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
922  }
923
924  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
925    return predecessors_;
926  }
927
928  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
929    return successors_;
930  }
931
932  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
933  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
934
935  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
936    return ContainsElement(successors_, block, start_from);
937  }
938
939  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
940    return dominated_blocks_;
941  }
942
943  bool IsEntryBlock() const {
944    return graph_->GetEntryBlock() == this;
945  }
946
947  bool IsExitBlock() const {
948    return graph_->GetExitBlock() == this;
949  }
950
951  bool IsSingleGoto() const;
952  bool IsSingleTryBoundary() const;
953
954  // Returns true if this block emits nothing but a jump.
955  bool IsSingleJump() const {
956    HLoopInformation* loop_info = GetLoopInformation();
957    return (IsSingleGoto() || IsSingleTryBoundary())
958           // Back edges generate a suspend check.
959           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
960  }
961
962  void AddBackEdge(HBasicBlock* back_edge) {
963    if (loop_information_ == nullptr) {
964      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
965    }
966    DCHECK_EQ(loop_information_->GetHeader(), this);
967    loop_information_->AddBackEdge(back_edge);
968  }
969
970  HGraph* GetGraph() const { return graph_; }
971  void SetGraph(HGraph* graph) { graph_ = graph; }
972
973  uint32_t GetBlockId() const { return block_id_; }
974  void SetBlockId(int id) { block_id_ = id; }
975  uint32_t GetDexPc() const { return dex_pc_; }
976
977  HBasicBlock* GetDominator() const { return dominator_; }
978  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
979  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
980
981  void RemoveDominatedBlock(HBasicBlock* block) {
982    RemoveElement(dominated_blocks_, block);
983  }
984
985  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
986    ReplaceElement(dominated_blocks_, existing, new_block);
987  }
988
989  void ClearDominanceInformation();
990
991  int NumberOfBackEdges() const {
992    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
993  }
994
995  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
996  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
997  const HInstructionList& GetInstructions() const { return instructions_; }
998  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
999  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
1000  const HInstructionList& GetPhis() const { return phis_; }
1001
1002  HInstruction* GetFirstInstructionDisregardMoves() const;
1003
1004  void AddSuccessor(HBasicBlock* block) {
1005    successors_.push_back(block);
1006    block->predecessors_.push_back(this);
1007  }
1008
1009  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
1010    size_t successor_index = GetSuccessorIndexOf(existing);
1011    existing->RemovePredecessor(this);
1012    new_block->predecessors_.push_back(this);
1013    successors_[successor_index] = new_block;
1014  }
1015
1016  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
1017    size_t predecessor_index = GetPredecessorIndexOf(existing);
1018    existing->RemoveSuccessor(this);
1019    new_block->successors_.push_back(this);
1020    predecessors_[predecessor_index] = new_block;
1021  }
1022
1023  // Insert `this` between `predecessor` and `successor. This method
1024  // preserves the indicies, and will update the first edge found between
1025  // `predecessor` and `successor`.
1026  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
1027    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
1028    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
1029    successor->predecessors_[predecessor_index] = this;
1030    predecessor->successors_[successor_index] = this;
1031    successors_.push_back(successor);
1032    predecessors_.push_back(predecessor);
1033  }
1034
1035  void RemovePredecessor(HBasicBlock* block) {
1036    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
1037  }
1038
1039  void RemoveSuccessor(HBasicBlock* block) {
1040    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
1041  }
1042
1043  void ClearAllPredecessors() {
1044    predecessors_.clear();
1045  }
1046
1047  void AddPredecessor(HBasicBlock* block) {
1048    predecessors_.push_back(block);
1049    block->successors_.push_back(this);
1050  }
1051
1052  void SwapPredecessors() {
1053    DCHECK_EQ(predecessors_.size(), 2u);
1054    std::swap(predecessors_[0], predecessors_[1]);
1055  }
1056
1057  void SwapSuccessors() {
1058    DCHECK_EQ(successors_.size(), 2u);
1059    std::swap(successors_[0], successors_[1]);
1060  }
1061
1062  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
1063    return IndexOfElement(predecessors_, predecessor);
1064  }
1065
1066  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
1067    return IndexOfElement(successors_, successor);
1068  }
1069
1070  HBasicBlock* GetSinglePredecessor() const {
1071    DCHECK_EQ(GetPredecessors().size(), 1u);
1072    return GetPredecessors()[0];
1073  }
1074
1075  HBasicBlock* GetSingleSuccessor() const {
1076    DCHECK_EQ(GetSuccessors().size(), 1u);
1077    return GetSuccessors()[0];
1078  }
1079
1080  // Returns whether the first occurrence of `predecessor` in the list of
1081  // predecessors is at index `idx`.
1082  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
1083    DCHECK_EQ(GetPredecessors()[idx], predecessor);
1084    return GetPredecessorIndexOf(predecessor) == idx;
1085  }
1086
1087  // Create a new block between this block and its predecessors. The new block
1088  // is added to the graph, all predecessor edges are relinked to it and an edge
1089  // is created to `this`. Returns the new empty block. Reverse post order or
1090  // loop and try/catch information are not updated.
1091  HBasicBlock* CreateImmediateDominator();
1092
1093  // Split the block into two blocks just before `cursor`. Returns the newly
1094  // created, latter block. Note that this method will add the block to the
1095  // graph, create a Goto at the end of the former block and will create an edge
1096  // between the blocks. It will not, however, update the reverse post order or
1097  // loop and try/catch information.
1098  HBasicBlock* SplitBefore(HInstruction* cursor);
1099
1100  // Split the block into two blocks just before `cursor`. Returns the newly
1101  // created block. Note that this method just updates raw block information,
1102  // like predecessors, successors, dominators, and instruction list. It does not
1103  // update the graph, reverse post order, loop information, nor make sure the
1104  // blocks are consistent (for example ending with a control flow instruction).
1105  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1106
1107  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1108  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1109
1110  // Merge `other` at the end of `this`. Successors and dominated blocks of
1111  // `other` are changed to be successors and dominated blocks of `this`. Note
1112  // that this method does not update the graph, reverse post order, loop
1113  // information, nor make sure the blocks are consistent (for example ending
1114  // with a control flow instruction).
1115  void MergeWithInlined(HBasicBlock* other);
1116
1117  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1118  // of `this` are moved to `other`.
1119  // Note that this method does not update the graph, reverse post order, loop
1120  // information, nor make sure the blocks are consistent (for example ending
1121  // with a control flow instruction).
1122  void ReplaceWith(HBasicBlock* other);
1123
1124  // Merges the instructions of `other` at the end of `this`.
1125  void MergeInstructionsWith(HBasicBlock* other);
1126
1127  // Merge `other` at the end of `this`. This method updates loops, reverse post
1128  // order, links to predecessors, successors, dominators and deletes the block
1129  // from the graph. The two blocks must be successive, i.e. `this` the only
1130  // predecessor of `other` and vice versa.
1131  void MergeWith(HBasicBlock* other);
1132
1133  // Disconnects `this` from all its predecessors, successors and dominator,
1134  // removes it from all loops it is included in and eventually from the graph.
1135  // The block must not dominate any other block. Predecessors and successors
1136  // are safely updated.
1137  void DisconnectAndDelete();
1138
1139  void AddInstruction(HInstruction* instruction);
1140  // Insert `instruction` before/after an existing instruction `cursor`.
1141  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1142  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1143  // Replace instruction `initial` with `replacement` within this block.
1144  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1145                                       HInstruction* replacement);
1146  void AddPhi(HPhi* phi);
1147  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1148  // RemoveInstruction and RemovePhi delete a given instruction from the respective
1149  // instruction list. With 'ensure_safety' set to true, it verifies that the
1150  // instruction is not in use and removes it from the use lists of its inputs.
1151  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1152  void RemovePhi(HPhi* phi, bool ensure_safety = true);
1153  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1154
1155  bool IsLoopHeader() const {
1156    return IsInLoop() && (loop_information_->GetHeader() == this);
1157  }
1158
1159  bool IsLoopPreHeaderFirstPredecessor() const {
1160    DCHECK(IsLoopHeader());
1161    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
1162  }
1163
1164  bool IsFirstPredecessorBackEdge() const {
1165    DCHECK(IsLoopHeader());
1166    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
1167  }
1168
1169  HLoopInformation* GetLoopInformation() const {
1170    return loop_information_;
1171  }
1172
1173  // Set the loop_information_ on this block. Overrides the current
1174  // loop_information if it is an outer loop of the passed loop information.
1175  // Note that this method is called while creating the loop information.
1176  void SetInLoop(HLoopInformation* info) {
1177    if (IsLoopHeader()) {
1178      // Nothing to do. This just means `info` is an outer loop.
1179    } else if (!IsInLoop()) {
1180      loop_information_ = info;
1181    } else if (loop_information_->Contains(*info->GetHeader())) {
1182      // Block is currently part of an outer loop. Make it part of this inner loop.
1183      // Note that a non loop header having a loop information means this loop information
1184      // has already been populated
1185      loop_information_ = info;
1186    } else {
1187      // Block is part of an inner loop. Do not update the loop information.
1188      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1189      // at this point, because this method is being called while populating `info`.
1190    }
1191  }
1192
1193  // Raw update of the loop information.
1194  void SetLoopInformation(HLoopInformation* info) {
1195    loop_information_ = info;
1196  }
1197
1198  bool IsInLoop() const { return loop_information_ != nullptr; }
1199
1200  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }
1201
1202  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
1203    try_catch_information_ = try_catch_information;
1204  }
1205
1206  bool IsTryBlock() const {
1207    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
1208  }
1209
1210  bool IsCatchBlock() const {
1211    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
1212  }
1213
1214  // Returns the try entry that this block's successors should have. They will
1215  // be in the same try, unless the block ends in a try boundary. In that case,
1216  // the appropriate try entry will be returned.
1217  const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1218
1219  bool HasThrowingInstructions() const;
1220
1221  // Returns whether this block dominates the blocked passed as parameter.
1222  bool Dominates(HBasicBlock* block) const;
1223
1224  size_t GetLifetimeStart() const { return lifetime_start_; }
1225  size_t GetLifetimeEnd() const { return lifetime_end_; }
1226
1227  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
1228  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1229
1230  bool EndsWithControlFlowInstruction() const;
1231  bool EndsWithIf() const;
1232  bool EndsWithTryBoundary() const;
1233  bool HasSinglePhi() const;
1234
1235 private:
1236  HGraph* graph_;
1237  ArenaVector<HBasicBlock*> predecessors_;
1238  ArenaVector<HBasicBlock*> successors_;
1239  HInstructionList instructions_;
1240  HInstructionList phis_;
1241  HLoopInformation* loop_information_;
1242  HBasicBlock* dominator_;
1243  ArenaVector<HBasicBlock*> dominated_blocks_;
1244  uint32_t block_id_;
1245  // The dex program counter of the first instruction of this block.
1246  const uint32_t dex_pc_;
1247  size_t lifetime_start_;
1248  size_t lifetime_end_;
1249  TryCatchInformation* try_catch_information_;
1250
1251  friend class HGraph;
1252  friend class HInstruction;
1253
1254  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1255};
1256
1257// Iterates over the LoopInformation of all loops which contain 'block'
1258// from the innermost to the outermost.
1259class HLoopInformationOutwardIterator : public ValueObject {
1260 public:
1261  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1262      : current_(block.GetLoopInformation()) {}
1263
1264  bool Done() const { return current_ == nullptr; }
1265
1266  void Advance() {
1267    DCHECK(!Done());
1268    current_ = current_->GetPreHeader()->GetLoopInformation();
1269  }
1270
1271  HLoopInformation* Current() const {
1272    DCHECK(!Done());
1273    return current_;
1274  }
1275
1276 private:
1277  HLoopInformation* current_;
1278
1279  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1280};
1281
1282#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
1283  M(Above, Condition)                                                   \
1284  M(AboveOrEqual, Condition)                                            \
1285  M(Add, BinaryOperation)                                               \
1286  M(And, BinaryOperation)                                               \
1287  M(ArrayGet, Instruction)                                              \
1288  M(ArrayLength, Instruction)                                           \
1289  M(ArraySet, Instruction)                                              \
1290  M(Below, Condition)                                                   \
1291  M(BelowOrEqual, Condition)                                            \
1292  M(BooleanNot, UnaryOperation)                                         \
1293  M(BoundsCheck, Instruction)                                           \
1294  M(BoundType, Instruction)                                             \
1295  M(CheckCast, Instruction)                                             \
1296  M(ClassTableGet, Instruction)                                         \
1297  M(ClearException, Instruction)                                        \
1298  M(ClinitCheck, Instruction)                                           \
1299  M(Compare, BinaryOperation)                                           \
1300  M(CurrentMethod, Instruction)                                         \
1301  M(ShouldDeoptimizeFlag, Instruction)                                  \
1302  M(Deoptimize, Instruction)                                            \
1303  M(Div, BinaryOperation)                                               \
1304  M(DivZeroCheck, Instruction)                                          \
1305  M(DoubleConstant, Constant)                                           \
1306  M(Equal, Condition)                                                   \
1307  M(Exit, Instruction)                                                  \
1308  M(FloatConstant, Constant)                                            \
1309  M(Goto, Instruction)                                                  \
1310  M(GreaterThan, Condition)                                             \
1311  M(GreaterThanOrEqual, Condition)                                      \
1312  M(If, Instruction)                                                    \
1313  M(InstanceFieldGet, Instruction)                                      \
1314  M(InstanceFieldSet, Instruction)                                      \
1315  M(InstanceOf, Instruction)                                            \
1316  M(IntConstant, Constant)                                              \
1317  M(InvokeUnresolved, Invoke)                                           \
1318  M(InvokeInterface, Invoke)                                            \
1319  M(InvokeStaticOrDirect, Invoke)                                       \
1320  M(InvokeVirtual, Invoke)                                              \
1321  M(InvokePolymorphic, Invoke)                                          \
1322  M(LessThan, Condition)                                                \
1323  M(LessThanOrEqual, Condition)                                         \
1324  M(LoadClass, Instruction)                                             \
1325  M(LoadException, Instruction)                                         \
1326  M(LoadString, Instruction)                                            \
1327  M(LongConstant, Constant)                                             \
1328  M(MemoryBarrier, Instruction)                                         \
1329  M(MonitorOperation, Instruction)                                      \
1330  M(Mul, BinaryOperation)                                               \
1331  M(NativeDebugInfo, Instruction)                                       \
1332  M(Neg, UnaryOperation)                                                \
1333  M(NewArray, Instruction)                                              \
1334  M(NewInstance, Instruction)                                           \
1335  M(Not, UnaryOperation)                                                \
1336  M(NotEqual, Condition)                                                \
1337  M(NullConstant, Instruction)                                          \
1338  M(NullCheck, Instruction)                                             \
1339  M(Or, BinaryOperation)                                                \
1340  M(PackedSwitch, Instruction)                                          \
1341  M(ParallelMove, Instruction)                                          \
1342  M(ParameterValue, Instruction)                                        \
1343  M(Phi, Instruction)                                                   \
1344  M(Rem, BinaryOperation)                                               \
1345  M(Return, Instruction)                                                \
1346  M(ReturnVoid, Instruction)                                            \
1347  M(Ror, BinaryOperation)                                               \
1348  M(Shl, BinaryOperation)                                               \
1349  M(Shr, BinaryOperation)                                               \
1350  M(StaticFieldGet, Instruction)                                        \
1351  M(StaticFieldSet, Instruction)                                        \
1352  M(UnresolvedInstanceFieldGet, Instruction)                            \
1353  M(UnresolvedInstanceFieldSet, Instruction)                            \
1354  M(UnresolvedStaticFieldGet, Instruction)                              \
1355  M(UnresolvedStaticFieldSet, Instruction)                              \
1356  M(Select, Instruction)                                                \
1357  M(Sub, BinaryOperation)                                               \
1358  M(SuspendCheck, Instruction)                                          \
1359  M(Throw, Instruction)                                                 \
1360  M(TryBoundary, Instruction)                                           \
1361  M(TypeConversion, Instruction)                                        \
1362  M(UShr, BinaryOperation)                                              \
1363  M(Xor, BinaryOperation)                                               \
1364  M(VecReplicateScalar, VecUnaryOperation)                              \
1365  M(VecSumReduce, VecUnaryOperation)                                    \
1366  M(VecCnv, VecUnaryOperation)                                          \
1367  M(VecNeg, VecUnaryOperation)                                          \
1368  M(VecAbs, VecUnaryOperation)                                          \
1369  M(VecNot, VecUnaryOperation)                                          \
1370  M(VecAdd, VecBinaryOperation)                                         \
1371  M(VecHalvingAdd, VecBinaryOperation)                                  \
1372  M(VecSub, VecBinaryOperation)                                         \
1373  M(VecMul, VecBinaryOperation)                                         \
1374  M(VecDiv, VecBinaryOperation)                                         \
1375  M(VecMin, VecBinaryOperation)                                         \
1376  M(VecMax, VecBinaryOperation)                                         \
1377  M(VecAnd, VecBinaryOperation)                                         \
1378  M(VecAndNot, VecBinaryOperation)                                      \
1379  M(VecOr, VecBinaryOperation)                                          \
1380  M(VecXor, VecBinaryOperation)                                         \
1381  M(VecShl, VecBinaryOperation)                                         \
1382  M(VecShr, VecBinaryOperation)                                         \
1383  M(VecUShr, VecBinaryOperation)                                        \
1384  M(VecSetScalars, VecOperation)                                        \
1385  M(VecMultiplyAccumulate, VecOperation)                                \
1386  M(VecLoad, VecMemoryOperation)                                        \
1387  M(VecStore, VecMemoryOperation)                                       \
1388
1389/*
1390 * Instructions, shared across several (not all) architectures.
1391 */
1392#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1393#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1394#else
1395#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1396  M(BitwiseNegatedRight, Instruction)                                   \
1397  M(DataProcWithShifterOp, Instruction)                                 \
1398  M(MultiplyAccumulate, Instruction)                                    \
1399  M(IntermediateAddress, Instruction)
1400#endif
1401
1402#ifndef ART_ENABLE_CODEGEN_arm
1403#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1404#else
1405#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1406  M(ArmDexCacheArraysBase, Instruction)
1407#endif
1408
1409#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1410
1411#ifndef ART_ENABLE_CODEGEN_mips
1412#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1413#else
1414#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
1415  M(MipsComputeBaseMethodAddress, Instruction)                          \
1416  M(MipsDexCacheArraysBase, Instruction)                                \
1417  M(MipsPackedSwitch, Instruction)
1418#endif
1419
1420#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1421
1422#ifndef ART_ENABLE_CODEGEN_x86
1423#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1424#else
1425#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1426  M(X86ComputeBaseMethodAddress, Instruction)                           \
1427  M(X86LoadFromConstantTable, Instruction)                              \
1428  M(X86FPNeg, Instruction)                                              \
1429  M(X86PackedSwitch, Instruction)
1430#endif
1431
1432#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1433
1434#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1435  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1436  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1437  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1438  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1439  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1440  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1441  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1442  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1443
1444#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
1445  M(Condition, BinaryOperation)                                         \
1446  M(Constant, Instruction)                                              \
1447  M(UnaryOperation, Instruction)                                        \
1448  M(BinaryOperation, Instruction)                                       \
1449  M(Invoke, Instruction)                                                \
1450  M(VecOperation, Instruction)                                          \
1451  M(VecUnaryOperation, VecOperation)                                    \
1452  M(VecBinaryOperation, VecOperation)                                   \
1453  M(VecMemoryOperation, VecOperation)
1454
1455#define FOR_EACH_INSTRUCTION(M)                                         \
1456  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
1457  FOR_EACH_ABSTRACT_INSTRUCTION(M)
1458
1459#define FORWARD_DECLARATION(type, super) class H##type;
1460FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1461#undef FORWARD_DECLARATION
1462
// Boilerplate placed in each concrete instruction class: reports the
// instruction kind and debug name, compares instruction types, and
// dispatches to the matching visitor method.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate placed in each abstract instruction class: non-virtual
// Is##type()/As##type() helpers (always succeed on the declaring class).
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1475
// One node in an instruction's use list, recording a single user and the
// input index at which that user references the instruction.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  // The user holding the reference (T is e.g. HInstruction* or HEnvironment* —
  // see HUserRecord below).
  T GetUser() const { return user_; }
  // Input index at which the user references the used instruction.
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Private: only HInstruction creates use list nodes (friend below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1498
// A use list: an intrusive singly-linked list of HUseListNode<T>.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1501
// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Copies `old_record` but with a new position in the use list.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // Iterator positioned just before this record's node in the use list; a
  // forward list needs the predecessor to erase or splice a node.
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  // Iterator at this record's own node (one step past the "before" iterator).
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1529
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

// Mutable and const views of an instruction's inputs, presented as HInstruction*.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1544
1545/**
1546 * Side-effects representation.
1547 *
1548 * For write/read dependences on fields/arrays, the dependence analysis uses
1549 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
1551 * modify the value of a reference field read [although it may modify the
1552 * reference fetch prior to reading the field, which is represented by its own
1553 * write/read dependence]). The analysis makes conservative points-to
1554 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1555 * the same, and any reference read depends on any reference read without
1556 * further regard of its type).
1557 *
1558 * The internal representation uses 38-bit and is described in the table below.
1559 * The first line indicates the side effect, and for field/array accesses the
1560 * second line indicates the type of the access (in the order of the
1561 * Primitive::Type enum).
1562 * The two numbered lines below indicate the bit position in the bitfield (read
1563 * vertically).
1564 *
1565 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1566 *   +-------------+---------+---------+--------------+---------+---------+
1567 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1568 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1569 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1570 *
1571 * Note that, to ease the implementation, 'changes' bits are least significant
1572 * bits, while 'dependency' bits are most significant bits.
1573 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // All 'change' bits and all 'depend on' bits set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // All writes/reads plus the 'can trigger GC' bit, but not 'depends on GC'.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access acts as a full read/write barrier, hence the widening
  // to AllWritesAndReads() below (same for FieldReadOfType).
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Removes the other's bits from this one.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if all of other's bits are set in this one (superset test).
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  // The depend-on bits are shifted down so they line up with the change bits,
  // which relies on the two halves having identical internal layout.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk the bits from most significant (kLastBit) down to 0, mirroring the
    // layout diagram in the class comment above.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Nine bits per access group: one per Primitive::Type (minus void).
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half: field writes [0,8], array writes [9,17], trigger-GC bit 18.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half: same layout shifted up by kChangeBits (see MayDependOn).
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1769
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Allocates an environment with the same shape/method/dex_pc as `to_copy`,
  // but does NOT copy the vreg values; callers use CopyFrom() for that.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Appends a copy of `parent` (and its whole parent chain) at the end of
  // this environment's own parent chain. Recurses until the first environment
  // without a parent, then clones level by level.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw: does not update the used instruction's env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of virtual registers recorded in this environment.
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // A non-null parent means this environment belongs to an inlined frame.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1866
1867class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1868 public:
  // A new instruction starts detached: no block, no id (-1), no SSA index,
  // no environment/locations/liveness info. Its reference-type info is the
  // invalid sentinel until reference type propagation runs.
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// One enumerator (k##type) per instruction class in FOR_EACH_INSTRUCTION.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND
1893
  // Linked-list navigation within the owning basic block.
  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  // Subclasses expose their input storage through this single virtual method.
  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  // Views of the inputs as plain HInstruction* (see HInputExtractor).
  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1928
1929  bool HasInput(HInstruction* input) const {
1930    for (const HInstruction* i : GetInputs()) {
1931      if (i == input) {
1932        return true;
1933      }
1934    }
1935    return false;
1936  }
1937
  // Raw: overwrites the input record without updating the input's use list.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  // The type of the produced value; kPrimVoid by default (no value produced).
  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
  // could throw OOME, but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  // Only valid on reference-typed instructions; reconstructs the RTI from the
  // stored handle and the packed 'is exact' flag.
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }
1985
  // Registers `user` (which references this instruction at input `index`) in
  // this instruction's use list. The new node goes to the front, so only the
  // records up to the old front need their before-iterators fixed up.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Environment-use counterpart of AddUseAt().
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction's use node from the use list of its input at
  // `input`, then repairs the following record's before-iterator.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Same as above, applied to every input.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }
2020
  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Whether the instruction may be removed if it has no (remaining) uses.
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Pushes `environment` on top of the existing chain: the old innermost
  // environment becomes the parent of the new one. Raw (no use-list updates).
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }
2076
  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom(), but loop phis of `block` are replaced by their
  // first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();
2130
// Declarations of the Is/As helpers for every concrete instruction; the
// definitions are provided out-of-line (they rely on the DECLARE_INSTRUCTION
// overrides in each concrete class).
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions the base class provides inline defaults returning
// nullptr; abstract subclasses override via DECLARE_ABSTRACT_INSTRUCTION.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK
2145
  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  //       meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash over the kind and the ids of the inputs (used by GVN-style passes).
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }
2181  }
2182
2183  SideEffects GetSideEffects() const { return side_effects_; }
2184  void SetSideEffects(SideEffects other) { side_effects_ = other; }
2185  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2186
2187  size_t GetLifetimePosition() const { return lifetime_position_; }
2188  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2189  LiveInterval* GetLiveInterval() const { return live_interval_; }
2190  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2191  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2192
2193  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2194
2195  // Returns whether the code generation of the instruction will require to have access
2196  // to the current method. Such instructions are:
2197  // (1): Instructions that require an environment, as calling the runtime requires
2198  //      to walk the stack and have the current method stored at a specific stack address.
2199  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2200  //      to access the dex cache.
2201  bool NeedsCurrentMethod() const {
2202    return NeedsEnvironment() || IsCurrentMethod();
2203  }
2204
2205  // Returns whether the code generation of the instruction will require to have access
2206  // to the dex cache of the current method's declaring class via the current method.
2207  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2208
2209  // Does this instruction have any use in an environment before
2210  // control flow hits 'other'?
2211  bool HasAnyEnvironmentUseBefore(HInstruction* other);
2212
2213  // Remove all references to environment uses of this instruction.
2214  // The caller must ensure that this is safe to do.
2215  void RemoveEnvironmentUsers();
2216
2217  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2218  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2219
 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses may pack extra bits starting at this index, up to the limit below.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Raw: overwrites the record without touching the input's use list.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  // Single-bit accessors into packed_fields_.
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  // Multi-bit accessors; BitFieldType defines the position/size/encoding.
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }
2261
2262 private:
2263  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
2264    auto before_use_node = uses_.before_begin();
2265    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
2266      HInstruction* user = use_node->GetUser();
2267      size_t input_index = use_node->GetIndex();
2268      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
2269      before_use_node = use_node;
2270    }
2271  }
2272
2273  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
2274    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
2275    if (next != uses_.end()) {
2276      HInstruction* next_user = next->GetUser();
2277      size_t next_index = next->GetIndex();
2278      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
2279      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
2280    }
2281  }
2282
  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion():
  // rewrites the vreg records of every environment user in
  // [env_uses_.begin(), env_fixup_end) to cache the before-iterator of its
  // use node in `env_uses_`.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }
2292
  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval():
  // refreshes the cached before-iterator of the environment use that now
  // follows `before_env_use_node`, if any.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2302
  // Intrusive links in the instruction list of the containing basic block.
  HInstruction* previous_;
  HInstruction* next_;
  // The basic block this instruction belongs to.
  HBasicBlock* block_;
  // The dex pc this instruction was created for (kNoDexPc if none).
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2353};
2354std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2355
2356// Iterates over the instructions, while preserving the next instruction
2357// in case the current instruction gets removed from the list by the user
2358// of this iterator.
2359class HInstructionIterator : public ValueObject {
2360 public:
2361  explicit HInstructionIterator(const HInstructionList& instructions)
2362      : instruction_(instructions.first_instruction_) {
2363    next_ = Done() ? nullptr : instruction_->GetNext();
2364  }
2365
2366  bool Done() const { return instruction_ == nullptr; }
2367  HInstruction* Current() const { return instruction_; }
2368  void Advance() {
2369    instruction_ = next_;
2370    next_ = Done() ? nullptr : instruction_->GetNext();
2371  }
2372
2373 private:
2374  HInstruction* instruction_;
2375  HInstruction* next_;
2376
2377  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2378};
2379
2380// Iterates over the instructions without saving the next instruction,
2381// therefore handling changes in the graph potentially made by the user
2382// of this iterator.
2383class HInstructionIteratorHandleChanges : public ValueObject {
2384 public:
2385  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2386      : instruction_(instructions.first_instruction_) {
2387  }
2388
2389  bool Done() const { return instruction_ == nullptr; }
2390  HInstruction* Current() const { return instruction_; }
2391  void Advance() {
2392    instruction_ = instruction_->GetNext();
2393  }
2394
2395 private:
2396  HInstruction* instruction_;
2397
2398  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2399};
2400
2401
2402class HBackwardInstructionIterator : public ValueObject {
2403 public:
2404  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2405      : instruction_(instructions.last_instruction_) {
2406    next_ = Done() ? nullptr : instruction_->GetPrevious();
2407  }
2408
2409  bool Done() const { return instruction_ == nullptr; }
2410  HInstruction* Current() const { return instruction_; }
2411  void Advance() {
2412    instruction_ = next_;
2413    next_ = Done() ? nullptr : instruction_->GetPrevious();
2414  }
2415
2416 private:
2417  HInstruction* instruction_;
2418  HInstruction* next_;
2419
2420  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2421};
2422
// Base class for instructions whose input count is not fixed at compile
// time (e.g. phis, invokes); inputs live in an arena-allocated vector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  // Input-list mutators; definitions live in the .cc file.
  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2448
// Base class for instructions with a fixed number of inputs (N), stored
// inline in a std::array instead of arena-allocated storage.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2466
2467// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // No inputs: return an empty view instead of storing a zero-length array.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2484
// Base class for fixed-input-count instructions that produce a typed value;
// the type is encoded in the generic packed fields.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Layout of the type field within the packed fields inherited from
  // HInstruction; subclasses start their own fields after
  // kNumberOfExpressionPackedBits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2507
2508// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2509// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Always terminates its block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2522
2523// Represents dex's RETURN opcodes. A HReturn is a control flow
2524// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // Input 0 is the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Always terminates its block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2539
// SSA phi node: merges one value per predecessor of its block into a single
// value, tracked together with the dex register it represents.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  // True if this phi belongs to a catch block.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges values for.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type field first, then the two boolean flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2629
2630// The exit instruction is the only instruction of the exit block.
2631// Instructions aborting the method (HThrow and HReturn) must branch to the
2632// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Always terminates its block (the exit block).
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2644
2645// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A block ending in a goto has exactly one successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2661
// Abstract base class for compile-time constants of all primitive types.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  // Constants have no side effects and can always be moved.
  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Returns the value reinterpreted as a 64-bit unsigned bit pattern.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2685
2686class HNullConstant FINAL : public HConstant {
2687 public:
2688  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2689    return true;
2690  }
2691
2692  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2693
2694  size_t ComputeHashCode() const OVERRIDE { return 0; }
2695
2696  // The null constant representation is a 0-bit pattern.
2697  virtual bool IsZeroBitPattern() const { return true; }
2698
2699  DECLARE_INSTRUCTION(NullConstant);
2700
2701 private:
2702  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2703
2704  friend class HGraph;
2705  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2706};
2707
2708// Constants of the type int. Those can be from Dex instructions, or
2709// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // Zero-extend: cast through uint32_t to avoid sign extension.
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Creation goes through HGraph (constant canonicalization).
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  // Boolean convenience overload: true -> 1, false -> 0.
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2750
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Creation goes through HGraph (constant canonicalization).
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2780
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    // Bit-pattern comparison: distinguishes +0.0f/-0.0f, equates identical NaNs.
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  // NOTE(review): float -> size_t conversion is undefined for NaN or
  // out-of-range values — consider hashing the bit pattern instead; confirm
  // callers never hash such constants.
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // FP_ZERO matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Only +0.0f is an all-zero bit pattern; -0.0f has the sign bit set.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs from a raw 32-bit pattern (bit_cast, not a numeric conversion).
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2833
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    // Bit-pattern comparison: distinguishes +0.0/-0.0, equates identical NaNs.
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  // NOTE(review): double -> size_t conversion is undefined for NaN or
  // out-of-range values — consider hashing the bit pattern instead; confirm
  // callers never hash such constants.
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // FP_ZERO matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Only +0.0 is an all-zero bit pattern; -0.0 has the sign bit set.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs from a raw 64-bit pattern (bit_cast, not a numeric conversion).
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2884
2885// Conditional branch. A block ending with an HIf instruction must have
2886// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // Input 0 is the branch condition.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor 0 is the taken (true) branch.
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor 1 is the fall-through (false) branch.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2909
2910
2911// Abstract instruction which marks the beginning and/or end of a try block and
2912// links it to the respective exception handlers. Behaves the same as a Goto in
2913// non-exceptional control flow.
2914// Normal-flow successor is stored at index zero, exception handlers under
2915// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Returns the exception handlers: all successors except index zero.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Entry/exit kind stored in the packed fields.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2972
2973// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize FINAL : public HVariableInputSizeInstruction {
 public:
  // What optimization introduced this deoptimization point.
  enum class Kind {
    kBCE,
    kInline,
    kLast = kInline
  };

  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
  // across.
  HDeoptimize(ArenaAllocator* arena, HInstruction* cond, Kind kind, uint32_t dex_pc)
      : HVariableInputSizeInstruction(
            SideEffects::All(),
            dex_pc,
            arena,
            /* number_of_inputs */ 1,
            kArenaAllocMisc) {
    SetPackedFlag<kFieldCanBeMoved>(false);
    SetPackedField<DeoptimizeKindField>(kind);
    SetRawInputAt(0, cond);
  }

  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
  // instead of `guard`.
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(ArenaAllocator* arena,
              HInstruction* cond,
              HInstruction* guard,
              Kind kind,
              uint32_t dex_pc)
      : HVariableInputSizeInstruction(
            SideEffects::CanTriggerGC(),
            dex_pc,
            arena,
            /* number_of_inputs */ 2,
            kArenaAllocMisc) {
    SetPackedFlag<kFieldCanBeMoved>(true);
    SetPackedField<DeoptimizeKindField>(kind);
    SetRawInputAt(0, cond);
    SetRawInputAt(1, guard);
  }

  // Movability depends on which constructor was used (barrier vs. guard).
  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
  }

  // The environment is needed to reconstruct interpreter state on deopt.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  Kind GetKind() const { return GetPackedField<DeoptimizeKindField>(); }

  // In guard mode this instruction stands in for the guarded value, so it
  // takes on that value's type; in barrier mode it produces nothing.
  Primitive::Type GetType() const OVERRIDE {
    return GuardsAnInput() ? GuardedInput()->GetType() : Primitive::kPrimVoid;
  }

  bool GuardsAnInput() const {
    return InputCount() == 2;
  }

  HInstruction* GuardedInput() const {
    DCHECK(GuardsAnInput());
    return InputAt(1);
  }

  void RemoveGuard() {
    RemoveInputAt(1);
  }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  // Packed layout: one movability flag followed by the kind field.
  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
  static constexpr size_t kFieldDeoptimizeKindSize =
      MinimumBitsToStore(static_cast<size_t>(Kind::kLast));
  static constexpr size_t kNumberOfDeoptimizePackedBits =
      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using DeoptimizeKindField = BitField<Kind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
3062
3063std::ostream& operator<<(std::ostream& os, const HDeoptimize::Kind& rhs);
3064
3065// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3066// The compiled code checks this flag value in a guard before devirtualized call and
3067// if it's true, starts to do deoptimization.
3068// It has a 4-byte slot on stack.
3069// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer.
  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3091
3092// Represents the ArtMethod that was passed as a first argument to
3093// the method. It is used by instructions that depend on it, like
3094// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // `type` is the pointer type used to represent the ArtMethod* value.
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3105
3106// Fetches an ArtMethod from the virtual table or the interface method table
3107// of a class.
3108class HClassTableGet FINAL : public HExpression<1> {
3109 public:
3110  enum class TableKind {
3111    kVTable,
3112    kIMTable,
3113    kLast = kIMTable
3114  };
3115  HClassTableGet(HInstruction* cls,
3116                 Primitive::Type type,
3117                 TableKind kind,
3118                 size_t index,
3119                 uint32_t dex_pc)
3120      : HExpression(type, SideEffects::None(), dex_pc),
3121        index_(index) {
3122    SetPackedField<TableKindField>(kind);
3123    SetRawInputAt(0, cls);
3124  }
3125
3126  bool CanBeMoved() const OVERRIDE { return true; }
3127  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3128    return other->AsClassTableGet()->GetIndex() == index_ &&
3129        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3130  }
3131
3132  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3133  size_t GetIndex() const { return index_; }
3134
3135  DECLARE_INSTRUCTION(ClassTableGet);
3136
3137 private:
3138  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3139  static constexpr size_t kFieldTableKindSize =
3140      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3141  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3142  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3143                "Too many packed fields.");
3144  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3145
3146  // The index of the ArtMethod in the table.
3147  const size_t index_;
3148
3149  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3150};
3151
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  // `start_value` is the case value of the first table entry; `input` is the
  // value being switched on.
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  // Terminates its basic block; the jump targets are the block's successors.
  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3185
// Abstract base class for operations with a single input and a result.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Unary operations are side-effect free, so they can be moved and GVN'ed.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`; one overload per foldable constant type.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3217
// Abstract base class for operations with two inputs and a result.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Whether the two inputs may be swapped without changing the result.
  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if the inputs are already canonically ordered.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  // Default (null, null) folding aborts; only subclasses that support null
  // operands override it.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // Mixed (long, int) operands: only meaningful for subclasses that override
  // it; the default aborts.
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3307
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Pretty printer for ComparisonBias.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3318
// Abstract base class for comparisons producing a boolean result.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The IfCondition this class implements, and its negation.
  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Two conditions of the same kind are equal iff their packed fields
  // (i.e. the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether this floating-point condition evaluates to true when an input is
  // NaN: NE is always true, EQ always false, GT/GE only under gt bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Whether this floating-point condition evaluates to false when an input is
  // NaN: EQ is always false, NE always true, LT/LE only under gt bias.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way compare: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point; NaN resolves per the bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3398
3399// Instruction to check if two inputs are equal to each other.
3400class HEqual FINAL : public HCondition {
3401 public:
3402  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3403      : HCondition(first, second, dex_pc) {}
3404
3405  bool IsCommutative() const OVERRIDE { return true; }
3406
3407  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3408                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3409    return MakeConstantCondition(true, GetDexPc());
3410  }
3411  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3412    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3413  }
3414  // In the following Evaluate methods, a HCompare instruction has
3415  // been merged into this HEqual instruction; evaluate it as
3416  // `Compare(x, y) == 0`.
3417  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3418    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3419                                 GetDexPc());
3420  }
3421  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3422    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3423  }
3424  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3425    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3426  }
3427
3428  DECLARE_INSTRUCTION(Equal);
3429
3430  IfCondition GetCondition() const OVERRIDE {
3431    return kCondEQ;
3432  }
3433
3434  IfCondition GetOppositeCondition() const OVERRIDE {
3435    return kCondNE;
3436  }
3437
3438 private:
3439  template <typename T> static bool Compute(T x, T y) { return x == y; }
3440
3441  DISALLOW_COPY_AND_ASSIGN(HEqual);
3442};
3443
3444class HNotEqual FINAL : public HCondition {
3445 public:
3446  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3447      : HCondition(first, second, dex_pc) {}
3448
3449  bool IsCommutative() const OVERRIDE { return true; }
3450
3451  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3452                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3453    return MakeConstantCondition(false, GetDexPc());
3454  }
3455  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3456    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3457  }
3458  // In the following Evaluate methods, a HCompare instruction has
3459  // been merged into this HNotEqual instruction; evaluate it as
3460  // `Compare(x, y) != 0`.
3461  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3462    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3463  }
3464  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3465    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3466  }
3467  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3468    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3469  }
3470
3471  DECLARE_INSTRUCTION(NotEqual);
3472
3473  IfCondition GetCondition() const OVERRIDE {
3474    return kCondNE;
3475  }
3476
3477  IfCondition GetOppositeCondition() const OVERRIDE {
3478    return kCondEQ;
3479  }
3480
3481 private:
3482  template <typename T> static bool Compute(T x, T y) { return x != y; }
3483
3484  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3485};
3486
3487class HLessThan FINAL : public HCondition {
3488 public:
3489  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3490      : HCondition(first, second, dex_pc) {}
3491
3492  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3493    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3494  }
3495  // In the following Evaluate methods, a HCompare instruction has
3496  // been merged into this HLessThan instruction; evaluate it as
3497  // `Compare(x, y) < 0`.
3498  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3499    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3500  }
3501  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3502    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3503  }
3504  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3505    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3506  }
3507
3508  DECLARE_INSTRUCTION(LessThan);
3509
3510  IfCondition GetCondition() const OVERRIDE {
3511    return kCondLT;
3512  }
3513
3514  IfCondition GetOppositeCondition() const OVERRIDE {
3515    return kCondGE;
3516  }
3517
3518 private:
3519  template <typename T> static bool Compute(T x, T y) { return x < y; }
3520
3521  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3522};
3523
3524class HLessThanOrEqual FINAL : public HCondition {
3525 public:
3526  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3527      : HCondition(first, second, dex_pc) {}
3528
3529  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3530    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3531  }
3532  // In the following Evaluate methods, a HCompare instruction has
3533  // been merged into this HLessThanOrEqual instruction; evaluate it as
3534  // `Compare(x, y) <= 0`.
3535  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3536    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3537  }
3538  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3539    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3540  }
3541  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3542    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3543  }
3544
3545  DECLARE_INSTRUCTION(LessThanOrEqual);
3546
3547  IfCondition GetCondition() const OVERRIDE {
3548    return kCondLE;
3549  }
3550
3551  IfCondition GetOppositeCondition() const OVERRIDE {
3552    return kCondGT;
3553  }
3554
3555 private:
3556  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3557
3558  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3559};
3560
3561class HGreaterThan FINAL : public HCondition {
3562 public:
3563  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3564      : HCondition(first, second, dex_pc) {}
3565
3566  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3567    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3568  }
3569  // In the following Evaluate methods, a HCompare instruction has
3570  // been merged into this HGreaterThan instruction; evaluate it as
3571  // `Compare(x, y) > 0`.
3572  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3573    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3574  }
3575  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3576    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3577  }
3578  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3579    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3580  }
3581
3582  DECLARE_INSTRUCTION(GreaterThan);
3583
3584  IfCondition GetCondition() const OVERRIDE {
3585    return kCondGT;
3586  }
3587
3588  IfCondition GetOppositeCondition() const OVERRIDE {
3589    return kCondLE;
3590  }
3591
3592 private:
3593  template <typename T> static bool Compute(T x, T y) { return x > y; }
3594
3595  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3596};
3597
3598class HGreaterThanOrEqual FINAL : public HCondition {
3599 public:
3600  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3601      : HCondition(first, second, dex_pc) {}
3602
3603  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3604    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3605  }
3606  // In the following Evaluate methods, a HCompare instruction has
3607  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3608  // `Compare(x, y) >= 0`.
3609  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3610    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3611  }
3612  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3613    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3614  }
3615  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3616    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3617  }
3618
3619  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3620
3621  IfCondition GetCondition() const OVERRIDE {
3622    return kCondGE;
3623  }
3624
3625  IfCondition GetOppositeCondition() const OVERRIDE {
3626    return kCondLT;
3627  }
3628
3629 private:
3630  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3631
3632  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3633};
3634
3635class HBelow FINAL : public HCondition {
3636 public:
3637  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3638      : HCondition(first, second, dex_pc) {}
3639
3640  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3641    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3642  }
3643  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3644    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3645  }
3646  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3647                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3648    LOG(FATAL) << DebugName() << " is not defined for float values";
3649    UNREACHABLE();
3650  }
3651  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3652                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3653    LOG(FATAL) << DebugName() << " is not defined for double values";
3654    UNREACHABLE();
3655  }
3656
3657  DECLARE_INSTRUCTION(Below);
3658
3659  IfCondition GetCondition() const OVERRIDE {
3660    return kCondB;
3661  }
3662
3663  IfCondition GetOppositeCondition() const OVERRIDE {
3664    return kCondAE;
3665  }
3666
3667 private:
3668  template <typename T> static bool Compute(T x, T y) {
3669    return MakeUnsigned(x) < MakeUnsigned(y);
3670  }
3671
3672  DISALLOW_COPY_AND_ASSIGN(HBelow);
3673};
3674
3675class HBelowOrEqual FINAL : public HCondition {
3676 public:
3677  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3678      : HCondition(first, second, dex_pc) {}
3679
3680  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3681    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3682  }
3683  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3684    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3685  }
3686  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3687                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3688    LOG(FATAL) << DebugName() << " is not defined for float values";
3689    UNREACHABLE();
3690  }
3691  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3692                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3693    LOG(FATAL) << DebugName() << " is not defined for double values";
3694    UNREACHABLE();
3695  }
3696
3697  DECLARE_INSTRUCTION(BelowOrEqual);
3698
3699  IfCondition GetCondition() const OVERRIDE {
3700    return kCondBE;
3701  }
3702
3703  IfCondition GetOppositeCondition() const OVERRIDE {
3704    return kCondA;
3705  }
3706
3707 private:
3708  template <typename T> static bool Compute(T x, T y) {
3709    return MakeUnsigned(x) <= MakeUnsigned(y);
3710  }
3711
3712  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3713};
3714
3715class HAbove FINAL : public HCondition {
3716 public:
3717  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3718      : HCondition(first, second, dex_pc) {}
3719
3720  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3721    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3722  }
3723  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3724    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3725  }
3726  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3727                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3728    LOG(FATAL) << DebugName() << " is not defined for float values";
3729    UNREACHABLE();
3730  }
3731  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3732                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3733    LOG(FATAL) << DebugName() << " is not defined for double values";
3734    UNREACHABLE();
3735  }
3736
3737  DECLARE_INSTRUCTION(Above);
3738
3739  IfCondition GetCondition() const OVERRIDE {
3740    return kCondA;
3741  }
3742
3743  IfCondition GetOppositeCondition() const OVERRIDE {
3744    return kCondBE;
3745  }
3746
3747 private:
3748  template <typename T> static bool Compute(T x, T y) {
3749    return MakeUnsigned(x) > MakeUnsigned(y);
3750  }
3751
3752  DISALLOW_COPY_AND_ASSIGN(HAbove);
3753};
3754
3755class HAboveOrEqual FINAL : public HCondition {
3756 public:
3757  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3758      : HCondition(first, second, dex_pc) {}
3759
3760  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3761    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3762  }
3763  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3764    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3765  }
3766  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3767                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3768    LOG(FATAL) << DebugName() << " is not defined for float values";
3769    UNREACHABLE();
3770  }
3771  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3772                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3773    LOG(FATAL) << DebugName() << " is not defined for double values";
3774    UNREACHABLE();
3775  }
3776
3777  DECLARE_INSTRUCTION(AboveOrEqual);
3778
3779  IfCondition GetCondition() const OVERRIDE {
3780    return kCondAE;
3781  }
3782
3783  IfCondition GetOppositeCondition() const OVERRIDE {
3784    return kCondB;
3785  }
3786
3787 private:
3788  template <typename T> static bool Compute(T x, T y) {
3789    return MakeUnsigned(x) >= MakeUnsigned(y);
3790  }
3791
3792  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3793};
3794
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way compare: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point; NaN resolves per the bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are equal iff their packed fields (i.e. the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3885
// Allocates a new object of the class given by the `cls` input
// (a HLoadClass, possibly wrapped in a HClinitCheck).
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Whether the chosen entrypoint performs instantiability/access checks.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A successful allocation never yields null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  // The entrypoint may be refined after construction (hence non-const member).
  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an interposed HClinitCheck.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3950
// Whether a recognized intrinsic still needs an environment or a dex cache;
// passed to HInvoke::SetIntrinsic() to relax the invoke's requirements.
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3955
// Coarse classification of an intrinsic's heap effects; passed to
// HInvoke::SetIntrinsic() to narrow the conservative side effects of a call.
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3962
// Whether an intrinsic may throw; passed to HInvoke::SetIntrinsic() to refine
// the invoke's CanThrow() flag.
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3967
// Abstract base class for all invoke instructions. Holds the call arguments
// (plus possibly extra non-argument inputs), and keeps the invoke type, return
// type and can-throw flag in packed fields. Also carries intrinsic information
// once an invoke has been recognized as an intrinsic.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic and refine its environment
  // requirement, side effects and throwing behavior accordingly.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics without write side effects are movable; a generic invoke
  // is assumed to read and write all fields/arrays (see the constructor).
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal (e.g. for GVN) only when both are the same,
  // non-kNone, intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed field layout (after the generic bits): invoke type, return type,
  // then the can-throw flag. Subclasses add their own fields after
  // kNumberOfInvokePackedBits.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume any invoke can throw; refined later via
    // SetCanThrow() or SetIntrinsic().
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4077
4078class HInvokeUnresolved FINAL : public HInvoke {
4079 public:
4080  HInvokeUnresolved(ArenaAllocator* arena,
4081                    uint32_t number_of_arguments,
4082                    Primitive::Type return_type,
4083                    uint32_t dex_pc,
4084                    uint32_t dex_method_index,
4085                    InvokeType invoke_type)
4086      : HInvoke(arena,
4087                number_of_arguments,
4088                0u /* number_of_other_inputs */,
4089                return_type,
4090                dex_pc,
4091                dex_method_index,
4092                nullptr,
4093                invoke_type) {
4094  }
4095
4096  DECLARE_INSTRUCTION(InvokeUnresolved);
4097
4098 private:
4099  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
4100};
4101
4102class HInvokePolymorphic FINAL : public HInvoke {
4103 public:
4104  HInvokePolymorphic(ArenaAllocator* arena,
4105                     uint32_t number_of_arguments,
4106                     Primitive::Type return_type,
4107                     uint32_t dex_pc,
4108                     uint32_t dex_method_index)
4109      : HInvoke(arena,
4110                number_of_arguments,
4111                0u /* number_of_other_inputs */,
4112                return_type,
4113                dex_pc,
4114                dex_method_index,
4115                nullptr,
4116                kVirtual) {}
4117
4118  DECLARE_INSTRUCTION(InvokePolymorphic);
4119
4120 private:
4121  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
4122};
4123
// Invoke of a statically-bound method (static or direct call). Besides the
// regular arguments it may carry extra inputs: an HCurrentMethod node and/or
// an explicit clinit check (see GetSpecialInputIndex() and
// IsStaticWithExplicitClinitCheck()).
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                // NOTE(review): only the current-method and explicit-clinit-check inputs
                // are actually counted below; no term is added for the string factory
                // case mentioned above -- confirm the comment is up to date.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Replace the dispatch info, removing the HCurrentMethod special input when
  // the new method load kind no longer requires it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  // Insert the (single) special input at GetSpecialInputIndex(), i.e. right
  // after the regular arguments and before any explicit clinit check.
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    // NOTE(review): method_load_data is 64-bit; this relies on the offset
    // fitting in 32 bits and the conversion truncates implicitly -- confirm
    // whether an explicit checked cast is warranted.
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit intialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // Clinit check requirement packed after the base HInvoke fields.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
4376std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4377std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4378
4379class HInvokeVirtual FINAL : public HInvoke {
4380 public:
4381  HInvokeVirtual(ArenaAllocator* arena,
4382                 uint32_t number_of_arguments,
4383                 Primitive::Type return_type,
4384                 uint32_t dex_pc,
4385                 uint32_t dex_method_index,
4386                 ArtMethod* resolved_method,
4387                 uint32_t vtable_index)
4388      : HInvoke(arena,
4389                number_of_arguments,
4390                0u,
4391                return_type,
4392                dex_pc,
4393                dex_method_index,
4394                resolved_method,
4395                kVirtual),
4396        vtable_index_(vtable_index) {}
4397
4398  bool CanBeNull() const OVERRIDE {
4399    switch (GetIntrinsic()) {
4400      case Intrinsics::kThreadCurrentThread:
4401      case Intrinsics::kStringBufferAppend:
4402      case Intrinsics::kStringBufferToString:
4403      case Intrinsics::kStringBuilderAppend:
4404      case Intrinsics::kStringBuilderToString:
4405        return false;
4406      default:
4407        return HInvoke::CanBeNull();
4408    }
4409  }
4410
4411  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4412    // TODO: Add implicit null checks in intrinsics.
4413    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4414  }
4415
4416  uint32_t GetVTableIndex() const { return vtable_index_; }
4417
4418  DECLARE_INSTRUCTION(InvokeVirtual);
4419
4420 private:
4421  // Cached value of the resolved method, to avoid needing the mutator lock.
4422  const uint32_t vtable_index_;
4423
4424  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
4425};
4426
4427class HInvokeInterface FINAL : public HInvoke {
4428 public:
4429  HInvokeInterface(ArenaAllocator* arena,
4430                   uint32_t number_of_arguments,
4431                   Primitive::Type return_type,
4432                   uint32_t dex_pc,
4433                   uint32_t dex_method_index,
4434                   ArtMethod* resolved_method,
4435                   uint32_t imt_index)
4436      : HInvoke(arena,
4437                number_of_arguments,
4438                0u,
4439                return_type,
4440                dex_pc,
4441                dex_method_index,
4442                resolved_method,
4443                kInterface),
4444        imt_index_(imt_index) {}
4445
4446  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4447    // TODO: Add implicit null checks in intrinsics.
4448    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4449  }
4450
4451  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
4452    // The assembly stub currently needs it.
4453    return true;
4454  }
4455
4456  uint32_t GetImtIndex() const { return imt_index_; }
4457  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4458
4459  DECLARE_INSTRUCTION(InvokeInterface);
4460
4461 private:
4462  // Cached value of the resolved method, to avoid needing the mutator lock.
4463  const uint32_t imt_index_;
4464
4465  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4466};
4467
4468class HNeg FINAL : public HUnaryOperation {
4469 public:
4470  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4471      : HUnaryOperation(result_type, input, dex_pc) {
4472    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4473  }
4474
4475  template <typename T> static T Compute(T x) { return -x; }
4476
4477  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4478    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4479  }
4480  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4481    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4482  }
4483  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4484    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4485  }
4486  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4487    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4488  }
4489
4490  DECLARE_INSTRUCTION(Neg);
4491
4492 private:
4493  DISALLOW_COPY_AND_ASSIGN(HNeg);
4494};
4495
4496class HNewArray FINAL : public HExpression<2> {
4497 public:
4498  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4499      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc) {
4500    SetRawInputAt(0, cls);
4501    SetRawInputAt(1, length);
4502  }
4503
4504  // Calls runtime so needs an environment.
4505  bool NeedsEnvironment() const OVERRIDE { return true; }
4506
4507  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4508  bool CanThrow() const OVERRIDE { return true; }
4509
4510  bool CanBeNull() const OVERRIDE { return false; }
4511
4512  HLoadClass* GetLoadClass() const {
4513    DCHECK(InputAt(0)->IsLoadClass());
4514    return InputAt(0)->AsLoadClass();
4515  }
4516
4517  HInstruction* GetLength() const {
4518    return InputAt(1);
4519  }
4520
4521  DECLARE_INSTRUCTION(NewArray);
4522
4523 private:
4524  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4525};
4526
4527class HAdd FINAL : public HBinaryOperation {
4528 public:
4529  HAdd(Primitive::Type result_type,
4530       HInstruction* left,
4531       HInstruction* right,
4532       uint32_t dex_pc = kNoDexPc)
4533      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4534
4535  bool IsCommutative() const OVERRIDE { return true; }
4536
4537  template <typename T> static T Compute(T x, T y) { return x + y; }
4538
4539  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4540    return GetBlock()->GetGraph()->GetIntConstant(
4541        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4542  }
4543  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4544    return GetBlock()->GetGraph()->GetLongConstant(
4545        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4546  }
4547  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4548    return GetBlock()->GetGraph()->GetFloatConstant(
4549        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4550  }
4551  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4552    return GetBlock()->GetGraph()->GetDoubleConstant(
4553        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4554  }
4555
4556  DECLARE_INSTRUCTION(Add);
4557
4558 private:
4559  DISALLOW_COPY_AND_ASSIGN(HAdd);
4560};
4561
4562class HSub FINAL : public HBinaryOperation {
4563 public:
4564  HSub(Primitive::Type result_type,
4565       HInstruction* left,
4566       HInstruction* right,
4567       uint32_t dex_pc = kNoDexPc)
4568      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4569
4570  template <typename T> static T Compute(T x, T y) { return x - y; }
4571
4572  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4573    return GetBlock()->GetGraph()->GetIntConstant(
4574        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4575  }
4576  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4577    return GetBlock()->GetGraph()->GetLongConstant(
4578        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4579  }
4580  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4581    return GetBlock()->GetGraph()->GetFloatConstant(
4582        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4583  }
4584  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4585    return GetBlock()->GetGraph()->GetDoubleConstant(
4586        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4587  }
4588
4589  DECLARE_INSTRUCTION(Sub);
4590
4591 private:
4592  DISALLOW_COPY_AND_ASSIGN(HSub);
4593};
4594
4595class HMul FINAL : public HBinaryOperation {
4596 public:
4597  HMul(Primitive::Type result_type,
4598       HInstruction* left,
4599       HInstruction* right,
4600       uint32_t dex_pc = kNoDexPc)
4601      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4602
4603  bool IsCommutative() const OVERRIDE { return true; }
4604
4605  template <typename T> static T Compute(T x, T y) { return x * y; }
4606
4607  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4608    return GetBlock()->GetGraph()->GetIntConstant(
4609        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4610  }
4611  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4612    return GetBlock()->GetGraph()->GetLongConstant(
4613        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4614  }
4615  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4616    return GetBlock()->GetGraph()->GetFloatConstant(
4617        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4618  }
4619  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4620    return GetBlock()->GetGraph()->GetDoubleConstant(
4621        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4622  }
4623
4624  DECLARE_INSTRUCTION(Mul);
4625
4626 private:
4627  DISALLOW_COPY_AND_ASSIGN(HMul);
4628};
4629
4630class HDiv FINAL : public HBinaryOperation {
4631 public:
4632  HDiv(Primitive::Type result_type,
4633       HInstruction* left,
4634       HInstruction* right,
4635       uint32_t dex_pc)
4636      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4637
4638  template <typename T>
4639  T ComputeIntegral(T x, T y) const {
4640    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4641    // Our graph structure ensures we never have 0 for `y` during
4642    // constant folding.
4643    DCHECK_NE(y, 0);
4644    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4645    return (y == -1) ? -x : x / y;
4646  }
4647
4648  template <typename T>
4649  T ComputeFP(T x, T y) const {
4650    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4651    return x / y;
4652  }
4653
4654  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4655    return GetBlock()->GetGraph()->GetIntConstant(
4656        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4657  }
4658  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4659    return GetBlock()->GetGraph()->GetLongConstant(
4660        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4661  }
4662  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4663    return GetBlock()->GetGraph()->GetFloatConstant(
4664        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4665  }
4666  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4667    return GetBlock()->GetGraph()->GetDoubleConstant(
4668        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4669  }
4670
4671  DECLARE_INSTRUCTION(Div);
4672
4673 private:
4674  DISALLOW_COPY_AND_ASSIGN(HDiv);
4675};
4676
// Remainder of a division: `left % right` for integral types, fmod semantics
// for floating-point types.
class HRem FINAL : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // Constant-folds the integral remainder `x % y`.
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    // (INT_MIN % -1 overflows in hardware even though the result is 0.)
    return (y == -1) ? 0 : x % y;
  }

  // Constant-folds the floating-point remainder via std::fmod.
  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  // Constant-folding entry points for each operand type.
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};
4723
// Checks that the divisor of an integral division/remainder is non-zero and
// throws ArithmeticException otherwise. Acts as a pass-through of its input.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check's type always follows the (possibly updated) type of its input.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Two zero checks are interchangeable as long as their inputs match, which
  // GVN verifies separately; hence unconditionally true here.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment to be able to deoptimize/throw with a correct stack.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4749
4750class HShl FINAL : public HBinaryOperation {
4751 public:
4752  HShl(Primitive::Type result_type,
4753       HInstruction* value,
4754       HInstruction* distance,
4755       uint32_t dex_pc = kNoDexPc)
4756      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4757    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4758    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4759  }
4760
4761  template <typename T>
4762  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4763    return value << (distance & max_shift_distance);
4764  }
4765
4766  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4767    return GetBlock()->GetGraph()->GetIntConstant(
4768        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4769  }
4770  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4771    return GetBlock()->GetGraph()->GetLongConstant(
4772        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4773  }
4774  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4775                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4776    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4777    UNREACHABLE();
4778  }
4779  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4780                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4781    LOG(FATAL) << DebugName() << " is not defined for float values";
4782    UNREACHABLE();
4783  }
4784  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4785                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4786    LOG(FATAL) << DebugName() << " is not defined for double values";
4787    UNREACHABLE();
4788  }
4789
4790  DECLARE_INSTRUCTION(Shl);
4791
4792 private:
4793  DISALLOW_COPY_AND_ASSIGN(HShl);
4794};
4795
// Arithmetic (sign-extending) right shift. Only the low bits of the distance
// are used (5 for int, 6 for long), matching Java shr semantics.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Constant-folds `value >> (distance & max_shift_distance)`.
  // NOTE(review): right shift of a negative signed value is implementation-
  // defined in C++; this relies on the compiler producing an arithmetic shift,
  // which all supported toolchains do.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Dex has no shift with a long distance and no FP shifts; these overloads
  // exist only to satisfy the HBinaryOperation interface.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4841
// Logical (zero-extending) right shift. Only the low bits of the distance are
// used (5 for int, 6 for long), matching Java ushr semantics.
class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Constant-folds the logical shift: the value is reinterpreted as unsigned
  // so the right shift inserts zero bits regardless of the sign bit.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Dex has no shift with a long distance and no FP shifts; these overloads
  // exist only to satisfy the HBinaryOperation interface.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};
4889
// Bitwise AND of two integral operands.
class HAnd FINAL : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // Constant-folding helper.
  template <typename T> static T Compute(T x, T y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Bitwise operations are not defined for FP operands in dex.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};
4926
// Bitwise OR of two integral operands.
class HOr FINAL : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // Constant-folding helper.
  template <typename T> static T Compute(T x, T y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Bitwise operations are not defined for FP operands in dex.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Or);

 private:
  DISALLOW_COPY_AND_ASSIGN(HOr);
};
4963
// Bitwise XOR of two integral operands.
class HXor FINAL : public HBinaryOperation {
 public:
  HXor(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // Constant-folding helper.
  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // Bitwise operations are not defined for FP operands in dex.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 private:
  DISALLOW_COPY_AND_ASSIGN(HXor);
};
5000
5001class HRor FINAL : public HBinaryOperation {
5002 public:
5003  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
5004    : HBinaryOperation(result_type, value, distance) {
5005    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
5006    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
5007  }
5008
5009  template <typename T>
5010  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5011    typedef typename std::make_unsigned<T>::type V;
5012    V ux = static_cast<V>(value);
5013    if ((distance & max_shift_value) == 0) {
5014      return static_cast<T>(ux);
5015    } else {
5016      const V reg_bits = sizeof(T) * 8;
5017      return static_cast<T>(ux >> (distance & max_shift_value)) |
5018                           (value << (reg_bits - (distance & max_shift_value)));
5019    }
5020  }
5021
5022  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5023    return GetBlock()->GetGraph()->GetIntConstant(
5024        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5025  }
5026  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5027    return GetBlock()->GetGraph()->GetLongConstant(
5028        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5029  }
5030  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5031                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5032    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5033    UNREACHABLE();
5034  }
5035  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5036                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5037    LOG(FATAL) << DebugName() << " is not defined for float values";
5038    UNREACHABLE();
5039  }
5040  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5041                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5042    LOG(FATAL) << DebugName() << " is not defined for double values";
5043    UNREACHABLE();
5044  }
5045
5046  DECLARE_INSTRUCTION(Ror);
5047
5048 private:
5049  DISALLOW_COPY_AND_ASSIGN(HRor);
5050};
5051
5052// The value of a parameter in this method. Its location depends on
5053// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  // `is_this` marks the implicit receiver of an instance method; it also
  // determines the initial can-be-null state (the receiver is never null).
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const ATTRIBUTE_UNUSED { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5095
// Bitwise complement (~x) of an integral value.
class HNot FINAL : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Constant-folding helper.
  template <typename T> static T Compute(T x) { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  // Bitwise complement is not defined for FP operands.
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};
5128
// Logical negation of a boolean value (0 <-> 1).
class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Constant-folding helper; the input must already be 0 or 1.
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  // Booleans are stored as ints in dex; other types are impossible here.
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};
5165
// Conversion of the input value to a different primitive type.
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5194
// Sentinel for "no register assigned"; -1 converts to the maximum uint32_t.
static constexpr uint32_t kNoRegNumber = -1;
5196
// Checks that its input is non-null and throws NullPointerException otherwise.
// Acts as a pass-through of its input, which is guaranteed non-null afterwards.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment to deoptimize/throw with a correct stack.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // After the check succeeds, the value is known to be non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5223
5224// Embeds an ArtField and all the information required by the compiler. We cache
5225// that information to avoid requiring the mutator lock every time we need it.
5226class FieldInfo : public ValueObject {
5227 public:
5228  FieldInfo(ArtField* field,
5229            MemberOffset field_offset,
5230            Primitive::Type field_type,
5231            bool is_volatile,
5232            uint32_t index,
5233            uint16_t declaring_class_def_index,
5234            const DexFile& dex_file)
5235      : field_(field),
5236        field_offset_(field_offset),
5237        field_type_(field_type),
5238        is_volatile_(is_volatile),
5239        index_(index),
5240        declaring_class_def_index_(declaring_class_def_index),
5241        dex_file_(dex_file) {}
5242
5243  ArtField* GetField() const { return field_; }
5244  MemberOffset GetFieldOffset() const { return field_offset_; }
5245  Primitive::Type GetFieldType() const { return field_type_; }
5246  uint32_t GetFieldIndex() const { return index_; }
5247  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
5248  const DexFile& GetDexFile() const { return dex_file_; }
5249  bool IsVolatile() const { return is_volatile_; }
5250
5251 private:
5252  ArtField* const field_;
5253  const MemberOffset field_offset_;
5254  const Primitive::Type field_type_;
5255  const bool is_volatile_;
5256  const uint32_t index_;
5257  const uint16_t declaring_class_def_index_;
5258  const DexFile& dex_file_;
5259};
5260
// Read of an instance field: input 0 is the object reference.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads have ordering semantics and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equal for GVN purposes when they read the same offset
  // (the objects are compared as inputs separately).
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5310
// Write of an instance field: input 0 is the object, input 1 the value.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null until proven otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5360
// Read of an array element: input 0 is the array, input 1 the index.
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Whether `other` is the int/FP-typed twin of this get created for type
  // disambiguation: same block, array, index and dex pc, opposite type class.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5423
5424class HArraySet FINAL : public HTemplateInstruction<3> {
5425 public:
  // Inputs: 0 = array, 1 = index, 2 = value. A type check (and thus the
  // ability to throw ArrayStoreException) is needed only for reference stores.
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }
5442
  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }
5455
  // Called by optimizations once they prove the store cannot fail the check.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  // Called once the stored value is proven non-null.
  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }
5467
  // Packed-flag and input accessors.
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }
5477
5478  Primitive::Type GetComponentType() const {
5479    // The Dex format does not type floating point index operations. Since the
5480    // `expected_component_type_` is set during building and can therefore not
5481    // be correct, we also check what is the value type. If it is a floating
5482    // point type, we must use that type.
5483    Primitive::Type value_type = GetValue()->GetType();
5484    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
5485        ? value_type
5486        : GetRawExpectedComponentType();
5487  }
5488
5489  Primitive::Type GetRawExpectedComponentType() const {
5490    return GetPackedField<ExpectedComponentTypeField>();
5491  }
5492
5493  void ComputeSideEffects() {
5494    Primitive::Type type = GetComponentType();
5495    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
5496        SideEffectsForArchRuntimeCalls(type)));
5497  }
5498
5499  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
5500    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
5501  }
5502
5503  DECLARE_INSTRUCTION(ArraySet);
5504
5505 private:
5506  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
5507  static constexpr size_t kFieldExpectedComponentTypeSize =
5508      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5509  static constexpr size_t kFlagNeedsTypeCheck =
5510      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
5511  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
5512  // Cached information for the reference_type_info_ so that codegen
5513  // does not need to inspect the static type.
5514  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
5515  static constexpr size_t kNumberOfArraySetPackedBits =
5516      kFlagStaticTypeOfArrayIsObjectArray + 1;
5517  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5518  using ExpectedComponentTypeField =
5519      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
5520
5521  DISALLOW_COPY_AND_ASSIGN(HArraySet);
5522};
5523
// Returns the length of the array (or String, see kFlagIsStringLength below)
// given as input 0. Always produces an int.
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  // The length depends only on the array input, so two length instructions
  // over the same input are interchangeable (enables GVN/code motion).
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Reading the length dereferences the array, so a null check on the same
  // reference can be folded into this instruction.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5559
5560class HBoundsCheck FINAL : public HExpression<2> {
5561 public:
5562  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5563  // constructor.
5564  HBoundsCheck(HInstruction* index,
5565               HInstruction* length,
5566               uint32_t dex_pc,
5567               bool string_char_at = false)
5568      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5569    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5570    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5571    SetRawInputAt(0, index);
5572    SetRawInputAt(1, length);
5573  }
5574
5575  bool CanBeMoved() const OVERRIDE { return true; }
5576  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5577    return true;
5578  }
5579
5580  bool NeedsEnvironment() const OVERRIDE { return true; }
5581
5582  bool CanThrow() const OVERRIDE { return true; }
5583
5584  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5585
5586  HInstruction* GetIndex() const { return InputAt(0); }
5587
5588  DECLARE_INSTRUCTION(BoundsCheck);
5589
5590 private:
5591  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5592
5593  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5594};
5595
// Checkpoint at which the runtime may suspend the executing thread; declared
// with the CanTriggerGC() side effect.
class HSuspendCheck FINAL : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  // An environment is required since the thread can be suspended here
  // (this instruction can trigger GC, see the side effects above).
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};
5617
// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  // The environment records the values of the dex registers (local variables)
  // that the debugger needs at this dex_pc.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};
5634
5635/**
5636 * Instruction to load a Class object.
5637 */
class HLoadClass FINAL : public HInstruction {
 public:
  // Determines how to load the Class.
  enum class LoadKind {
    // We cannot load this class. See HSharpening::SharpenLoadClass.
    kInvalid = -1,

    // Use the Class* from the method's own ArtMethod*.
    kReferrersClass,

    // Use boot image Class* address that will be known at link time.
    // Used for boot image classes referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image Class* address that will be known at link time.
    // Used for boot image classes referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image Class* address, embedded in the code by the codegen.
    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load from resolved types array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    kLast = kDexCacheViaMethod
  };

  HLoadClass(HCurrentMethod* current_method,
             dex::TypeIndex type_index,
             const DexFile& dex_file,
             Handle<mirror::Class> klass,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        type_index_(type_index),
        dex_file_(dex_file),
        klass_(klass),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class || !needs_access_check);

    // Start from the most conservative non-referrer kind; SetLoadKind() may
    // later upgrade it (see HSharpening::SharpenLoadClass).
    SetPackedField<LoadKindField>(
        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagIsInBootImage>(false);
    SetPackedFlag<kFlagGenerateClInitCheck>(false);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const;

  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }

  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime();
  }

  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
  }

  // Whether code generated for this load may have to call into the runtime:
  // access checks, clinit checks, and the slow-path load kinds all do.
  bool CanCallRuntime() const {
    return NeedsAccessCheck() ||
           MustGenerateClinitCheck() ||
           GetLoadKind() == LoadKind::kDexCacheViaMethod ||
           GetLoadKind() == LoadKind::kBssEntry;
  }

  bool CanThrow() const OVERRIDE {
    return NeedsAccessCheck() ||
           MustGenerateClinitCheck() ||
           // If the class is in the boot image, the lookup in the runtime call cannot throw.
           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
           ((GetLoadKind() == LoadKind::kDexCacheViaMethod ||
             GetLoadKind() == LoadKind::kBssEntry) &&
            !IsInBootImage());
  }

  ReferenceTypeInfo GetLoadedClassRTI() {
    return loaded_class_rti_;
  }

  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
    // Make sure we only set exact types (the loaded class should never be merged).
    DCHECK(rti.IsExact());
    loaded_class_rti_ = rti;
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }

  void MarkInBootImage() {
    SetPackedFlag<kFlagIsInBootImage>(true);
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Exposes zero or one input: the special input, when set (see special_input_).
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  Handle<mirror::Class> GetClass() const {
    return klass_;
  }

  DECLARE_INSTRUCTION(LoadClass);

 private:
  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
  // Whether this instruction must generate the initialization check.
  // Used for code generation.
  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  // NOTE(review): sibling instructions assert with `<=`; the stricter `<` here
  // reserves one bit — confirm whether that is intentional.
  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  static bool HasTypeReference(LoadKind load_kind) {
    return load_kind == LoadKind::kReferrersClass ||
        load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBssEntry ||
        load_kind == LoadKind::kDexCacheViaMethod;
  }

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  // A type index and dex file where the class can be accessed. The dex file can be:
  // - The compiling method's dex file if the class is defined there too.
  // - The compiling method's dex file if the class is referenced there.
  // - The dex file where the class is defined. When the load kind can only be
  //   kBssEntry or kDexCacheViaMethod, we cannot emit code for this `HLoadClass`.
  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;

  Handle<mirror::Class> klass_;

  // Reference type info for the loaded class; invalid until SetLoadedClassRTI().
  ReferenceTypeInfo loaded_class_rti_;

  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
};
5832std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5833
5834// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5835inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5836  // The special input is used for PC-relative loads on some architectures,
5837  // including literal pool loads, which are PC-relative too.
5838  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5839         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5840         GetLoadKind() == LoadKind::kBootImageAddress ||
5841         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5842  DCHECK(special_input_.GetInstruction() == nullptr);
5843  special_input_ = HUserRecord<HInstruction*>(special_input);
5844  special_input->AddUseAt(this, 0);
5845}
5846
// Instruction to load a String object (analogous to HLoadClass above).
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    kLast = kDexCacheViaMethod,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Default kind; SetLoadKind() may later pick a faster one.
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Exposes zero or one input: the special input, when set (see special_input_).
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  // The String, if known; set via SetString(), empty otherwise.
  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5977std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5978
5979// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5980inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5981  // The special input is used for PC-relative loads on some architectures,
5982  // including literal pool loads, which are PC-relative too.
5983  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5984         GetLoadKind() == LoadKind::kBssEntry ||
5985         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5986         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5987  // HLoadString::GetInputRecords() returns an empty array at this point,
5988  // so use the GetInputRecords() from the base class to set the input record.
5989  DCHECK(special_input_.GetInstruction() == nullptr);
5990  special_input_ = HUserRecord<HInstruction*>(special_input);
5991  special_input->AddUseAt(this, 0);
5992}
5993
5994/**
5995 * Performs an initialization check on its Class object input.
5996 */
5997class HClinitCheck FINAL : public HExpression<1> {
5998 public:
5999  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6000      : HExpression(
6001            Primitive::kPrimNot,
6002            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
6003            dex_pc) {
6004    SetRawInputAt(0, constant);
6005  }
6006
6007  bool CanBeMoved() const OVERRIDE { return true; }
6008  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6009    return true;
6010  }
6011
6012  bool NeedsEnvironment() const OVERRIDE {
6013    // May call runtime to initialize the class.
6014    return true;
6015  }
6016
6017  bool CanThrow() const OVERRIDE { return true; }
6018
6019  HLoadClass* GetLoadClass() const {
6020    DCHECK(InputAt(0)->IsLoadClass());
6021    return InputAt(0)->AsLoadClass();
6022  }
6023
6024  DECLARE_INSTRUCTION(ClinitCheck);
6025
6026 private:
6027  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
6028};
6029
6030class HStaticFieldGet FINAL : public HExpression<1> {
6031 public:
6032  HStaticFieldGet(HInstruction* cls,
6033                  ArtField* field,
6034                  Primitive::Type field_type,
6035                  MemberOffset field_offset,
6036                  bool is_volatile,
6037                  uint32_t field_idx,
6038                  uint16_t declaring_class_def_index,
6039                  const DexFile& dex_file,
6040                  uint32_t dex_pc)
6041      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
6042        field_info_(field,
6043                    field_offset,
6044                    field_type,
6045                    is_volatile,
6046                    field_idx,
6047                    declaring_class_def_index,
6048                    dex_file) {
6049    SetRawInputAt(0, cls);
6050  }
6051
6052
6053  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
6054
6055  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
6056    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6057    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6058  }
6059
6060  size_t ComputeHashCode() const OVERRIDE {
6061    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6062  }
6063
6064  const FieldInfo& GetFieldInfo() const { return field_info_; }
6065  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6066  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
6067  bool IsVolatile() const { return field_info_.IsVolatile(); }
6068
6069  DECLARE_INSTRUCTION(StaticFieldGet);
6070
6071 private:
6072  const FieldInfo field_info_;
6073
6074  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
6075};
6076
// Stores `value` (input 1) into a static field of the class referenced by
// `cls` (input 0).
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null; optimizations can
    // refine this via ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6123
// Read of an instance field that could not be resolved at compile time;
// hence it needs an environment and can throw.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  // Index identifying the field; used by the runtime to resolve it.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
6148
6149class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6150 public:
6151  HUnresolvedInstanceFieldSet(HInstruction* obj,
6152                              HInstruction* value,
6153                              Primitive::Type field_type,
6154                              uint32_t field_index,
6155                              uint32_t dex_pc)
6156      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6157        field_index_(field_index) {
6158    SetPackedField<FieldTypeField>(field_type);
6159    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6160    SetRawInputAt(0, obj);
6161    SetRawInputAt(1, value);
6162  }
6163
6164  bool NeedsEnvironment() const OVERRIDE { return true; }
6165  bool CanThrow() const OVERRIDE { return true; }
6166
6167  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6168  uint32_t GetFieldIndex() const { return field_index_; }
6169
6170  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6171
6172 private:
6173  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6174  static constexpr size_t kFieldFieldTypeSize =
6175      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6176  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6177      kFieldFieldType + kFieldFieldTypeSize;
6178  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6179                "Too many packed fields.");
6180  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6181
6182  const uint32_t field_index_;
6183
6184  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6185};
6186
// Read of a static field that could not be resolved at compile time;
// hence it needs an environment and can throw.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  // Index identifying the field; used by the runtime to resolve it.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6209
// Write of a static field that could not be resolved at compile time;
// hence it needs an environment and can throw.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  // Index identifying the field; used by the runtime to resolve it.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6245
6246// Implement the move-exception DEX instruction.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // The loaded exception reference is declared non-null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6259
6260// Implicit part of move-exception which clears thread-local exception storage.
6261// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  // AllWrites: conservatively modeled as writing memory; the instruction
  // clears the thread-local exception storage (see comment above the class).
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6272
6273class HThrow FINAL : public HTemplateInstruction<1> {
6274 public:
6275  HThrow(HInstruction* exception, uint32_t dex_pc)
6276      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6277    SetRawInputAt(0, exception);
6278  }
6279
6280  bool IsControlFlow() const OVERRIDE { return true; }
6281
6282  bool NeedsEnvironment() const OVERRIDE { return true; }
6283
6284  bool CanThrow() const OVERRIDE { return true; }
6285
6286
6287  DECLARE_INSTRUCTION(Throw);
6288
6289 private:
6290  DISALLOW_COPY_AND_ASSIGN(HThrow);
6291};
6292
6293/**
6294 * Implementation strategies for the code generator of a HInstanceOf
6295 * or `HCheckCast`.
6296 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck,            // No optimization yet when checking against a generic array.
  kLast = kArrayCheck     // Sizing marker for packed bit fields; not a distinct kind.
};
6307
6308std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6309
// Implements the `instance-of` check: yields a boolean telling whether
// `object` is an instance of the class loaded by `constant`, using the
// strategy selected in `check_kind`.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Only checks that may call into the runtime need an environment.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed-field layout: the type check kind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6363
// Bounds the reference type information of its input; the {Get,Set}Upper*
// accessors below are reserved for reference type propagation.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Cannot claim nullability beyond what the upper bound allows.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6407
// Checks that `object` can be cast to the class loaded by `constant`,
// using the strategy selected in `check_kind`; throws on failure.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed-field layout: the type check kind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6452
6453/**
6454 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6455 * @details We define the combined barrier types that are actually required
6456 * by the Java Memory Model, rather than using exactly the terminology from
6457 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6458 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6459 * store atomicity issues, and the recipes there are not always entirely sufficient.
6460 * The current recipe is as follows:
6461 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6462 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6463 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6464 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6465 *    class has final fields.
6466 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6467 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6468 */
enum MemBarrierKind {
  kAnyStore,       // (LoadStore | StoreStore): release barrier before a volatile store.
  kLoadAny,        // (LoadLoad | LoadStore): acquire barrier after a volatile load.
  kStoreStore,     // Orders stores, e.g. before constructor return when the class has final fields.
  kAnyAny,         // Full barrier, used after a volatile store.
  kNTStoreStore,   // Orders non-temporal stores w.r.t. later store-to-memory instructions.
  kLastBarrierKind = kNTStoreStore
};
6477std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6478
// Explicit memory barrier; `barrier_kind` selects the required ordering
// (see the MemBarrierKind documentation above).
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // The barrier kind is stored in the instruction's packed bits.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6503
6504class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6505 public:
6506  enum class OperationKind {
6507    kEnter,
6508    kExit,
6509    kLast = kExit
6510  };
6511
6512  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6513    : HTemplateInstruction(
6514          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6515          dex_pc) {
6516    SetPackedField<OperationKindField>(kind);
6517    SetRawInputAt(0, object);
6518  }
6519
6520  // Instruction may go into runtime, so we need an environment.
6521  bool NeedsEnvironment() const OVERRIDE { return true; }
6522
6523  bool CanThrow() const OVERRIDE {
6524    // Verifier guarantees that monitor-exit cannot throw.
6525    // This is important because it allows the HGraphBuilder to remove
6526    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6527    return IsEnter();
6528  }
6529
6530  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6531  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6532
6533  DECLARE_INSTRUCTION(MonitorOperation);
6534
6535 private:
6536  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6537  static constexpr size_t kFieldOperationKindSize =
6538      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6539  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6540      kFieldOperationKind + kFieldOperationKindSize;
6541  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6542                "Too many packed fields.");
6543  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6544
6545 private:
6546  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6547};
6548
// Returns `true_value` or `false_value` depending on `condition`; intended
// to be implementable without control flow (e.g. as a conditional move).
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both value inputs must agree on their (phi) type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result can be null only if one of the selected values can.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6585
// A single source->destination move within an HParallelMove. The parallel
// move resolver encodes its progress in the validity of the two locations
// (see MarkPending() and Eliminate()).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // A move is pending while its destination is cleared but its source is not.
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6659
6660std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6661
6662static constexpr size_t kDefaultNumberOfMoves = 4;
6663
// A set of moves that must behave as if performed simultaneously; sequenced
// later by the parallel move resolver (see MoveOperands above).
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      // Debug-only sanity checks: at most one move per instruction (modulo
      // the spill-slot special case below), and no two moves may target
      // overlapping destinations.
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6719
6720}  // namespace art
6721
6722#include "nodes_vector.h"
6723
6724#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6725#include "nodes_shared.h"
6726#endif
6727#ifdef ART_ENABLE_CODEGEN_arm
6728#include "nodes_arm.h"
6729#endif
6730#ifdef ART_ENABLE_CODEGEN_mips
6731#include "nodes_mips.h"
6732#endif
6733#ifdef ART_ENABLE_CODEGEN_x86
6734#include "nodes_x86.h"
6735#endif
6736
6737namespace art {
6738
// Base class for visitors over all instructions of a graph. The default
// Visit##name implementations forward to VisitInstruction, so subclasses
// only need to override the cases they care about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by the default Visit##name methods; no-op by default.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6768
// Visitor whose Visit##name methods forward to the visit method of the
// instruction's super class, enabling handling by instruction category.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6785
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
6789class HBlocksInLoopIterator : public ValueObject {
6790 public:
6791  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6792      : blocks_in_loop_(info.GetBlocks()),
6793        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6794        index_(0) {
6795    if (!blocks_in_loop_.IsBitSet(index_)) {
6796      Advance();
6797    }
6798  }
6799
6800  bool Done() const { return index_ == blocks_.size(); }
6801  HBasicBlock* Current() const { return blocks_[index_]; }
6802  void Advance() {
6803    ++index_;
6804    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6805      if (blocks_in_loop_.IsBitSet(index_)) {
6806        break;
6807      }
6808    }
6809  }
6810
6811 private:
6812  const BitVector& blocks_in_loop_;
6813  const ArenaVector<HBasicBlock*>& blocks_;
6814  size_t index_;
6815
6816  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6817};
6818
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Position on the first block of the reverse post order that belongs
    // to the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    // Skip blocks that are not part of the loop.
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
6851
6852// Returns int64_t value of a properly typed constant.
6853inline int64_t Int64FromConstant(HConstant* constant) {
6854  if (constant->IsIntConstant()) {
6855    return constant->AsIntConstant()->GetValue();
6856  } else if (constant->IsLongConstant()) {
6857    return constant->AsLongConstant()->GetValue();
6858  } else {
6859    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6860    return 0;
6861  }
6862}
6863
6864// Returns true iff instruction is an integral constant (and sets value on success).
6865inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
6866  if (instruction->IsIntConstant()) {
6867    *value = instruction->AsIntConstant()->GetValue();
6868    return true;
6869  } else if (instruction->IsLongConstant()) {
6870    *value = instruction->AsLongConstant()->GetValue();
6871    return true;
6872  } else if (instruction->IsNullConstant()) {
6873    *value = 0;
6874    return true;
6875  }
6876  return false;
6877}
6878
6879#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6880  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6881  inline const H##type* HInstruction::As##type() const {                       \
6882    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6883  }                                                                            \
6884  inline H##type* HInstruction::As##type() {                                   \
6885    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6886  }
6887
6888  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6889#undef INSTRUCTION_TYPE_CHECK
6890
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting at location `after`. Blocks following `after` are moved accordingly.
6893inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
6894                        size_t number_of_new_blocks,
6895                        size_t after) {
6896  DCHECK_LT(after, blocks->size());
6897  size_t old_size = blocks->size();
6898  size_t new_size = old_size + number_of_new_blocks;
6899  blocks->resize(new_size);
6900  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
6901}
6902
6903/*
6904 * Hunt "under the hood" of array lengths (leading to array references),
6905 * null checks (also leading to array references), and new arrays
6906 * (leading to the actual length). This makes it more likely related
6907 * instructions become actually comparable.
6908 */
6909inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
6910  while (instruction->IsArrayLength() ||
6911         instruction->IsNullCheck() ||
6912         instruction->IsNewArray()) {
6913    instruction = instruction->IsNewArray()
6914        ? instruction->AsNewArray()->GetLength()
6915        : instruction->InputAt(0);
6916  }
6917  return instruction;
6918}
6919
6920}  // namespace art
6921
6922#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6923