// nodes.h revision 79efadfdd861584f1c47654ade975eae6c43c360
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "dex_file.h"
32#include "dex_file_types.h"
33#include "deoptimization_kind.h"
34#include "entrypoints/quick/quick_entrypoints_enum.h"
35#include "handle.h"
36#include "handle_scope.h"
37#include "invoke_type.h"
38#include "intrinsics_enum.h"
39#include "locations.h"
40#include "method_reference.h"
41#include "mirror/class.h"
42#include "offsets.h"
43#include "primitive.h"
44#include "utils/intrusive_forward_list.h"
45
46namespace art {
47
48class GraphChecker;
49class HBasicBlock;
50class HCurrentMethod;
51class HDoubleConstant;
52class HEnvironment;
53class HFloatConstant;
54class HGraphBuilder;
55class HGraphVisitor;
56class HInstruction;
57class HIntConstant;
58class HInvoke;
59class HLongConstant;
60class HNullConstant;
61class HPhi;
62class HSuspendCheck;
63class HTryBoundary;
64class LiveInterval;
65class LocationSummary;
66class SlowPathCode;
67class SsaBuilder;
68
69namespace mirror {
70class DexCache;
71}  // namespace mirror
72
73static const int kDefaultNumberOfBlocks = 8;
74static const int kDefaultNumberOfSuccessors = 2;
75static const int kDefaultNumberOfPredecessors = 2;
76static const int kDefaultNumberOfExceptionalPredecessors = 0;
77static const int kDefaultNumberOfDominatedBlocks = 1;
78static const int kDefaultNumberOfBackEdges = 1;
79
80// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
81static constexpr int32_t kMaxIntShiftDistance = 0x1f;
82// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
83static constexpr int32_t kMaxLongShiftDistance = 0x3f;
84
85static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
86static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
87
88static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
89
90static constexpr uint32_t kNoDexPc = -1;
91
// Returns true iff `lhs` and `rhs` are the very same DexFile object
// (pointer identity, not content equality).
inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}
101
// Comparison kinds used by conditional branches and HCondition nodes.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <  (below)
  kCondBE,  // <= (below or equal)
  kCondA,   // >  (above)
  kCondAE,  // >= (above or equal)
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
120
// Outcome of whole-graph analyses such as HGraph::BuildDominatorTree and
// HGraph::AnalyzeLoops.
enum GraphAnalysisResult {
  kAnalysisSkipped,              // Analysis was not run.
  kAnalysisInvalidBytecode,      // Input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,   // A loop header is a catch block (see AnalyzeLoops).
  kAnalysisFailAmbiguousArrayOp, // Ambiguous array operation (presumably set by SSA building — confirm).
  kAnalysisSuccess,              // Analysis completed successfully.
};
128
// Reinterprets `x` as the unsigned integer type of the same width as T.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
133
// Ordered list of HInstructions, represented by its first and last element.
// The list itself owns no storage; it only tracks the two end pointers.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` at the end of the list / unlink it from the list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops both end pointers; does not touch the instructions themselves.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice the contents of `instruction_list` into this list, either relative
  // to `cursor` or at the end.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
180
181class ReferenceTypeInfo : ValueObject {
182 public:
183  typedef Handle<mirror::Class> TypeHandle;
184
185  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
186
187  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
188    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
189  }
190
191  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
192    return ReferenceTypeInfo(type_handle, is_exact);
193  }
194
195  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
196
197  static bool IsValidHandle(TypeHandle handle) {
198    return handle.GetReference() != nullptr;
199  }
200
201  bool IsValid() const {
202    return IsValidHandle(type_handle_);
203  }
204
205  bool IsExact() const { return is_exact_; }
206
207  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
208    DCHECK(IsValid());
209    return GetTypeHandle()->IsObjectClass();
210  }
211
212  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
213    DCHECK(IsValid());
214    return GetTypeHandle()->IsStringClass();
215  }
216
217  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
218    DCHECK(IsValid());
219    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
220  }
221
222  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
223    DCHECK(IsValid());
224    return GetTypeHandle()->IsInterface();
225  }
226
227  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
228    DCHECK(IsValid());
229    return GetTypeHandle()->IsArrayClass();
230  }
231
232  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
233    DCHECK(IsValid());
234    return GetTypeHandle()->IsPrimitiveArray();
235  }
236
237  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
238    DCHECK(IsValid());
239    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
240  }
241
242  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
243    DCHECK(IsValid());
244    if (!IsExact()) return false;
245    if (!IsArrayClass()) return false;
246    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
247  }
248
249  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
250    DCHECK(IsValid());
251    if (!IsExact()) return false;
252    if (!IsArrayClass()) return false;
253    if (!rti.IsArrayClass()) return false;
254    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
255        rti.GetTypeHandle()->GetComponentType());
256  }
257
258  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
259
260  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
261    DCHECK(IsValid());
262    DCHECK(rti.IsValid());
263    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
264  }
265
266  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
267    DCHECK(IsValid());
268    DCHECK(rti.IsValid());
269    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
270        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
271  }
272
273  // Returns true if the type information provide the same amount of details.
274  // Note that it does not mean that the instructions have the same actual type
275  // (because the type can be the result of a merge).
276  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
277    if (!IsValid() && !rti.IsValid()) {
278      // Invalid types are equal.
279      return true;
280    }
281    if (!IsValid() || !rti.IsValid()) {
282      // One is valid, the other not.
283      return false;
284    }
285    return IsExact() == rti.IsExact()
286        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
287  }
288
289 private:
290  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
291  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
292      : type_handle_(type_handle), is_exact_(is_exact) { }
293
294  // The class of the object.
295  TypeHandle type_handle_;
296  // Whether or not the type is exact or a superclass of the actual type.
297  // Whether or not we have any information about this type.
298  bool is_exact_;
299};
300
301std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
302
303// Control-flow graph of a method. Contains a list of basic blocks.
304class HGraph : public ArenaObject<kArenaAllocGraph> {
305 public:
306  HGraph(ArenaAllocator* arena,
307         const DexFile& dex_file,
308         uint32_t method_idx,
309         InstructionSet instruction_set,
310         InvokeType invoke_type = kInvalidInvokeType,
311         bool debuggable = false,
312         bool osr = false,
313         int start_instruction_id = 0)
314      : arena_(arena),
315        blocks_(arena->Adapter(kArenaAllocBlockList)),
316        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
317        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
318        entry_block_(nullptr),
319        exit_block_(nullptr),
320        maximum_number_of_out_vregs_(0),
321        number_of_vregs_(0),
322        number_of_in_vregs_(0),
323        temporaries_vreg_slots_(0),
324        has_bounds_checks_(false),
325        has_try_catch_(false),
326        has_simd_(false),
327        has_loops_(false),
328        has_irreducible_loops_(false),
329        debuggable_(debuggable),
330        current_instruction_id_(start_instruction_id),
331        dex_file_(dex_file),
332        method_idx_(method_idx),
333        invoke_type_(invoke_type),
334        in_ssa_form_(false),
335        number_of_cha_guards_(0),
336        instruction_set_(instruction_set),
337        cached_null_constant_(nullptr),
338        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
339        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
340        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
342        cached_current_method_(nullptr),
343        art_method_(nullptr),
344        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
345        osr_(osr),
346        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
347    blocks_.reserve(kDefaultNumberOfBlocks);
348  }
349
350  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
351  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
352
353  ArenaAllocator* GetArena() const { return arena_; }
354  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
355
356  bool IsInSsaForm() const { return in_ssa_form_; }
357  void SetInSsaForm() { in_ssa_form_ = true; }
358
359  HBasicBlock* GetEntryBlock() const { return entry_block_; }
360  HBasicBlock* GetExitBlock() const { return exit_block_; }
361  bool HasExitBlock() const { return exit_block_ != nullptr; }
362
363  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
364  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
365
366  void AddBlock(HBasicBlock* block);
367
368  void ComputeDominanceInformation();
369  void ClearDominanceInformation();
370  void ClearLoopInformation();
371  void FindBackEdges(ArenaBitVector* visited);
372  GraphAnalysisResult BuildDominatorTree();
373  void SimplifyCFG();
374  void SimplifyCatchBlocks();
375
376  // Analyze all natural loops in this graph. Returns a code specifying that it
377  // was successful or the reason for failure. The method will fail if a loop
378  // is a throw-catch loop, i.e. the header is a catch block.
379  GraphAnalysisResult AnalyzeLoops() const;
380
381  // Iterate over blocks to compute try block membership. Needs reverse post
382  // order and loop information.
383  void ComputeTryBlockInformation();
384
385  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
386  // Returns the instruction to replace the invoke expression or null if the
387  // invoke is for a void method. Note that the caller is responsible for replacing
388  // and removing the invoke instruction.
389  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
390
391  // Update the loop and try membership of `block`, which was spawned from `reference`.
392  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
393  // should be the new back edge.
394  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
395                                             HBasicBlock* reference,
396                                             bool replace_if_back_edge);
397
398  // Need to add a couple of blocks to test if the loop body is entered and
399  // put deoptimization instructions, etc.
400  void TransformLoopHeaderForBCE(HBasicBlock* header);
401
402  // Adds a new loop directly after the loop with the given header and exit.
403  // Returns the new preheader.
404  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
405                                             HBasicBlock* body,
406                                             HBasicBlock* exit);
407
408  // Removes `block` from the graph. Assumes `block` has been disconnected from
409  // other blocks and has no instructions or phis.
410  void DeleteDeadEmptyBlock(HBasicBlock* block);
411
412  // Splits the edge between `block` and `successor` while preserving the
413  // indices in the predecessor/successor lists. If there are multiple edges
414  // between the blocks, the lowest indices are used.
415  // Returns the new block which is empty and has the same dex pc as `successor`.
416  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
417
418  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
419  void SimplifyLoop(HBasicBlock* header);
420
421  int32_t GetNextInstructionId() {
422    DCHECK_NE(current_instruction_id_, INT32_MAX);
423    return current_instruction_id_++;
424  }
425
426  int32_t GetCurrentInstructionId() const {
427    return current_instruction_id_;
428  }
429
430  void SetCurrentInstructionId(int32_t id) {
431    DCHECK_GE(id, current_instruction_id_);
432    current_instruction_id_ = id;
433  }
434
435  uint16_t GetMaximumNumberOfOutVRegs() const {
436    return maximum_number_of_out_vregs_;
437  }
438
439  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
440    maximum_number_of_out_vregs_ = new_value;
441  }
442
443  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
444    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
445  }
446
447  void UpdateTemporariesVRegSlots(size_t slots) {
448    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
449  }
450
451  size_t GetTemporariesVRegSlots() const {
452    DCHECK(!in_ssa_form_);
453    return temporaries_vreg_slots_;
454  }
455
456  void SetNumberOfVRegs(uint16_t number_of_vregs) {
457    number_of_vregs_ = number_of_vregs;
458  }
459
460  uint16_t GetNumberOfVRegs() const {
461    return number_of_vregs_;
462  }
463
464  void SetNumberOfInVRegs(uint16_t value) {
465    number_of_in_vregs_ = value;
466  }
467
468  uint16_t GetNumberOfInVRegs() const {
469    return number_of_in_vregs_;
470  }
471
472  uint16_t GetNumberOfLocalVRegs() const {
473    DCHECK(!in_ssa_form_);
474    return number_of_vregs_ - number_of_in_vregs_;
475  }
476
477  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
478    return reverse_post_order_;
479  }
480
481  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
482    DCHECK(GetReversePostOrder()[0] == entry_block_);
483    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
484  }
485
486  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
487    return ReverseRange(GetReversePostOrder());
488  }
489
490  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
491    return linear_order_;
492  }
493
494  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
495    return ReverseRange(GetLinearOrder());
496  }
497
498  bool HasBoundsChecks() const {
499    return has_bounds_checks_;
500  }
501
502  void SetHasBoundsChecks(bool value) {
503    has_bounds_checks_ = value;
504  }
505
506  bool IsDebuggable() const { return debuggable_; }
507
508  // Returns a constant of the given type and value. If it does not exist
509  // already, it is created and inserted into the graph. This method is only for
510  // integral types.
511  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
512
513  // TODO: This is problematic for the consistency of reference type propagation
514  // because it can be created anytime after the pass and thus it will be left
515  // with an invalid type.
516  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
517
518  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
519    return CreateConstant(value, &cached_int_constants_, dex_pc);
520  }
521  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
522    return CreateConstant(value, &cached_long_constants_, dex_pc);
523  }
524  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
525    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
526  }
527  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
528    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
529  }
530
531  HCurrentMethod* GetCurrentMethod();
532
533  const DexFile& GetDexFile() const {
534    return dex_file_;
535  }
536
537  uint32_t GetMethodIdx() const {
538    return method_idx_;
539  }
540
541  InvokeType GetInvokeType() const {
542    return invoke_type_;
543  }
544
545  InstructionSet GetInstructionSet() const {
546    return instruction_set_;
547  }
548
549  bool IsCompilingOsr() const { return osr_; }
550
551  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
552    return cha_single_implementation_list_;
553  }
554
555  void AddCHASingleImplementationDependency(ArtMethod* method) {
556    cha_single_implementation_list_.insert(method);
557  }
558
559  bool HasShouldDeoptimizeFlag() const {
560    return number_of_cha_guards_ != 0;
561  }
562
563  bool HasTryCatch() const { return has_try_catch_; }
564  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
565
566  bool HasSIMD() const { return has_simd_; }
567  void SetHasSIMD(bool value) { has_simd_ = value; }
568
569  bool HasLoops() const { return has_loops_; }
570  void SetHasLoops(bool value) { has_loops_ = value; }
571
572  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
573  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
574
575  ArtMethod* GetArtMethod() const { return art_method_; }
576  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
577
578  // Returns an instruction with the opposite Boolean value from 'cond'.
579  // The instruction has been inserted into the graph, either as a constant, or
580  // before cursor.
581  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
582
583  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
584
585  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
586  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
587  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
588
589 private:
590  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
591  void RemoveDeadBlocks(const ArenaBitVector& visited);
592
593  template <class InstructionType, typename ValueType>
594  InstructionType* CreateConstant(ValueType value,
595                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
596                                  uint32_t dex_pc = kNoDexPc) {
597    // Try to find an existing constant of the given value.
598    InstructionType* constant = nullptr;
599    auto cached_constant = cache->find(value);
600    if (cached_constant != cache->end()) {
601      constant = cached_constant->second;
602    }
603
604    // If not found or previously deleted, create and cache a new instruction.
605    // Don't bother reviving a previously deleted instruction, for simplicity.
606    if (constant == nullptr || constant->GetBlock() == nullptr) {
607      constant = new (arena_) InstructionType(value, dex_pc);
608      cache->Overwrite(value, constant);
609      InsertConstant(constant);
610    }
611    return constant;
612  }
613
614  void InsertConstant(HConstant* instruction);
615
616  // Cache a float constant into the graph. This method should only be
617  // called by the SsaBuilder when creating "equivalent" instructions.
618  void CacheFloatConstant(HFloatConstant* constant);
619
620  // See CacheFloatConstant comment.
621  void CacheDoubleConstant(HDoubleConstant* constant);
622
623  ArenaAllocator* const arena_;
624
625  // List of blocks in insertion order.
626  ArenaVector<HBasicBlock*> blocks_;
627
628  // List of blocks to perform a reverse post order tree traversal.
629  ArenaVector<HBasicBlock*> reverse_post_order_;
630
631  // List of blocks to perform a linear order tree traversal. Unlike the reverse
632  // post order, this order is not incrementally kept up-to-date.
633  ArenaVector<HBasicBlock*> linear_order_;
634
635  HBasicBlock* entry_block_;
636  HBasicBlock* exit_block_;
637
638  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
639  uint16_t maximum_number_of_out_vregs_;
640
641  // The number of virtual registers in this method. Contains the parameters.
642  uint16_t number_of_vregs_;
643
644  // The number of virtual registers used by parameters of this method.
645  uint16_t number_of_in_vregs_;
646
647  // Number of vreg size slots that the temporaries use (used in baseline compiler).
648  size_t temporaries_vreg_slots_;
649
650  // Flag whether there are bounds checks in the graph. We can skip
651  // BCE if it's false. It's only best effort to keep it up to date in
652  // the presence of code elimination so there might be false positives.
653  bool has_bounds_checks_;
654
655  // Flag whether there are try/catch blocks in the graph. We will skip
656  // try/catch-related passes if it's false. It's only best effort to keep
657  // it up to date in the presence of code elimination so there might be
658  // false positives.
659  bool has_try_catch_;
660
661  // Flag whether SIMD instructions appear in the graph. If true, the
662  // code generators may have to be more careful spilling the wider
663  // contents of SIMD registers.
664  bool has_simd_;
665
666  // Flag whether there are any loops in the graph. We can skip loop
667  // optimization if it's false. It's only best effort to keep it up
668  // to date in the presence of code elimination so there might be false
669  // positives.
670  bool has_loops_;
671
672  // Flag whether there are any irreducible loops in the graph. It's only
673  // best effort to keep it up to date in the presence of code elimination
674  // so there might be false positives.
675  bool has_irreducible_loops_;
676
677  // Indicates whether the graph should be compiled in a way that
678  // ensures full debuggability. If false, we can apply more
679  // aggressive optimizations that may limit the level of debugging.
680  const bool debuggable_;
681
682  // The current id to assign to a newly added instruction. See HInstruction.id_.
683  int32_t current_instruction_id_;
684
685  // The dex file from which the method is from.
686  const DexFile& dex_file_;
687
688  // The method index in the dex file.
689  const uint32_t method_idx_;
690
691  // If inlined, this encodes how the callee is being invoked.
692  const InvokeType invoke_type_;
693
694  // Whether the graph has been transformed to SSA form. Only used
695  // in debug mode to ensure we are not using properties only valid
696  // for non-SSA form (like the number of temporaries).
697  bool in_ssa_form_;
698
699  // Number of CHA guards in the graph. Used to short-circuit the
700  // CHA guard optimization pass when there is no CHA guard left.
701  uint32_t number_of_cha_guards_;
702
703  const InstructionSet instruction_set_;
704
705  // Cached constants.
706  HNullConstant* cached_null_constant_;
707  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
708  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
709  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
710  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
711
712  HCurrentMethod* cached_current_method_;
713
714  // The ArtMethod this graph is for. Note that for AOT, it may be null,
715  // for example for methods whose declaring class could not be resolved
716  // (such as when the superclass could not be found).
717  ArtMethod* art_method_;
718
719  // Keep the RTI of inexact Object to avoid having to pass stack handle
720  // collection pointer to passes which may create NullConstant.
721  ReferenceTypeInfo inexact_object_rti_;
722
723  // Whether we are compiling this graph for on stack replacement: this will
724  // make all loops seen as irreducible and emit special stack maps to mark
725  // compiled code entries which the interpreter can directly jump to.
726  const bool osr_;
727
728  // List of methods that are assumed to have single implementation.
729  ArenaSet<ArtMethod*> cha_single_implementation_list_;
730
731  friend class SsaBuilder;           // For caching constants.
732  friend class SsaLivenessAnalysis;  // For the linear order.
733  friend class HInliner;             // For the reverse post order.
734  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
735  DISALLOW_COPY_AND_ASSIGN(HGraph);
736};
737
// Per-loop metadata: the header block, the back edges, the set of blocks
// belonging to the loop, and irreducibility flags.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The loop's suspend check, if one has been associated with it.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector indexed by block id; a set bit means the block is in this loop.
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Populated == at least one block has been added to `blocks_`.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
845
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor. The resulting object answers true to
  // IsTryBlock() and false to IsCatchBlock().
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor. `catch_type_index` may be the
  // invalid index to denote a catch-all handler.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  // Precondition: IsTryBlock().
  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // Returns true for a catch-all handler, i.e. one without an exception type
  // filter (encoded as an invalid type index). Precondition: IsCatchBlock().
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  // Precondition: IsCatchBlock().
  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  // Dex file that `catch_type_index_` resolves in. Precondition: IsCatchBlock().
  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};
898
// Sentinel lifetime position (wraps to the maximum size_t value) used before
// liveness analysis assigns real positions.
static constexpr size_t kNoLifetime = -1;
// Sentinel block id assigned until the graph gives the block a real id.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
901
// A block in a method. Contains the list of instructions represented
// as a doubly-linked list. Each block knows its predecessors and
// successors.
905
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  // Creates a block owned by `graph`. The block id stays kInvalidBlockId
  // until the graph assigns one through SetBlockId().
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    // Reserve up front to avoid reallocations for the common small CFG shapes.
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears among the successors, searching from
  // index `start_from` onwards.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` on this block's loop information, creating the loop
  // information (with this block as header) on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // Note: takes an int but the id is stored unsigned; callers pass
  // non-negative ids.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends `block` as a successor and registers `this` as its predecessor.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge this->existing to this->new_block, keeping the
  // successor index stable on this side.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge existing->this to new_block->this, keeping the
  // predecessor index stable on this side.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors. Only valid for blocks with exactly two.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors. Only valid for blocks with exactly two.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // A block is a loop header when it is the header of its own loop information.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop containing this block, or nullptr when not in a loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Linear-scan lifetime positions; kNoLifetime until liveness analysis runs.
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Try/catch metadata, or nullptr when the block is in neither.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1257
1258// Iterates over the LoopInformation of all loops which contain 'block'
1259// from the innermost to the outermost.
1260class HLoopInformationOutwardIterator : public ValueObject {
1261 public:
1262  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1263      : current_(block.GetLoopInformation()) {}
1264
1265  bool Done() const { return current_ == nullptr; }
1266
1267  void Advance() {
1268    DCHECK(!Done());
1269    current_ = current_->GetPreHeader()->GetLoopInformation();
1270  }
1271
1272  HLoopInformation* Current() const {
1273    DCHECK(!Done());
1274    return current_;
1275  }
1276
1277 private:
1278  HLoopInformation* current_;
1279
1280  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1281};
1282
// Lists every architecture-independent concrete instruction class together
// with its most specialized visible base class. Expanded with an M(name, base)
// macro to generate forward declarations, enum entries, visitors, etc.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \
  M(VecReplicateScalar, VecUnaryOperation)                              \
  M(VecSumReduce, VecUnaryOperation)                                    \
  M(VecCnv, VecUnaryOperation)                                          \
  M(VecNeg, VecUnaryOperation)                                          \
  M(VecAbs, VecUnaryOperation)                                          \
  M(VecNot, VecUnaryOperation)                                          \
  M(VecAdd, VecBinaryOperation)                                         \
  M(VecHalvingAdd, VecBinaryOperation)                                  \
  M(VecSub, VecBinaryOperation)                                         \
  M(VecMul, VecBinaryOperation)                                         \
  M(VecDiv, VecBinaryOperation)                                         \
  M(VecMin, VecBinaryOperation)                                         \
  M(VecMax, VecBinaryOperation)                                         \
  M(VecAnd, VecBinaryOperation)                                         \
  M(VecAndNot, VecBinaryOperation)                                      \
  M(VecOr, VecBinaryOperation)                                          \
  M(VecXor, VecBinaryOperation)                                         \
  M(VecShl, VecBinaryOperation)                                         \
  M(VecShr, VecBinaryOperation)                                         \
  M(VecUShr, VecBinaryOperation)                                        \
  M(VecSetScalars, VecOperation)                                        \
  M(VecMultiplyAccumulate, VecOperation)                                \
  M(VecLoad, VecMemoryOperation)                                        \
  M(VecStore, VecMemoryOperation)                                       \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(DataProcWithShifterOp, Instruction)                                 \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddress, Instruction)
#endif

// ARM-only instructions; the list is empty when that code generator is
// compiled out.
#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

// ARM64 currently has no architecture-specific instruction classes.
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

// MIPS-only instructions; empty when that code generator is compiled out.
#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsDexCacheArraysBase, Instruction)                                \
  M(MipsPackedSwitch, Instruction)
#endif

// MIPS64 currently has no architecture-specific instruction classes.
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

// x86-only instructions; empty when that code generator is compiled out.
#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

// x86-64 currently has no architecture-specific instruction classes.
#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1434
// All concrete instructions: the common list plus the shared and
// per-architecture lists enabled for this build.
#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1444
// Abstract (non-instantiable) instruction base classes and their parents.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)                                                \
  M(VecOperation, Instruction)                                          \
  M(VecUnaryOperation, VecOperation)                                    \
  M(VecBinaryOperation, VecOperation)                                   \
  M(VecMemoryOperation, VecOperation)
1455
// Every instruction class, concrete and abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)
1459
// Forward-declare each H<Type> class generated from the instruction lists.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
1463
// Boilerplate every concrete instruction class declares: kind query, debug
// name, same-type comparison and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Is/As helpers for abstract instruction classes. As##type() can return
// `this` directly because the macro is expanded inside H##type itself.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1476
// One entry in an instruction's use list: the user (an HInstruction* or
// HEnvironment*, via T) and the input index at which the instruction is used.
// Nodes are allocated privately by HInstruction (a friend) and linked through
// the intrusive `hook` member.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Private: only HInstruction creates use-list nodes.
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1499
// A use list is a singly-linked intrusive list of HUseListNode<T>.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1502
1503// This class is used by HEnvironment and HInstruction classes to record the
1504// instructions they use and pointers to the corresponding HUseListNodes kept
1505// by the used instructions.
1506template <typename T>
1507class HUserRecord : public ValueObject {
1508 public:
1509  HUserRecord() : instruction_(nullptr), before_use_node_() {}
1510  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1511
1512  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1513      : HUserRecord(old_record.instruction_, before_use_node) {}
1514  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1515      : instruction_(instruction), before_use_node_(before_use_node) {
1516    DCHECK(instruction_ != nullptr);
1517  }
1518
1519  HInstruction* GetInstruction() const { return instruction_; }
1520  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1521  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1522
1523 private:
1524  // Instruction used by the user.
1525  HInstruction* instruction_;
1526
1527  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1528  typename HUseList<T>::iterator before_use_node_;
1529};
1530
// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  // Mutable overload, used by the HInputsRef view.
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  // Const overload, used by the HConstInputsRef view.
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};
1542
// Views over an instruction's input records that yield the HInstruction*
// values directly (mutable and read-only variants, respectively).
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1545
1546/**
1547 * Side-effects representation.
1548 *
1549 * For write/read dependences on fields/arrays, the dependence analysis uses
1550 * type disambiguation (e.g. a float field write cannot modify the value of an
1551 * integer field read) and the access type (e.g.  a reference array write cannot
1552 * modify the value of a reference field read [although it may modify the
1553 * reference fetch prior to reading the field, which is represented by its own
1554 * write/read dependence]). The analysis makes conservative points-to
1555 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1556 * the same, and any reference read depends on any reference read without
1557 * further regard of its type).
1558 *
1559 * The internal representation uses 38-bit and is described in the table below.
1560 * The first line indicates the side effect, and for field/array accesses the
1561 * second line indicates the type of the access (in the order of the
1562 * Primitive::Type enum).
1563 * The two numbered lines below indicate the bit position in the bitfield (read
1564 * vertically).
1565 *
1566 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1567 *   +-------------+---------+---------+--------------+---------+---------+
1568 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1569 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1570 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1571 *
1572 * Note that, to ease the implementation, 'changes' bits are least significant
1573 * bits, while 'dependency' bits are most significant bits.
1574 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies at all.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every 'change' and every 'depend on' bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  // All 'change' bits (writes of every kind plus 'can trigger GC').
  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  // All 'depend on' bits (reads of every kind plus 'depends on GC').
  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // Everything except the 'depends on GC' bit: all typed writes and reads
  // plus the ability to trigger GC.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  // All typed field/array writes and reads, without either GC bit.
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // A volatile access has ordering semantics, so it is conservatively
  // treated as potentially writing and reading everything.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Removes the side-effects of `other` from this.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with `other`.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if every flag set in `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any 'change' bit is set.
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any 'depend on' bit is set.
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  // Returns true if every flag (including both GC bits) is set.
  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift our 'depend on' bits down so they line up with `other`'s
    // 'change' bits (the two halves mirror each other, see static_assert).
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk from the most significant bit down, mirroring the layout table
    // in the comment above this class.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        // Note: the string is indexed by bit position, hence the type
        // letters appear reversed ("LZBCSIJFD") relative to the output.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of bits per field/array access group: one bit per primitive type.
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half (least significant bits): writes, then 'can trigger GC'.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half (most significant bits): reads, then 'depends on GC'.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  // MayDependOn() relies on the two halves being exact mirrors.
  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    // The raw type ordinal must fit within one 9-bit access group.
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  // 38-bit encoding described in the layout table above the class.
  uint64_t flags_;
};
1770
// A HEnvironment object contains the values of virtual registers at a given location.
// Environments form a parent chain when the holder instruction was inlined:
// each parent describes the vregs of a caller frame.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment of `number_of_vregs` empty slots for `method` at
  // `dex_pc`, attached to the `holder` instruction.
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Copies only the size and metadata of `to_copy`; the vreg values
  // themselves must be copied separately with CopyFrom().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Clones `parent` (and, recursively, its own parents) and installs the
  // clone as the parent of the outermost environment in this chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      // Not outermost yet: delegate up the existing chain.
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Records `instruction` as the value of vreg `index`. Raw: does not
  // register this environment in `instruction`'s env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  // Unlinks this environment from the use list of the instruction at `index`.
  void RemoveAsUserOfInput(size_t index) const;

  // Number of vreg slots (more than the method's vregs when inlined).
  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // A parent environment implies the holder was inlined from another method.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Value of each virtual register at this location.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  // Physical location of each vreg value, filled in by the code generator.
  ArenaVector<Location> locations_;
  // Caller environment when the holder was inlined; null otherwise.
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  // HInstruction's env-use fix-up helpers write vregs_ directly.
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1867
1868class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1869 public:
1870  HInstruction(SideEffects side_effects, uint32_t dex_pc)
1871      : previous_(nullptr),
1872        next_(nullptr),
1873        block_(nullptr),
1874        dex_pc_(dex_pc),
1875        id_(-1),
1876        ssa_index_(-1),
1877        packed_fields_(0u),
1878        environment_(nullptr),
1879        locations_(nullptr),
1880        live_interval_(nullptr),
1881        lifetime_position_(kNoLifetime),
1882        side_effects_(side_effects),
1883        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1884    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1885  }
1886
1887  virtual ~HInstruction() {}
1888
1889#define DECLARE_KIND(type, super) k##type,
1890  enum InstructionKind {
1891    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1892  };
1893#undef DECLARE_KIND
1894
1895  HInstruction* GetNext() const { return next_; }
1896  HInstruction* GetPrevious() const { return previous_; }
1897
1898  HInstruction* GetNextDisregardingMoves() const;
1899  HInstruction* GetPreviousDisregardingMoves() const;
1900
1901  HBasicBlock* GetBlock() const { return block_; }
1902  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1903  void SetBlock(HBasicBlock* block) { block_ = block; }
1904  bool IsInBlock() const { return block_ != nullptr; }
1905  bool IsInLoop() const { return block_->IsInLoop(); }
1906  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1907  bool IsIrreducibleLoopHeaderPhi() const {
1908    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1909  }
1910
1911  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
1912
1913  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
1914    // One virtual method is enough, just const_cast<> and then re-add the const.
1915    return ArrayRef<const HUserRecord<HInstruction*>>(
1916        const_cast<HInstruction*>(this)->GetInputRecords());
1917  }
1918
1919  HInputsRef GetInputs() {
1920    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1921  }
1922
1923  HConstInputsRef GetInputs() const {
1924    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1925  }
1926
1927  size_t InputCount() const { return GetInputRecords().size(); }
1928  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1929
1930  bool HasInput(HInstruction* input) const {
1931    for (const HInstruction* i : GetInputs()) {
1932      if (i == input) {
1933        return true;
1934      }
1935    }
1936    return false;
1937  }
1938
1939  void SetRawInputAt(size_t index, HInstruction* input) {
1940    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1941  }
1942
1943  virtual void Accept(HGraphVisitor* visitor) = 0;
1944  virtual const char* DebugName() const = 0;
1945
1946  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
1947
1948  virtual bool NeedsEnvironment() const { return false; }
1949
1950  uint32_t GetDexPc() const { return dex_pc_; }
1951
1952  virtual bool IsControlFlow() const { return false; }
1953
1954  // Can the instruction throw?
1955  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
1956  // could throw OOME, but it is still OK to remove them if they are unused.
1957  virtual bool CanThrow() const { return false; }
1958  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1959
1960  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1961  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1962
1963  // Does not apply for all instructions, but having this at top level greatly
1964  // simplifies the null check elimination.
1965  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1966  virtual bool CanBeNull() const {
1967    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
1968    return true;
1969  }
1970
1971  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1972    return false;
1973  }
1974
1975  virtual bool IsActualObject() const {
1976    return GetType() == Primitive::kPrimNot;
1977  }
1978
1979  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
1980
1981  ReferenceTypeInfo GetReferenceTypeInfo() const {
1982    DCHECK_EQ(GetType(), Primitive::kPrimNot);
1983    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
1984                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
1985  }
1986
1987  void AddUseAt(HInstruction* user, size_t index) {
1988    DCHECK(user != nullptr);
1989    // Note: fixup_end remains valid across push_front().
1990    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
1991    HUseListNode<HInstruction*>* new_node =
1992        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
1993    uses_.push_front(*new_node);
1994    FixUpUserRecordsAfterUseInsertion(fixup_end);
1995  }
1996
1997  void AddEnvUseAt(HEnvironment* user, size_t index) {
1998    DCHECK(user != nullptr);
1999    // Note: env_fixup_end remains valid across push_front().
2000    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
2001    HUseListNode<HEnvironment*>* new_node =
2002        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
2003    env_uses_.push_front(*new_node);
2004    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
2005  }
2006
2007  void RemoveAsUserOfInput(size_t input) {
2008    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
2009    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2010    input_use.GetInstruction()->uses_.erase_after(before_use_node);
2011    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2012  }
2013
2014  void RemoveAsUserOfAllInputs() {
2015    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
2016      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2017      input_use.GetInstruction()->uses_.erase_after(before_use_node);
2018      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2019    }
2020  }
2021
2022  const HUseList<HInstruction*>& GetUses() const { return uses_; }
2023  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2024
2025  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
2026  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
2027  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
2028  bool HasOnlyOneNonEnvironmentUse() const {
2029    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2030  }
2031
2032  bool IsRemovable() const {
2033    return
2034        !DoesAnyWrite() &&
2035        !CanThrow() &&
2036        !IsSuspendCheck() &&
2037        !IsControlFlow() &&
2038        !IsNativeDebugInfo() &&
2039        !IsParameterValue() &&
2040        // If we added an explicit barrier then we should keep it.
2041        !IsMemoryBarrier();
2042  }
2043
2044  bool IsDeadAndRemovable() const {
2045    return IsRemovable() && !HasUses();
2046  }
2047
2048  // Does this instruction strictly dominate `other_instruction`?
2049  // Returns false if this instruction and `other_instruction` are the same.
2050  // Aborts if this instruction and `other_instruction` are both phis.
2051  bool StrictlyDominates(HInstruction* other_instruction) const;
2052
2053  int GetId() const { return id_; }
2054  void SetId(int id) { id_ = id; }
2055
2056  int GetSsaIndex() const { return ssa_index_; }
2057  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
2058  bool HasSsaIndex() const { return ssa_index_ != -1; }
2059
2060  bool HasEnvironment() const { return environment_ != nullptr; }
2061  HEnvironment* GetEnvironment() const { return environment_; }
2062  // Set the `environment_` field. Raw because this method does not
2063  // update the uses lists.
2064  void SetRawEnvironment(HEnvironment* environment) {
2065    DCHECK(environment_ == nullptr);
2066    DCHECK_EQ(environment->GetHolder(), this);
2067    environment_ = environment;
2068  }
2069
2070  void InsertRawEnvironment(HEnvironment* environment) {
2071    DCHECK(environment_ != nullptr);
2072    DCHECK_EQ(environment->GetHolder(), this);
2073    DCHECK(environment->GetParent() == nullptr);
2074    environment->parent_ = environment_;
2075    environment_ = environment;
2076  }
2077
2078  void RemoveEnvironment();
2079
2080  // Set the environment of this instruction, copying it from `environment`. While
2081  // copying, the uses lists are being updated.
2082  void CopyEnvironmentFrom(HEnvironment* environment) {
2083    DCHECK(environment_ == nullptr);
2084    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2085    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2086    environment_->CopyFrom(environment);
2087    if (environment->GetParent() != nullptr) {
2088      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2089    }
2090  }
2091
2092  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2093                                                HBasicBlock* block) {
2094    DCHECK(environment_ == nullptr);
2095    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2096    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2097    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2098    if (environment->GetParent() != nullptr) {
2099      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2100    }
2101  }
2102
2103  // Returns the number of entries in the environment. Typically, that is the
2104  // number of dex registers in a method. It could be more in case of inlining.
2105  size_t EnvironmentSize() const;
2106
2107  LocationSummary* GetLocations() const { return locations_; }
2108  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2109
2110  void ReplaceWith(HInstruction* instruction);
2111  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2112  void ReplaceInput(HInstruction* replacement, size_t index);
2113
2114  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2115  // uses of this instruction by `other` are *not* updated.
2116  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2117    ReplaceWith(other);
2118    other->ReplaceInput(this, use_index);
2119  }
2120
2121  // Move `this` instruction before `cursor`
2122  void MoveBefore(HInstruction* cursor, bool do_checks = true);
2123
2124  // Move `this` before its first user and out of any loops. If there is no
2125  // out-of-loop user that dominates all other users, move the instruction
2126  // to the end of the out-of-loop common dominator of the user's blocks.
2127  //
2128  // This can be used only on non-throwing instructions with no side effects that
2129  // have at least one use but no environment uses.
2130  void MoveBeforeFirstUserAndOutOfLoops();
2131
2132#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2133  bool Is##type() const;                                                       \
2134  const H##type* As##type() const;                                             \
2135  H##type* As##type();
2136
2137  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2138#undef INSTRUCTION_TYPE_CHECK
2139
2140#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2141  bool Is##type() const { return (As##type() != nullptr); }                    \
2142  virtual const H##type* As##type() const { return nullptr; }                  \
2143  virtual H##type* As##type() { return nullptr; }
2144  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2145#undef INSTRUCTION_TYPE_CHECK
2146
2147  // Returns whether the instruction can be moved within the graph.
2148  // TODO: this method is used by LICM and GVN with possibly different
2149  //       meanings? split and rename?
2150  virtual bool CanBeMoved() const { return false; }
2151
2152  // Returns whether the two instructions are of the same kind.
2153  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2154    return false;
2155  }
2156
2157  // Returns whether any data encoded in the two instructions is equal.
2158  // This method does not look at the inputs. Both instructions must be
2159  // of the same type, otherwise the method has undefined behavior.
2160  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2161    return false;
2162  }
2163
2164  // Returns whether two instructions are equal, that is:
2165  // 1) They have the same type and contain the same data (InstructionDataEquals).
2166  // 2) Their inputs are identical.
2167  bool Equals(const HInstruction* other) const;
2168
2169  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2170  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
2171  // the virtual function because the __attribute__((__pure__)) doesn't really
2172  // apply the strong requirement for virtual functions, preventing optimizations.
2173  InstructionKind GetKind() const PURE;
2174  virtual InstructionKind GetKindInternal() const = 0;
2175
2176  virtual size_t ComputeHashCode() const {
2177    size_t result = GetKind();
2178    for (const HInstruction* input : GetInputs()) {
2179      result = (result * 31) + input->GetId();
2180    }
2181    return result;
2182  }
2183
2184  SideEffects GetSideEffects() const { return side_effects_; }
2185  void SetSideEffects(SideEffects other) { side_effects_ = other; }
2186  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2187
2188  size_t GetLifetimePosition() const { return lifetime_position_; }
2189  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2190  LiveInterval* GetLiveInterval() const { return live_interval_; }
2191  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2192  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2193
2194  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2195
2196  // Returns whether the code generation of the instruction will require to have access
2197  // to the current method. Such instructions are:
2198  // (1): Instructions that require an environment, as calling the runtime requires
2199  //      to walk the stack and have the current method stored at a specific stack address.
2200  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2201  //      to access the dex cache.
2202  bool NeedsCurrentMethod() const {
2203    return NeedsEnvironment() || IsCurrentMethod();
2204  }
2205
2206  // Returns whether the code generation of the instruction will require to have access
2207  // to the dex cache of the current method's declaring class via the current method.
2208  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2209
2210  // Does this instruction have any use in an environment before
2211  // control flow hits 'other'?
2212  bool HasAnyEnvironmentUseBefore(HInstruction* other);
2213
2214  // Remove all references to environment uses of this instruction.
2215  // The caller must ensure that this is safe to do.
2216  void RemoveEnvironmentUsers();
2217
2218  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2219  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2220
2221 protected:
2222  // If set, the machine code for this instruction is assumed to be generated by
2223  // its users. Used by liveness analysis to compute use positions accordingly.
2224  static constexpr size_t kFlagEmittedAtUseSite = 0u;
2225  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2226  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2227  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2228
2229  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2230    return GetInputRecords()[i];
2231  }
2232
2233  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2234    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2235    input_records[index] = input;
2236  }
2237
2238  uint32_t GetPackedFields() const {
2239    return packed_fields_;
2240  }
2241
2242  template <size_t flag>
2243  bool GetPackedFlag() const {
2244    return (packed_fields_ & (1u << flag)) != 0u;
2245  }
2246
2247  template <size_t flag>
2248  void SetPackedFlag(bool value = true) {
2249    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2250  }
2251
2252  template <typename BitFieldType>
2253  typename BitFieldType::value_type GetPackedField() const {
2254    return BitFieldType::Decode(packed_fields_);
2255  }
2256
2257  template <typename BitFieldType>
2258  void SetPackedField(typename BitFieldType::value_type value) {
2259    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2260    packed_fields_ = BitFieldType::Update(value, packed_fields_);
2261  }
2262
2263 private:
2264  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
2265    auto before_use_node = uses_.before_begin();
2266    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
2267      HInstruction* user = use_node->GetUser();
2268      size_t input_index = use_node->GetIndex();
2269      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
2270      before_use_node = use_node;
2271    }
2272  }
2273
2274  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
2275    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
2276    if (next != uses_.end()) {
2277      HInstruction* next_user = next->GetUser();
2278      size_t next_index = next->GetIndex();
2279      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
2280      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
2281    }
2282  }
2283
  // Environment-use analogue of FixUpUserRecordsAfterUseInsertion: rewrites
  // the vreg records of environments whose use nodes were prepended to
  // `env_uses_`, up to (but not including) `env_fixup_end`.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      // Store the predecessor iterator so the env use can be unlinked later.
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }
2293
  // Environment-use analogue of FixUpUserRecordsAfterUseRemoval: after an env
  // use node is removed, the following node's record must point back at
  // `before_env_use_node` as its predecessor.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      // The vreg record being fixed up must still refer to this instruction.
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }
2303
  // Intrusive links for the owning block's instruction (or phi) list.
  HInstruction* previous_;
  HInstruction* next_;
  // The basic block this instruction belongs to.
  HBasicBlock* block_;
  // The dex pc associated with this instruction (kNoDexPc if none).
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields: flags and small bit fields shared with subclasses
  // (accessed through Get/SetPackedFlag and Get/SetPackedField).
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  // The side effects (memory dependencies, exceptions, GC) of this instruction.
  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2354};
2355std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2356
2357// Iterates over the instructions, while preserving the next instruction
2358// in case the current instruction gets removed from the list by the user
2359// of this iterator.
2360class HInstructionIterator : public ValueObject {
2361 public:
2362  explicit HInstructionIterator(const HInstructionList& instructions)
2363      : instruction_(instructions.first_instruction_) {
2364    next_ = Done() ? nullptr : instruction_->GetNext();
2365  }
2366
2367  bool Done() const { return instruction_ == nullptr; }
2368  HInstruction* Current() const { return instruction_; }
2369  void Advance() {
2370    instruction_ = next_;
2371    next_ = Done() ? nullptr : instruction_->GetNext();
2372  }
2373
2374 private:
2375  HInstruction* instruction_;
2376  HInstruction* next_;
2377
2378  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2379};
2380
2381// Iterates over the instructions without saving the next instruction,
2382// therefore handling changes in the graph potentially made by the user
2383// of this iterator.
class HInstructionIteratorHandleChanges : public ValueObject {
 public:
  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  // The next link is read only at advance time, so list changes made after
  // visiting Current() are observed by the iteration.
  void Advance() {
    instruction_ = instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
};
2401
2402
2403class HBackwardInstructionIterator : public ValueObject {
2404 public:
2405  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2406      : instruction_(instructions.last_instruction_) {
2407    next_ = Done() ? nullptr : instruction_->GetPrevious();
2408  }
2409
2410  bool Done() const { return instruction_ == nullptr; }
2411  HInstruction* Current() const { return instruction_; }
2412  void Advance() {
2413    instruction_ = next_;
2414    next_ = Done() ? nullptr : instruction_->GetPrevious();
2415  }
2416
2417 private:
2418  HInstruction* instruction_;
2419  HInstruction* next_;
2420
2421  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2422};
2423
// Base class for instructions whose input count may change after
// construction (e.g. HPhi, HDeoptimize): inputs live in an arena vector
// instead of a fixed-size array.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 protected:
  // `kind` tags the arena allocation of the input vector for accounting.
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2449
// An HInstruction with a compile-time fixed number of inputs (N), stored
// inline in a std::array rather than in arena-allocated storage.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2467
2468// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // No inputs, so hand back an empty view (no std::array<..., 0> member).
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2485
2486template<intptr_t N>
2487class HExpression : public HTemplateInstruction<N> {
2488 public:
2489  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
2490      : HTemplateInstruction<N>(side_effects, dex_pc) {
2491    this->template SetPackedField<TypeField>(type);
2492  }
2493  virtual ~HExpression() {}
2494
2495  Primitive::Type GetType() const OVERRIDE {
2496    return TypeField::Decode(this->GetPackedFields());
2497  }
2498
2499 protected:
2500  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2501  static constexpr size_t kFieldTypeSize =
2502      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
2503  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
2504  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
2505                "Too many packed fields.");
2506  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
2507};
2508
2509// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2510// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Control-flow instruction: terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2523
2524// Represents dex's RETURN opcodes. A HReturn is a control flow
2525// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // `value` (input 0) is the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Control-flow instruction: terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2540
2541class HPhi FINAL : public HVariableInputSizeInstruction {
2542 public:
2543  HPhi(ArenaAllocator* arena,
2544       uint32_t reg_number,
2545       size_t number_of_inputs,
2546       Primitive::Type type,
2547       uint32_t dex_pc = kNoDexPc)
2548      : HVariableInputSizeInstruction(
2549            SideEffects::None(),
2550            dex_pc,
2551            arena,
2552            number_of_inputs,
2553            kArenaAllocPhiInputs),
2554        reg_number_(reg_number) {
2555    SetPackedField<TypeField>(ToPhiType(type));
2556    DCHECK_NE(GetType(), Primitive::kPrimVoid);
2557    // Phis are constructed live and marked dead if conflicting or unused.
2558    // Individual steps of SsaBuilder should assume that if a phi has been
2559    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
2560    SetPackedFlag<kFlagIsLive>(true);
2561    SetPackedFlag<kFlagCanBeNull>(true);
2562  }
2563
2564  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
2565  static Primitive::Type ToPhiType(Primitive::Type type) {
2566    return Primitive::PrimitiveKind(type);
2567  }
2568
2569  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
2570
2571  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
2572  void SetType(Primitive::Type new_type) {
2573    // Make sure that only valid type changes occur. The following are allowed:
2574    //  (1) int  -> float/ref (primitive type propagation),
2575    //  (2) long -> double (primitive type propagation).
2576    DCHECK(GetType() == new_type ||
2577           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
2578           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
2579           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
2580    SetPackedField<TypeField>(new_type);
2581  }
2582
2583  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
2584  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
2585
2586  uint32_t GetRegNumber() const { return reg_number_; }
2587
2588  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
2589  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
2590  bool IsDead() const { return !IsLive(); }
2591  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
2592
2593  bool IsVRegEquivalentOf(const HInstruction* other) const {
2594    return other != nullptr
2595        && other->IsPhi()
2596        && other->AsPhi()->GetBlock() == GetBlock()
2597        && other->AsPhi()->GetRegNumber() == GetRegNumber();
2598  }
2599
2600  // Returns the next equivalent phi (starting from the current one) or null if there is none.
2601  // An equivalent phi is a phi having the same dex register and type.
2602  // It assumes that phis with the same dex register are adjacent.
2603  HPhi* GetNextEquivalentPhiWithSameType() {
2604    HInstruction* next = GetNext();
2605    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
2606      if (next->GetType() == GetType()) {
2607        return next->AsPhi();
2608      }
2609      next = next->GetNext();
2610    }
2611    return nullptr;
2612  }
2613
2614  DECLARE_INSTRUCTION(Phi);
2615
2616 private:
2617  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2618  static constexpr size_t kFieldTypeSize =
2619      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
2620  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
2621  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
2622  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
2623  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
2624  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
2625
2626  const uint32_t reg_number_;
2627
2628  DISALLOW_COPY_AND_ASSIGN(HPhi);
2629};
2630
2631// The exit instruction is the only instruction of the exit block.
2632// Instructions aborting the method (HThrow and HReturn) must branch to the
2633// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Control-flow instruction: terminates the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2645
2646// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A goto's block has exactly one successor: the jump target.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2662
// Abstract base of all compile-time constants. Constants have no side
// effects and no inputs.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  // Constants are pure values and may freely be moved/commoned.
  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  // Note: for floating point this includes -0.0, which is arithmetic zero
  // but not a 0-bit pattern (see the HFloatConstant/HDoubleConstant overrides).
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw 64-bit representation of the value (see subclasses for the exact
  // widening/bit-cast rules).
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2686
2687class HNullConstant FINAL : public HConstant {
2688 public:
2689  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2690    return true;
2691  }
2692
2693  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2694
2695  size_t ComputeHashCode() const OVERRIDE { return 0; }
2696
2697  // The null constant representation is a 0-bit pattern.
2698  virtual bool IsZeroBitPattern() const { return true; }
2699
2700  DECLARE_INSTRUCTION(NullConstant);
2701
2702 private:
2703  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2704
2705  friend class HGraph;
2706  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2707};
2708
2709// Constants of the type int. Those can be from Dex instructions, or
2710// synthesized (for example with the if-eqz instruction).
2711class HIntConstant FINAL : public HConstant {
2712 public:
2713  int32_t GetValue() const { return value_; }
2714
2715  uint64_t GetValueAsUint64() const OVERRIDE {
2716    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
2717  }
2718
2719  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2720    DCHECK(other->IsIntConstant()) << other->DebugName();
2721    return other->AsIntConstant()->value_ == value_;
2722  }
2723
2724  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }
2725
2726  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2727  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2728  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2729  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2730
2731  // Integer constants are used to encode Boolean values as well,
2732  // where 1 means true and 0 means false.
2733  bool IsTrue() const { return GetValue() == 1; }
2734  bool IsFalse() const { return GetValue() == 0; }
2735
2736  DECLARE_INSTRUCTION(IntConstant);
2737
2738 private:
2739  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2740      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
2741  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
2742      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}
2743
2744  const int32_t value_;
2745
2746  friend class HGraph;
2747  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
2748  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
2749  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
2750};
2751
2752class HLongConstant FINAL : public HConstant {
2753 public:
2754  int64_t GetValue() const { return value_; }
2755
2756  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }
2757
2758  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2759    DCHECK(other->IsLongConstant()) << other->DebugName();
2760    return other->AsLongConstant()->value_ == value_;
2761  }
2762
2763  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2764
2765  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2766  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2767  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2768  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2769
2770  DECLARE_INSTRUCTION(LongConstant);
2771
2772 private:
2773  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2774      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}
2775
2776  const int64_t value_;
2777
2778  friend class HGraph;
2779  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
2780};
2781
2782class HFloatConstant FINAL : public HConstant {
2783 public:
2784  float GetValue() const { return value_; }
2785
2786  uint64_t GetValueAsUint64() const OVERRIDE {
2787    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2788  }
2789
2790  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2791    DCHECK(other->IsFloatConstant()) << other->DebugName();
2792    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2793  }
2794
2795  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2796
2797  bool IsMinusOne() const OVERRIDE {
2798    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2799  }
2800  bool IsArithmeticZero() const OVERRIDE {
2801    return std::fpclassify(value_) == FP_ZERO;
2802  }
2803  bool IsArithmeticPositiveZero() const {
2804    return IsArithmeticZero() && !std::signbit(value_);
2805  }
2806  bool IsArithmeticNegativeZero() const {
2807    return IsArithmeticZero() && std::signbit(value_);
2808  }
2809  bool IsZeroBitPattern() const OVERRIDE {
2810    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2811  }
2812  bool IsOne() const OVERRIDE {
2813    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2814  }
2815  bool IsNaN() const {
2816    return std::isnan(value_);
2817  }
2818
2819  DECLARE_INSTRUCTION(FloatConstant);
2820
2821 private:
2822  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2823      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
2824  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2825      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2826
2827  const float value_;
2828
2829  // Only the SsaBuilder and HGraph can create floating-point constants.
2830  friend class SsaBuilder;
2831  friend class HGraph;
2832  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
2833};
2834
2835class HDoubleConstant FINAL : public HConstant {
2836 public:
2837  double GetValue() const { return value_; }
2838
2839  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2840
2841  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2842    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2843    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2844  }
2845
2846  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2847
2848  bool IsMinusOne() const OVERRIDE {
2849    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2850  }
2851  bool IsArithmeticZero() const OVERRIDE {
2852    return std::fpclassify(value_) == FP_ZERO;
2853  }
2854  bool IsArithmeticPositiveZero() const {
2855    return IsArithmeticZero() && !std::signbit(value_);
2856  }
2857  bool IsArithmeticNegativeZero() const {
2858    return IsArithmeticZero() && std::signbit(value_);
2859  }
2860  bool IsZeroBitPattern() const OVERRIDE {
2861    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2862  }
2863  bool IsOne() const OVERRIDE {
2864    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2865  }
2866  bool IsNaN() const {
2867    return std::isnan(value_);
2868  }
2869
2870  DECLARE_INSTRUCTION(DoubleConstant);
2871
2872 private:
2873  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
2874      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
2875  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2876      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
2877
2878  const double value_;
2879
2880  // Only the SsaBuilder and HGraph can create floating-point constants.
2881  friend class SsaBuilder;
2882  friend class HGraph;
2883  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
2884};
2885
2886// Conditional branch. A block ending with an HIf instruction must have
2887// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // `input` (input 0) is the Boolean condition being branched on.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Successor taken when the condition is true (successor index 0).
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // Successor taken when the condition is false (successor index 1).
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2910
2911
2912// Abstract instruction which marks the beginning and/or end of a try block and
2913// links it to the respective exception handlers. Behaves the same as a Goto in
2914// non-exceptional control flow.
2915// Normal-flow successor is stored at index zero, exception handlers under
2916// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary marks the start or the end of a try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // All successors except the first are exception handlers.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  // Compares this boundary's exception handler successors with `other`'s.
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed field layout for the boundary kind.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2973
2974// Deoptimize to interpreter, upon checking a condition.
2975class HDeoptimize FINAL : public HVariableInputSizeInstruction {
2976 public:
2977  // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
2978  // across.
2979  HDeoptimize(ArenaAllocator* arena, HInstruction* cond, DeoptimizationKind kind, uint32_t dex_pc)
2980      : HVariableInputSizeInstruction(
2981            SideEffects::All(),
2982            dex_pc,
2983            arena,
2984            /* number_of_inputs */ 1,
2985            kArenaAllocMisc) {
2986    SetPackedFlag<kFieldCanBeMoved>(false);
2987    SetPackedField<DeoptimizeKindField>(kind);
2988    SetRawInputAt(0, cond);
2989  }
2990
2991  // Use this constructor when the `HDeoptimize` guards an instruction, and any user
2992  // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
2993  // instead of `guard`.
2994  // We set CanTriggerGC to prevent any intermediate address to be live
2995  // at the point of the `HDeoptimize`.
2996  HDeoptimize(ArenaAllocator* arena,
2997              HInstruction* cond,
2998              HInstruction* guard,
2999              DeoptimizationKind kind,
3000              uint32_t dex_pc)
3001      : HVariableInputSizeInstruction(
3002            SideEffects::CanTriggerGC(),
3003            dex_pc,
3004            arena,
3005            /* number_of_inputs */ 2,
3006            kArenaAllocMisc) {
3007    SetPackedFlag<kFieldCanBeMoved>(true);
3008    SetPackedField<DeoptimizeKindField>(kind);
3009    SetRawInputAt(0, cond);
3010    SetRawInputAt(1, guard);
3011  }
3012
3013  bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3014
3015  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3016    return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3017  }
3018
3019  bool NeedsEnvironment() const OVERRIDE { return true; }
3020
3021  bool CanThrow() const OVERRIDE { return true; }
3022
3023  DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3024
3025  Primitive::Type GetType() const OVERRIDE {
3026    return GuardsAnInput() ? GuardedInput()->GetType() : Primitive::kPrimVoid;
3027  }
3028
3029  bool GuardsAnInput() const {
3030    return InputCount() == 2;
3031  }
3032
3033  HInstruction* GuardedInput() const {
3034    DCHECK(GuardsAnInput());
3035    return InputAt(1);
3036  }
3037
3038  void RemoveGuard() {
3039    RemoveInputAt(1);
3040  }
3041
3042  DECLARE_INSTRUCTION(Deoptimize);
3043
3044 private:
3045  static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3046  static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3047  static constexpr size_t kFieldDeoptimizeKindSize =
3048      MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3049  static constexpr size_t kNumberOfDeoptimizePackedBits =
3050      kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3051  static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3052                "Too many packed fields.");
3053  using DeoptimizeKindField =
3054      BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3055
3056  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
3057};
3058
3059// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3060// The compiled code checks this flag value in a guard before devirtualized call and
3061// if it's true, starts to do deoptimization.
3062// It has a 4-byte slot on stack.
3063// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is read as a 32-bit integer.
  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
3085
3086// Represents the ArtMethod that was passed as a first argument to
3087// the method. It is used by instructions that depend on it, like
3088// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // NOTE(review): `type` appears to be the word size used to hold the
  // ArtMethod pointer on the target — confirm at call sites.
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
3099
3100// Fetches an ArtMethod from the virtual table or the interface method table
3101// of a class.
3102class HClassTableGet FINAL : public HExpression<1> {
3103 public:
3104  enum class TableKind {
3105    kVTable,
3106    kIMTable,
3107    kLast = kIMTable
3108  };
3109  HClassTableGet(HInstruction* cls,
3110                 Primitive::Type type,
3111                 TableKind kind,
3112                 size_t index,
3113                 uint32_t dex_pc)
3114      : HExpression(type, SideEffects::None(), dex_pc),
3115        index_(index) {
3116    SetPackedField<TableKindField>(kind);
3117    SetRawInputAt(0, cls);
3118  }
3119
3120  bool CanBeMoved() const OVERRIDE { return true; }
3121  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3122    return other->AsClassTableGet()->GetIndex() == index_ &&
3123        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3124  }
3125
3126  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3127  size_t GetIndex() const { return index_; }
3128
3129  DECLARE_INSTRUCTION(ClassTableGet);
3130
3131 private:
3132  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3133  static constexpr size_t kFieldTableKindSize =
3134      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3135  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3136  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3137                "Too many packed fields.");
3138  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3139
3140  // The index of the ArtMethod in the table.
3141  const size_t index_;
3142
3143  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3144};
3145
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  // `start_value` is the case value of the first successor; successive
  // successors handle consecutive values up to `start_value + num_entries - 1`.
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  // Always terminates its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3179
// Abstract base class for operations with exactly one input that produce a
// value and have no side effects.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Unary operations are pure, so they can be moved and are equal whenever
  // their inputs are.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3211
3212class HBinaryOperation : public HExpression<2> {
3213 public:
3214  HBinaryOperation(Primitive::Type result_type,
3215                   HInstruction* left,
3216                   HInstruction* right,
3217                   SideEffects side_effects = SideEffects::None(),
3218                   uint32_t dex_pc = kNoDexPc)
3219      : HExpression(result_type, side_effects, dex_pc) {
3220    SetRawInputAt(0, left);
3221    SetRawInputAt(1, right);
3222  }
3223
3224  HInstruction* GetLeft() const { return InputAt(0); }
3225  HInstruction* GetRight() const { return InputAt(1); }
3226  Primitive::Type GetResultType() const { return GetType(); }
3227
3228  virtual bool IsCommutative() const { return false; }
3229
3230  // Put constant on the right.
3231  // Returns whether order is changed.
3232  bool OrderInputsWithConstantOnTheRight() {
3233    HInstruction* left = InputAt(0);
3234    HInstruction* right = InputAt(1);
3235    if (left->IsConstant() && !right->IsConstant()) {
3236      ReplaceInput(right, 0);
3237      ReplaceInput(left, 1);
3238      return true;
3239    }
3240    return false;
3241  }
3242
3243  // Order inputs by instruction id, but favor constant on the right side.
3244  // This helps GVN for commutative ops.
3245  void OrderInputs() {
3246    DCHECK(IsCommutative());
3247    HInstruction* left = InputAt(0);
3248    HInstruction* right = InputAt(1);
3249    if (left == right || (!left->IsConstant() && right->IsConstant())) {
3250      return;
3251    }
3252    if (OrderInputsWithConstantOnTheRight()) {
3253      return;
3254    }
3255    // Order according to instruction id.
3256    if (left->GetId() > right->GetId()) {
3257      ReplaceInput(right, 0);
3258      ReplaceInput(left, 1);
3259    }
3260  }
3261
3262  bool CanBeMoved() const OVERRIDE { return true; }
3263  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3264    return true;
3265  }
3266
3267  // Try to statically evaluate `this` and return a HConstant
3268  // containing the result of this evaluation.  If `this` cannot
3269  // be evaluated as a constant, return null.
3270  HConstant* TryStaticEvaluation() const;
3271
3272  // Apply this operation to `x` and `y`.
3273  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3274                              HNullConstant* y ATTRIBUTE_UNUSED) const {
3275    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3276    UNREACHABLE();
3277  }
3278  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3279  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3280  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3281                              HIntConstant* y ATTRIBUTE_UNUSED) const {
3282    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3283    UNREACHABLE();
3284  }
3285  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3286  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3287
3288  // Returns an input that can legally be used as the right input and is
3289  // constant, or null.
3290  HConstant* GetConstantRight() const;
3291
3292  // If `GetConstantRight()` returns one of the input, this returns the other
3293  // one. Otherwise it returns null.
3294  HInstruction* GetLeastConstantLeft() const;
3295
3296  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3297
3298 private:
3299  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
3300};
3301
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Pretty-printer for ComparisonBias, used in HIR dumps.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3312
3313class HCondition : public HBinaryOperation {
3314 public:
3315  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3316      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
3317    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3318  }
3319
3320  // For code generation purposes, returns whether this instruction is just before
3321  // `instruction`, and disregard moves in between.
3322  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3323
3324  DECLARE_ABSTRACT_INSTRUCTION(Condition);
3325
3326  virtual IfCondition GetCondition() const = 0;
3327
3328  virtual IfCondition GetOppositeCondition() const = 0;
3329
3330  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3331  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3332
3333  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3334  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3335
3336  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3337    return GetPackedFields() == other->AsCondition()->GetPackedFields();
3338  }
3339
3340  bool IsFPConditionTrueIfNaN() const {
3341    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3342    IfCondition if_cond = GetCondition();
3343    if (if_cond == kCondNE) {
3344      return true;
3345    } else if (if_cond == kCondEQ) {
3346      return false;
3347    }
3348    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3349  }
3350
3351  bool IsFPConditionFalseIfNaN() const {
3352    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3353    IfCondition if_cond = GetCondition();
3354    if (if_cond == kCondEQ) {
3355      return true;
3356    } else if (if_cond == kCondNE) {
3357      return false;
3358    }
3359    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3360  }
3361
3362 protected:
3363  // Needed if we merge a HCompare into a HCondition.
3364  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3365  static constexpr size_t kFieldComparisonBiasSize =
3366      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3367  static constexpr size_t kNumberOfConditionPackedBits =
3368      kFieldComparisonBias + kFieldComparisonBiasSize;
3369  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3370  using ComparisonBiasField =
3371      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3372
3373  template <typename T>
3374  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3375
3376  template <typename T>
3377  int32_t CompareFP(T x, T y) const {
3378    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3379    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3380    // Handle the bias.
3381    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3382  }
3383
3384  // Return an integer constant containing the result of a condition evaluated at compile time.
3385  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3386    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3387  }
3388
3389 private:
3390  DISALLOW_COPY_AND_ASSIGN(HCondition);
3391};
3392
// Instruction to check if two inputs are equal to each other.
class HEqual FINAL : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null == null is always true.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(true, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HEqual instruction; evaluate it as
  // `Compare(x, y) == 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
                                 GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};
3437
// Instruction to check if two inputs are not equal to each other.
class HNotEqual FINAL : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null != null is always false.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(false, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HNotEqual instruction; evaluate it as
  // `Compare(x, y) != 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};
3480
// Instruction to check if the first input is less than the second (signed).
class HLessThan FINAL : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThan instruction; evaluate it as
  // `Compare(x, y) < 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};
3517
// Instruction to check if the first input is less than or equal to the second (signed).
class HLessThanOrEqual FINAL : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThanOrEqual instruction; evaluate it as
  // `Compare(x, y) <= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};
3554
// Instruction to check if the first input is greater than the second (signed).
class HGreaterThan FINAL : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThan instruction; evaluate it as
  // `Compare(x, y) > 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x > y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
};
3591
// Instruction to check if the first input is greater than or equal to the second (signed).
class HGreaterThanOrEqual FINAL : public HCondition {
 public:
  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
  // `Compare(x, y) >= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(GreaterThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondGE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondLT;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x >= y; }

  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
};
3628
// Unsigned `<` comparison; not defined for floating-point inputs.
class HBelow FINAL : public HCondition {
 public:
  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Below);

  IfCondition GetCondition() const OVERRIDE {
    return kCondB;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondAE;
  }

 private:
  // Compare as unsigned values of the same width.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) < MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HBelow);
};
3668
// Unsigned `<=` comparison; not defined for floating-point inputs.
class HBelowOrEqual FINAL : public HCondition {
 public:
  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BelowOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondBE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondA;
  }

 private:
  // Compare as unsigned values of the same width.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) <= MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
};
3708
// Unsigned `>` comparison; not defined for floating-point inputs.
class HAbove FINAL : public HCondition {
 public:
  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Above);

  IfCondition GetCondition() const OVERRIDE {
    return kCondA;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondBE;
  }

 private:
  // Compare as unsigned values of the same width.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) > MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HAbove);
};
3748
// Unsigned `>=` comparison; not defined for floating-point inputs.
class HAboveOrEqual FINAL : public HCondition {
 public:
  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(AboveOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondAE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondB;
  }

 private:
  // Compare as unsigned values of the same width.
  template <typename T> static bool Compute(T x, T y) {
    return MakeUnsigned(x) >= MakeUnsigned(y);
  }

  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
};
3788
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way comparison: 1, -1 or 0.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point values; NaN resolves via the bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are equal iff their packed fields (i.e. the bias) match;
  // input equality is checked by the base class.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // Packed-field layout mirrors HCondition's ComparisonBiasField, which is
  // needed when a HCompare is merged into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3879
// Allocates a new object of the class given by its single input (a HLoadClass,
// possibly wrapped in a HClinitCheck).
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Whether the runtime entrypoint performs instantiability/access checks.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an intervening HClinitCheck.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  // Runtime allocation entrypoint; mutable via SetEntrypoint (e.g. by
  // later optimization passes — confirm at call sites).
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3944
// Environment/dex-cache requirement of an intrinsic; consumed by
// HInvoke::SetIntrinsic().
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3949
// Heap side effects of an intrinsic; consumed by HInvoke::SetIntrinsic().
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3956
// Exception behavior of an intrinsic; consumed by HInvoke::SetIntrinsic().
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3961
// Base class for all invoke instructions. Inputs are the call arguments,
// optionally followed by non-argument inputs appended by subclasses (e.g.
// HInvokeStaticOrDirect's current-method or explicit clinit-check inputs).
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic with the given environment,
  // side-effect and exception properties.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics without heap writes may be moved (hoisted/deduplicated).
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal only when both are the same recognized intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed field layout (appended after the generic packed bits):
  // invoke type, return type, then the can-throw flag.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume the callee can throw; SetCanThrow()/SetIntrinsic()
    // may clear this later.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
4071
// Invoke of a method that could not be resolved at compile time: no
// ArtMethod* is available (null is passed to the HInvoke base).
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
4095
// Invoke-polymorphic call. Modeled as a virtual invoke (kVirtual) with no
// resolved method.
class HInvokePolymorphic FINAL : public HInvoke {
 public:
  HInvokePolymorphic(ArenaAllocator* arena,
                     uint32_t number_of_arguments,
                     Primitive::Type return_type,
                     uint32_t dex_pc,
                     uint32_t dex_method_index)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                nullptr,  // No resolved method.
                kVirtual) {}

  DECLARE_INSTRUCTION(InvokePolymorphic);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
};
4117
// Invoke of a statically-bound target (static or direct call). Carries
// dispatch information (how to load the target ArtMethod* and where its code
// pointer comes from) plus the clinit-check requirement for static calls.
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit, and potentially
                // one other if the method is a string factory.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Update the dispatch info, removing the now-redundant current-method input
  // if the new method load kind no longer needs it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  DispatchInfo GetDispatchInfo() const {
    return dispatch_info_;
  }

  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // Packed field layout: the clinit check requirement, appended after the
  // bits used by HInvoke.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4372
// Invoke of a virtual method, dispatched through the vtable slot
// `vtable_index_` of the receiver's class.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    // These intrinsics are known to never return null.
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4420
4421class HInvokeInterface FINAL : public HInvoke {
4422 public:
4423  HInvokeInterface(ArenaAllocator* arena,
4424                   uint32_t number_of_arguments,
4425                   Primitive::Type return_type,
4426                   uint32_t dex_pc,
4427                   uint32_t dex_method_index,
4428                   ArtMethod* resolved_method,
4429                   uint32_t imt_index)
4430      : HInvoke(arena,
4431                number_of_arguments,
4432                0u,
4433                return_type,
4434                dex_pc,
4435                dex_method_index,
4436                resolved_method,
4437                kInterface),
4438        imt_index_(imt_index) {}
4439
4440  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4441    // TODO: Add implicit null checks in intrinsics.
4442    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4443  }
4444
4445  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
4446    // The assembly stub currently needs it.
4447    return true;
4448  }
4449
4450  uint32_t GetImtIndex() const { return imt_index_; }
4451  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4452
4453  DECLARE_INSTRUCTION(InvokeInterface);
4454
4455 private:
4456  // Cached value of the resolved method, to avoid needing the mutator lock.
4457  const uint32_t imt_index_;
4458
4459  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4460};
4461
4462class HNeg FINAL : public HUnaryOperation {
4463 public:
4464  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4465      : HUnaryOperation(result_type, input, dex_pc) {
4466    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4467  }
4468
4469  template <typename T> static T Compute(T x) { return -x; }
4470
4471  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4472    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4473  }
4474  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4475    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4476  }
4477  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4478    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4479  }
4480  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4481    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4482  }
4483
4484  DECLARE_INSTRUCTION(Neg);
4485
4486 private:
4487  DISALLOW_COPY_AND_ASSIGN(HNeg);
4488};
4489
4490class HNewArray FINAL : public HExpression<2> {
4491 public:
4492  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4493      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc) {
4494    SetRawInputAt(0, cls);
4495    SetRawInputAt(1, length);
4496  }
4497
4498  // Calls runtime so needs an environment.
4499  bool NeedsEnvironment() const OVERRIDE { return true; }
4500
4501  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4502  bool CanThrow() const OVERRIDE { return true; }
4503
4504  bool CanBeNull() const OVERRIDE { return false; }
4505
4506  HLoadClass* GetLoadClass() const {
4507    DCHECK(InputAt(0)->IsLoadClass());
4508    return InputAt(0)->AsLoadClass();
4509  }
4510
4511  HInstruction* GetLength() const {
4512    return InputAt(1);
4513  }
4514
4515  DECLARE_INSTRUCTION(NewArray);
4516
4517 private:
4518  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4519};
4520
4521class HAdd FINAL : public HBinaryOperation {
4522 public:
4523  HAdd(Primitive::Type result_type,
4524       HInstruction* left,
4525       HInstruction* right,
4526       uint32_t dex_pc = kNoDexPc)
4527      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4528
4529  bool IsCommutative() const OVERRIDE { return true; }
4530
4531  template <typename T> static T Compute(T x, T y) { return x + y; }
4532
4533  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4534    return GetBlock()->GetGraph()->GetIntConstant(
4535        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4536  }
4537  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4538    return GetBlock()->GetGraph()->GetLongConstant(
4539        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4540  }
4541  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4542    return GetBlock()->GetGraph()->GetFloatConstant(
4543        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4544  }
4545  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4546    return GetBlock()->GetGraph()->GetDoubleConstant(
4547        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4548  }
4549
4550  DECLARE_INSTRUCTION(Add);
4551
4552 private:
4553  DISALLOW_COPY_AND_ASSIGN(HAdd);
4554};
4555
4556class HSub FINAL : public HBinaryOperation {
4557 public:
4558  HSub(Primitive::Type result_type,
4559       HInstruction* left,
4560       HInstruction* right,
4561       uint32_t dex_pc = kNoDexPc)
4562      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4563
4564  template <typename T> static T Compute(T x, T y) { return x - y; }
4565
4566  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4567    return GetBlock()->GetGraph()->GetIntConstant(
4568        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4569  }
4570  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4571    return GetBlock()->GetGraph()->GetLongConstant(
4572        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4573  }
4574  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4575    return GetBlock()->GetGraph()->GetFloatConstant(
4576        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4577  }
4578  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4579    return GetBlock()->GetGraph()->GetDoubleConstant(
4580        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4581  }
4582
4583  DECLARE_INSTRUCTION(Sub);
4584
4585 private:
4586  DISALLOW_COPY_AND_ASSIGN(HSub);
4587};
4588
4589class HMul FINAL : public HBinaryOperation {
4590 public:
4591  HMul(Primitive::Type result_type,
4592       HInstruction* left,
4593       HInstruction* right,
4594       uint32_t dex_pc = kNoDexPc)
4595      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4596
4597  bool IsCommutative() const OVERRIDE { return true; }
4598
4599  template <typename T> static T Compute(T x, T y) { return x * y; }
4600
4601  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4602    return GetBlock()->GetGraph()->GetIntConstant(
4603        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4604  }
4605  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4606    return GetBlock()->GetGraph()->GetLongConstant(
4607        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4608  }
4609  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4610    return GetBlock()->GetGraph()->GetFloatConstant(
4611        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4612  }
4613  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4614    return GetBlock()->GetGraph()->GetDoubleConstant(
4615        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4616  }
4617
4618  DECLARE_INSTRUCTION(Mul);
4619
4620 private:
4621  DISALLOW_COPY_AND_ASSIGN(HMul);
4622};
4623
4624class HDiv FINAL : public HBinaryOperation {
4625 public:
4626  HDiv(Primitive::Type result_type,
4627       HInstruction* left,
4628       HInstruction* right,
4629       uint32_t dex_pc)
4630      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4631
4632  template <typename T>
4633  T ComputeIntegral(T x, T y) const {
4634    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4635    // Our graph structure ensures we never have 0 for `y` during
4636    // constant folding.
4637    DCHECK_NE(y, 0);
4638    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4639    return (y == -1) ? -x : x / y;
4640  }
4641
4642  template <typename T>
4643  T ComputeFP(T x, T y) const {
4644    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4645    return x / y;
4646  }
4647
4648  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4649    return GetBlock()->GetGraph()->GetIntConstant(
4650        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4651  }
4652  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4653    return GetBlock()->GetGraph()->GetLongConstant(
4654        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4655  }
4656  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4657    return GetBlock()->GetGraph()->GetFloatConstant(
4658        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4659  }
4660  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4661    return GetBlock()->GetGraph()->GetDoubleConstant(
4662        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4663  }
4664
4665  DECLARE_INSTRUCTION(Div);
4666
4667 private:
4668  DISALLOW_COPY_AND_ASSIGN(HDiv);
4669};
4670
4671class HRem FINAL : public HBinaryOperation {
4672 public:
4673  HRem(Primitive::Type result_type,
4674       HInstruction* left,
4675       HInstruction* right,
4676       uint32_t dex_pc)
4677      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4678
4679  template <typename T>
4680  T ComputeIntegral(T x, T y) const {
4681    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4682    // Our graph structure ensures we never have 0 for `y` during
4683    // constant folding.
4684    DCHECK_NE(y, 0);
4685    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4686    return (y == -1) ? 0 : x % y;
4687  }
4688
4689  template <typename T>
4690  T ComputeFP(T x, T y) const {
4691    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4692    return std::fmod(x, y);
4693  }
4694
4695  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4696    return GetBlock()->GetGraph()->GetIntConstant(
4697        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4698  }
4699  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4700    return GetBlock()->GetGraph()->GetLongConstant(
4701        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4702  }
4703  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4704    return GetBlock()->GetGraph()->GetFloatConstant(
4705        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4706  }
4707  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4708    return GetBlock()->GetGraph()->GetDoubleConstant(
4709        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4710  }
4711
4712  DECLARE_INSTRUCTION(Rem);
4713
4714 private:
4715  DISALLOW_COPY_AND_ASSIGN(HRem);
4716};
4717
// Checks its single input for zero; throwing requires a runtime call (see
// NeedsEnvironment/CanThrow below). Guards a division or remainder.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // The check passes its value through: its type is always that of input 0.
  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Any two div-zero checks are interchangeable for value numbering.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};
4743
4744class HShl FINAL : public HBinaryOperation {
4745 public:
4746  HShl(Primitive::Type result_type,
4747       HInstruction* value,
4748       HInstruction* distance,
4749       uint32_t dex_pc = kNoDexPc)
4750      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4751    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4752    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4753  }
4754
4755  template <typename T>
4756  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4757    return value << (distance & max_shift_distance);
4758  }
4759
4760  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4761    return GetBlock()->GetGraph()->GetIntConstant(
4762        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4763  }
4764  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4765    return GetBlock()->GetGraph()->GetLongConstant(
4766        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4767  }
4768  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4769                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4770    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4771    UNREACHABLE();
4772  }
4773  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4774                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4775    LOG(FATAL) << DebugName() << " is not defined for float values";
4776    UNREACHABLE();
4777  }
4778  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4779                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4780    LOG(FATAL) << DebugName() << " is not defined for double values";
4781    UNREACHABLE();
4782  }
4783
4784  DECLARE_INSTRUCTION(Shl);
4785
4786 private:
4787  DISALLOW_COPY_AND_ASSIGN(HShl);
4788};
4789
// Signed (arithmetic) right shift. The distance operand is always int and
// only its low bits (masked with the maximum shift distance) are used.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // NOTE(review): `>>` on a negative signed value is implementation-defined
  // in C++ (pre-C++20); this presumably relies on the toolchains emitting an
  // arithmetic shift — confirm against supported compilers.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Shift distances are always int, so the remaining overloads are invalid.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4835
4836class HUShr FINAL : public HBinaryOperation {
4837 public:
4838  HUShr(Primitive::Type result_type,
4839        HInstruction* value,
4840        HInstruction* distance,
4841        uint32_t dex_pc = kNoDexPc)
4842      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4843    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4844    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4845  }
4846
4847  template <typename T>
4848  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4849    typedef typename std::make_unsigned<T>::type V;
4850    V ux = static_cast<V>(value);
4851    return static_cast<T>(ux >> (distance & max_shift_distance));
4852  }
4853
4854  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4855    return GetBlock()->GetGraph()->GetIntConstant(
4856        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4857  }
4858  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4859    return GetBlock()->GetGraph()->GetLongConstant(
4860        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4861  }
4862  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4863                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4864    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4865    UNREACHABLE();
4866  }
4867  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4868                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4869    LOG(FATAL) << DebugName() << " is not defined for float values";
4870    UNREACHABLE();
4871  }
4872  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4873                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4874    LOG(FATAL) << DebugName() << " is not defined for double values";
4875    UNREACHABLE();
4876  }
4877
4878  DECLARE_INSTRUCTION(UShr);
4879
4880 private:
4881  DISALLOW_COPY_AND_ASSIGN(HUShr);
4882};
4883
4884class HAnd FINAL : public HBinaryOperation {
4885 public:
4886  HAnd(Primitive::Type result_type,
4887       HInstruction* left,
4888       HInstruction* right,
4889       uint32_t dex_pc = kNoDexPc)
4890      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4891
4892  bool IsCommutative() const OVERRIDE { return true; }
4893
4894  template <typename T> static T Compute(T x, T y) { return x & y; }
4895
4896  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4897    return GetBlock()->GetGraph()->GetIntConstant(
4898        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4899  }
4900  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4901    return GetBlock()->GetGraph()->GetLongConstant(
4902        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4903  }
4904  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4905                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4906    LOG(FATAL) << DebugName() << " is not defined for float values";
4907    UNREACHABLE();
4908  }
4909  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4910                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4911    LOG(FATAL) << DebugName() << " is not defined for double values";
4912    UNREACHABLE();
4913  }
4914
4915  DECLARE_INSTRUCTION(And);
4916
4917 private:
4918  DISALLOW_COPY_AND_ASSIGN(HAnd);
4919};
4920
4921class HOr FINAL : public HBinaryOperation {
4922 public:
4923  HOr(Primitive::Type result_type,
4924      HInstruction* left,
4925      HInstruction* right,
4926      uint32_t dex_pc = kNoDexPc)
4927      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4928
4929  bool IsCommutative() const OVERRIDE { return true; }
4930
4931  template <typename T> static T Compute(T x, T y) { return x | y; }
4932
4933  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4934    return GetBlock()->GetGraph()->GetIntConstant(
4935        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4936  }
4937  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4938    return GetBlock()->GetGraph()->GetLongConstant(
4939        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4940  }
4941  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4942                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4943    LOG(FATAL) << DebugName() << " is not defined for float values";
4944    UNREACHABLE();
4945  }
4946  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4947                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4948    LOG(FATAL) << DebugName() << " is not defined for double values";
4949    UNREACHABLE();
4950  }
4951
4952  DECLARE_INSTRUCTION(Or);
4953
4954 private:
4955  DISALLOW_COPY_AND_ASSIGN(HOr);
4956};
4957
4958class HXor FINAL : public HBinaryOperation {
4959 public:
4960  HXor(Primitive::Type result_type,
4961       HInstruction* left,
4962       HInstruction* right,
4963       uint32_t dex_pc = kNoDexPc)
4964      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4965
4966  bool IsCommutative() const OVERRIDE { return true; }
4967
4968  template <typename T> static T Compute(T x, T y) { return x ^ y; }
4969
4970  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4971    return GetBlock()->GetGraph()->GetIntConstant(
4972        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4973  }
4974  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4975    return GetBlock()->GetGraph()->GetLongConstant(
4976        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4977  }
4978  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4979                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4980    LOG(FATAL) << DebugName() << " is not defined for float values";
4981    UNREACHABLE();
4982  }
4983  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4984                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4985    LOG(FATAL) << DebugName() << " is not defined for double values";
4986    UNREACHABLE();
4987  }
4988
4989  DECLARE_INSTRUCTION(Xor);
4990
4991 private:
4992  DISALLOW_COPY_AND_ASSIGN(HXor);
4993};
4994
4995class HRor FINAL : public HBinaryOperation {
4996 public:
4997  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4998    : HBinaryOperation(result_type, value, distance) {
4999    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
5000    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
5001  }
5002
5003  template <typename T>
5004  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5005    typedef typename std::make_unsigned<T>::type V;
5006    V ux = static_cast<V>(value);
5007    if ((distance & max_shift_value) == 0) {
5008      return static_cast<T>(ux);
5009    } else {
5010      const V reg_bits = sizeof(T) * 8;
5011      return static_cast<T>(ux >> (distance & max_shift_value)) |
5012                           (value << (reg_bits - (distance & max_shift_value)));
5013    }
5014  }
5015
5016  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
5017    return GetBlock()->GetGraph()->GetIntConstant(
5018        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5019  }
5020  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
5021    return GetBlock()->GetGraph()->GetLongConstant(
5022        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5023  }
5024  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5025                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5026    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5027    UNREACHABLE();
5028  }
5029  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5030                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5031    LOG(FATAL) << DebugName() << " is not defined for float values";
5032    UNREACHABLE();
5033  }
5034  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5035                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
5036    LOG(FATAL) << DebugName() << " is not defined for double values";
5037    UNREACHABLE();
5038  }
5039
5040  DECLARE_INSTRUCTION(Ror);
5041
5042 private:
5043  DISALLOW_COPY_AND_ASSIGN(HRor);
5044};
5045
5046// The value of a parameter in this method. Its location depends on
5047// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument is never null; every other parameter is
    // assumed nullable until SetCanBeNull clears the flag.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const ATTRIBUTE_UNUSED { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
5089
5090class HNot FINAL : public HUnaryOperation {
5091 public:
5092  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5093      : HUnaryOperation(result_type, input, dex_pc) {}
5094
5095  bool CanBeMoved() const OVERRIDE { return true; }
5096  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5097    return true;
5098  }
5099
5100  template <typename T> static T Compute(T x) { return ~x; }
5101
5102  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5103    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5104  }
5105  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
5106    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5107  }
5108  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5109    LOG(FATAL) << DebugName() << " is not defined for float values";
5110    UNREACHABLE();
5111  }
5112  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5113    LOG(FATAL) << DebugName() << " is not defined for double values";
5114    UNREACHABLE();
5115  }
5116
5117  DECLARE_INSTRUCTION(Not);
5118
5119 private:
5120  DISALLOW_COPY_AND_ASSIGN(HNot);
5121};
5122
5123class HBooleanNot FINAL : public HUnaryOperation {
5124 public:
5125  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5126      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
5127
5128  bool CanBeMoved() const OVERRIDE { return true; }
5129  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5130    return true;
5131  }
5132
5133  template <typename T> static bool Compute(T x) {
5134    DCHECK(IsUint<1>(x)) << x;
5135    return !x;
5136  }
5137
5138  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5139    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5140  }
5141  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5142    LOG(FATAL) << DebugName() << " is not defined for long values";
5143    UNREACHABLE();
5144  }
5145  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5146    LOG(FATAL) << DebugName() << " is not defined for float values";
5147    UNREACHABLE();
5148  }
5149  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5150    LOG(FATAL) << DebugName() << " is not defined for double values";
5151    UNREACHABLE();
5152  }
5153
5154  DECLARE_INSTRUCTION(BooleanNot);
5155
5156 private:
5157  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
5158};
5159
// Converts the single input to `result_type` (primitive-to-primitive only;
// conversions to boolean are never generated).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Conversions carry no extra data; equal inputs imply equal conversions.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5188
// Marker for "no register assigned"; -1 wraps to the maximal uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
5190
// Checks that its input reference is not null; throwing requires a runtime
// call (see NeedsEnvironment/CanThrow below).
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Any two null checks are interchangeable for value numbering.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // A value that passed the check is, by definition, non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5217
5218// Embeds an ArtField and all the information required by the compiler. We cache
5219// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  // All values are captured eagerly at construction so later passes can
  // query them without resolving the field again.
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  ArtField* const field_;
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // Field index in `dex_file_`.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5254
// Reads an instance field from the object in input 0; the field's metadata
// is cached in a FieldInfo (see the FieldInfo class comment).
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads must keep their position; only non-volatile gets move.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equal for value numbering when they read the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // Implicit null check only when faulting on this load would fault on the
  // checked object itself (i.e. the offset is small enough).
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the hash so gets of different fields land in
  // different buckets.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5304
// Writes input 1 into an instance field of the object in input 0.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Assume the stored value may be null until an analysis proves otherwise
    // (see ClearValueCanBeNull).
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // Implicit null check only when faulting on this store would fault on the
  // checked object itself (i.e. the offset is small enough).
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  // Packed-bit layout: one flag appended after the generic instruction bits.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5354
// Reads an element of the array in input 0 at the index in input 1.
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Returns true if `other` is the type-equivalent of this get: the same
  // array access (keyed on the dex pc) materialized once with an integral
  // type and once with a floating-point type. In debug builds we verify the
  // two gets share block, array and index, and that their types come from
  // opposite (integral vs floating-point) families.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5417
// Stores `value` into `array[index]` for an array of the given component type.
// A reference store may require a runtime type check against the array's
// actual component type and can then throw ArrayStoreException.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores can need the runtime type check; primitive stores
    // never do.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called by optimizations that prove the stored value always matches the
  // array's component type, removing the need for the runtime check.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type recorded at graph-building time, without the
  // floating-point refinement performed by GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  // Reference stores may call into the runtime, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed-field layout: each offset below chains off the previous one, so
  // the declaration order of these constants matters.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5517
// Loads the length of an array (or, see kFlagIsStringLength, of a String
// treated as an array by the instruction simplifier).
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Two array-length instructions on the same input are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5553
5554class HBoundsCheck FINAL : public HExpression<2> {
5555 public:
5556  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5557  // constructor.
5558  HBoundsCheck(HInstruction* index,
5559               HInstruction* length,
5560               uint32_t dex_pc,
5561               bool string_char_at = false)
5562      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5563    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5564    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5565    SetRawInputAt(0, index);
5566    SetRawInputAt(1, length);
5567  }
5568
5569  bool CanBeMoved() const OVERRIDE { return true; }
5570  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5571    return true;
5572  }
5573
5574  bool NeedsEnvironment() const OVERRIDE { return true; }
5575
5576  bool CanThrow() const OVERRIDE { return true; }
5577
5578  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5579
5580  HInstruction* GetIndex() const { return InputAt(0); }
5581
5582  DECLARE_INSTRUCTION(BoundsCheck);
5583
5584 private:
5585  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5586
5587  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5588};
5589
5590class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5591 public:
5592  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5593      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5594
5595  bool NeedsEnvironment() const OVERRIDE {
5596    return true;
5597  }
5598
5599  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5600  SlowPathCode* GetSlowPath() const { return slow_path_; }
5601
5602  DECLARE_INSTRUCTION(SuspendCheck);
5603
5604 private:
5605  // Only used for code generation, in order to share the same slow path between back edges
5606  // of a same loop.
5607  SlowPathCode* slow_path_;
5608
5609  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5610};
5611
5612// Pseudo-instruction which provides the native debugger with mapping information.
5613// It ensures that we can generate line number and local variables at this point.
5614class HNativeDebugInfo : public HTemplateInstruction<0> {
5615 public:
5616  explicit HNativeDebugInfo(uint32_t dex_pc)
5617      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5618
5619  bool NeedsEnvironment() const OVERRIDE {
5620    return true;
5621  }
5622
5623  DECLARE_INSTRUCTION(NativeDebugInfo);
5624
5625 private:
5626  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5627};
5628
5629/**
5630 * Instruction to load a Class object.
5631 */
5632class HLoadClass FINAL : public HInstruction {
5633 public:
5634  // Determines how to load the Class.
5635  enum class LoadKind {
5636    // We cannot load this class. See HSharpening::SharpenLoadClass.
5637    kInvalid = -1,
5638
5639    // Use the Class* from the method's own ArtMethod*.
5640    kReferrersClass,
5641
5642    // Use boot image Class* address that will be known at link time.
5643    // Used for boot image classes referenced by boot image code in non-PIC mode.
5644    kBootImageLinkTimeAddress,
5645
5646    // Use PC-relative boot image Class* address that will be known at link time.
5647    // Used for boot image classes referenced by boot image code in PIC mode.
5648    kBootImageLinkTimePcRelative,
5649
5650    // Use a known boot image Class* address, embedded in the code by the codegen.
5651    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5652    kBootImageAddress,
5653
5654    // Load from an entry in the .bss section using a PC-relative load.
5655    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5656    kBssEntry,
5657
5658    // Load from the root table associated with the JIT compiled method.
5659    kJitTableAddress,
5660
5661    // Load from resolved types array accessed through the class loaded from
5662    // the compiled method's own ArtMethod*. This is the default access type when
5663    // all other types are unavailable.
5664    kDexCacheViaMethod,
5665
5666    kLast = kDexCacheViaMethod
5667  };
5668
5669  HLoadClass(HCurrentMethod* current_method,
5670             dex::TypeIndex type_index,
5671             const DexFile& dex_file,
5672             Handle<mirror::Class> klass,
5673             bool is_referrers_class,
5674             uint32_t dex_pc,
5675             bool needs_access_check)
5676      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5677        special_input_(HUserRecord<HInstruction*>(current_method)),
5678        type_index_(type_index),
5679        dex_file_(dex_file),
5680        klass_(klass),
5681        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5682    // Referrers class should not need access check. We never inline unverified
5683    // methods so we can't possibly end up in this situation.
5684    DCHECK(!is_referrers_class || !needs_access_check);
5685
5686    SetPackedField<LoadKindField>(
5687        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5688    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5689    SetPackedFlag<kFlagIsInBootImage>(false);
5690    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5691  }
5692
5693  void SetLoadKind(LoadKind load_kind);
5694
5695  LoadKind GetLoadKind() const {
5696    return GetPackedField<LoadKindField>();
5697  }
5698
5699  bool CanBeMoved() const OVERRIDE { return true; }
5700
5701  bool InstructionDataEquals(const HInstruction* other) const;
5702
5703  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5704
5705  bool CanBeNull() const OVERRIDE { return false; }
5706
5707  bool NeedsEnvironment() const OVERRIDE {
5708    return CanCallRuntime();
5709  }
5710
5711  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5712    // The entrypoint the code generator is going to call does not do
5713    // clinit of the class.
5714    DCHECK(!NeedsAccessCheck());
5715    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5716  }
5717
5718  bool CanCallRuntime() const {
5719    return NeedsAccessCheck() ||
5720           MustGenerateClinitCheck() ||
5721           GetLoadKind() == LoadKind::kDexCacheViaMethod ||
5722           GetLoadKind() == LoadKind::kBssEntry;
5723  }
5724
5725  bool CanThrow() const OVERRIDE {
5726    return NeedsAccessCheck() ||
5727           MustGenerateClinitCheck() ||
5728           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5729           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5730           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5731           ((GetLoadKind() == LoadKind::kDexCacheViaMethod ||
5732             GetLoadKind() == LoadKind::kBssEntry) &&
5733            !IsInBootImage());
5734  }
5735
5736  ReferenceTypeInfo GetLoadedClassRTI() {
5737    return loaded_class_rti_;
5738  }
5739
5740  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5741    // Make sure we only set exact types (the loaded class should never be merged).
5742    DCHECK(rti.IsExact());
5743    loaded_class_rti_ = rti;
5744  }
5745
5746  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5747  const DexFile& GetDexFile() const { return dex_file_; }
5748
5749  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5750    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
5751  }
5752
5753  static SideEffects SideEffectsForArchRuntimeCalls() {
5754    return SideEffects::CanTriggerGC();
5755  }
5756
5757  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5758  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5759  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5760  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5761
5762  void MarkInBootImage() {
5763    SetPackedFlag<kFlagIsInBootImage>(true);
5764  }
5765
5766  void AddSpecialInput(HInstruction* special_input);
5767
5768  using HInstruction::GetInputRecords;  // Keep the const version visible.
5769  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5770    return ArrayRef<HUserRecord<HInstruction*>>(
5771        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5772  }
5773
5774  Primitive::Type GetType() const OVERRIDE {
5775    return Primitive::kPrimNot;
5776  }
5777
5778  Handle<mirror::Class> GetClass() const {
5779    return klass_;
5780  }
5781
5782  DECLARE_INSTRUCTION(LoadClass);
5783
5784 private:
5785  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5786  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5787  // Whether this instruction must generate the initialization check.
5788  // Used for code generation.
5789  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5790  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5791  static constexpr size_t kFieldLoadKindSize =
5792      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5793  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5794  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5795  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5796
5797  static bool HasTypeReference(LoadKind load_kind) {
5798    return load_kind == LoadKind::kReferrersClass ||
5799        load_kind == LoadKind::kBootImageLinkTimeAddress ||
5800        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5801        load_kind == LoadKind::kBssEntry ||
5802        load_kind == LoadKind::kDexCacheViaMethod;
5803  }
5804
5805  void SetLoadKindInternal(LoadKind load_kind);
5806
5807  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5808  // For other load kinds it's empty or possibly some architecture-specific instruction
5809  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5810  HUserRecord<HInstruction*> special_input_;
5811
5812  // A type index and dex file where the class can be accessed. The dex file can be:
5813  // - The compiling method's dex file if the class is defined there too.
5814  // - The compiling method's dex file if the class is referenced there.
5815  // - The dex file where the class is defined. When the load kind can only be
5816  //   kBssEntry or kDexCacheViaMethod, we cannot emit code for this `HLoadClass`.
5817  const dex::TypeIndex type_index_;
5818  const DexFile& dex_file_;
5819
5820  Handle<mirror::Class> klass_;
5821
5822  ReferenceTypeInfo loaded_class_rti_;
5823
5824  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5825};
5826std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5827
5828// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5829inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5830  // The special input is used for PC-relative loads on some architectures,
5831  // including literal pool loads, which are PC-relative too.
5832  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5833         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5834         GetLoadKind() == LoadKind::kBootImageAddress ||
5835         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5836  DCHECK(special_input_.GetInstruction() == nullptr);
5837  special_input_ = HUserRecord<HInstruction*>(special_input);
5838  special_input->AddUseAt(this, 0);
5839}
5840
// Loads a java.lang.String reference for the given string index.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    kLast = kDexCacheViaMethod,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    // Start with the most conservative kind; sharpening may refine it later.
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  // Can throw only when the load may go through the runtime (see
  // NeedsEnvironment() above).
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  // Packed-field layout: the load kind is stored right after the generic bits.
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5971std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5972
5973// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5974inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5975  // The special input is used for PC-relative loads on some architectures,
5976  // including literal pool loads, which are PC-relative too.
5977  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5978         GetLoadKind() == LoadKind::kBssEntry ||
5979         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5980         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5981  // HLoadString::GetInputRecords() returns an empty array at this point,
5982  // so use the GetInputRecords() from the base class to set the input record.
5983  DCHECK(special_input_.GetInstruction() == nullptr);
5984  special_input_ = HUserRecord<HInstruction*>(special_input);
5985  special_input->AddUseAt(this, 0);
5986}
5987
5988/**
5989 * Performs an initialization check on its Class object input.
5990 */
5991class HClinitCheck FINAL : public HExpression<1> {
5992 public:
5993  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5994      : HExpression(
5995            Primitive::kPrimNot,
5996            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5997            dex_pc) {
5998    SetRawInputAt(0, constant);
5999  }
6000
6001  bool CanBeMoved() const OVERRIDE { return true; }
6002  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6003    return true;
6004  }
6005
6006  bool NeedsEnvironment() const OVERRIDE {
6007    // May call runtime to initialize the class.
6008    return true;
6009  }
6010
6011  bool CanThrow() const OVERRIDE { return true; }
6012
6013  HLoadClass* GetLoadClass() const {
6014    DCHECK(InputAt(0)->IsLoadClass());
6015    return InputAt(0)->AsLoadClass();
6016  }
6017
6018  DECLARE_INSTRUCTION(ClinitCheck);
6019
6020 private:
6021  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
6022};
6023
6024class HStaticFieldGet FINAL : public HExpression<1> {
6025 public:
6026  HStaticFieldGet(HInstruction* cls,
6027                  ArtField* field,
6028                  Primitive::Type field_type,
6029                  MemberOffset field_offset,
6030                  bool is_volatile,
6031                  uint32_t field_idx,
6032                  uint16_t declaring_class_def_index,
6033                  const DexFile& dex_file,
6034                  uint32_t dex_pc)
6035      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
6036        field_info_(field,
6037                    field_offset,
6038                    field_type,
6039                    is_volatile,
6040                    field_idx,
6041                    declaring_class_def_index,
6042                    dex_file) {
6043    SetRawInputAt(0, cls);
6044  }
6045
6046
6047  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
6048
6049  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
6050    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6051    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6052  }
6053
6054  size_t ComputeHashCode() const OVERRIDE {
6055    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6056  }
6057
6058  const FieldInfo& GetFieldInfo() const { return field_info_; }
6059  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6060  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
6061  bool IsVolatile() const { return field_info_.IsVolatile(); }
6062
6063  DECLARE_INSTRUCTION(StaticFieldGet);
6064
6065 private:
6066  const FieldInfo field_info_;
6067
6068  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
6069};
6070
// Writes `value` into a static field of the class referenced by `cls`.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the value may be null; optimizations may clear
    // this via ClearValueCanBeNull().
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6117
6118class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
6119 public:
6120  HUnresolvedInstanceFieldGet(HInstruction* obj,
6121                              Primitive::Type field_type,
6122                              uint32_t field_index,
6123                              uint32_t dex_pc)
6124      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6125        field_index_(field_index) {
6126    SetRawInputAt(0, obj);
6127  }
6128
6129  bool NeedsEnvironment() const OVERRIDE { return true; }
6130  bool CanThrow() const OVERRIDE { return true; }
6131
6132  Primitive::Type GetFieldType() const { return GetType(); }
6133  uint32_t GetFieldIndex() const { return field_index_; }
6134
6135  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6136
6137 private:
6138  const uint32_t field_index_;
6139
6140  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6141};
6142
6143class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6144 public:
6145  HUnresolvedInstanceFieldSet(HInstruction* obj,
6146                              HInstruction* value,
6147                              Primitive::Type field_type,
6148                              uint32_t field_index,
6149                              uint32_t dex_pc)
6150      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6151        field_index_(field_index) {
6152    SetPackedField<FieldTypeField>(field_type);
6153    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6154    SetRawInputAt(0, obj);
6155    SetRawInputAt(1, value);
6156  }
6157
6158  bool NeedsEnvironment() const OVERRIDE { return true; }
6159  bool CanThrow() const OVERRIDE { return true; }
6160
6161  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6162  uint32_t GetFieldIndex() const { return field_index_; }
6163
6164  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6165
6166 private:
6167  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6168  static constexpr size_t kFieldFieldTypeSize =
6169      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6170  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6171      kFieldFieldType + kFieldFieldTypeSize;
6172  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6173                "Too many packed fields.");
6174  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6175
6176  const uint32_t field_index_;
6177
6178  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6179};
6180
6181class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
6182 public:
6183  HUnresolvedStaticFieldGet(Primitive::Type field_type,
6184                            uint32_t field_index,
6185                            uint32_t dex_pc)
6186      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6187        field_index_(field_index) {
6188  }
6189
6190  bool NeedsEnvironment() const OVERRIDE { return true; }
6191  bool CanThrow() const OVERRIDE { return true; }
6192
6193  Primitive::Type GetFieldType() const { return GetType(); }
6194  uint32_t GetFieldIndex() const { return field_index_; }
6195
6196  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
6197
6198 private:
6199  const uint32_t field_index_;
6200
6201  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
6202};
6203
// Writes `value` into a static field that could not be resolved at compile
// time; the access goes through the runtime, hence the broad side effects.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    // The declared field type and the value must at least agree on kind
    // (e.g. both integral of the same width category).
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  // The field type is kept in a packed field rather than a member variable.
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  // Index of the field in the dex file.
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6239
// Implement the move-exception DEX instruction: loads the pending exception
// reference from thread-local storage.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // A caught exception reference is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6253
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      // AllWrites prevents this from being optimized away or reordered with
      // surrounding stores.
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6266
// Implement the throw DEX instruction: throws the given exception object.
class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // Throwing transfers control out of the block, so this terminates it.
  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};
6286
6287/**
6288 * Implementation strategies for the code generator of a HInstanceOf
6289 * or `HCheckCast`.
6290 */
6291enum class TypeCheckKind {
6292  kUnresolvedCheck,       // Check against an unresolved type.
6293  kExactCheck,            // Can do a single class compare.
6294  kClassHierarchyCheck,   // Can just walk the super class chain.
6295  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6296  kInterfaceCheck,        // No optimization yet when checking against an interface.
6297  kArrayObjectCheck,      // Can just check if the array is not primitive.
6298  kArrayCheck,            // No optimization yet when checking against a generic array.
6299  kLast = kArrayCheck
6300};
6301
6302std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6303
// Implements the instance-of operation: produces a boolean telling whether
// `object` is an instance of the class loaded by `constant`.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Equality is fully determined by the packed fields, which the base class
  // already compares; any two HInstanceOf with equal inputs are equal.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is only needed when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed layout: type-check kind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6357
// Narrows the reference type information of its input. Inserted by reference
// type propagation; see upper_bound_ below for the bounding semantics.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // can_be_null may never exceed the upper bound's nullability.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6401
// Implements the check-cast DEX instruction: checks `object` against the
// class loaded by `constant` and throws when the check fails.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // Equality is fully determined by the packed fields, which the base class
  // already compares; any two HCheckCast with equal inputs are equal.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used in code generation; mirrors HInstanceOf.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed layout: type-check kind followed by the must-do-null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6446
6447/**
6448 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6449 * @details We define the combined barrier types that are actually required
6450 * by the Java Memory Model, rather than using exactly the terminology from
6451 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6452 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6453 * store atomicity issues, and the recipes there are not always entirely sufficient.
6454 * The current recipe is as follows:
6455 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6456 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6457 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6458 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6459 *    class has final fields.
6460 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6461 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6462 */
6463enum MemBarrierKind {
6464  kAnyStore,
6465  kLoadAny,
6466  kStoreStore,
6467  kAnyAny,
6468  kNTStoreStore,
6469  kLastBarrierKind = kNTStoreStore
6470};
6471std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6472
// Explicit memory barrier of the given kind; see MemBarrierKind above.
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  // The barrier kind is stored in the spare packed bits of HInstruction.
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};
6497
6498class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6499 public:
6500  enum class OperationKind {
6501    kEnter,
6502    kExit,
6503    kLast = kExit
6504  };
6505
6506  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6507    : HTemplateInstruction(
6508          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6509          dex_pc) {
6510    SetPackedField<OperationKindField>(kind);
6511    SetRawInputAt(0, object);
6512  }
6513
6514  // Instruction may go into runtime, so we need an environment.
6515  bool NeedsEnvironment() const OVERRIDE { return true; }
6516
6517  bool CanThrow() const OVERRIDE {
6518    // Verifier guarantees that monitor-exit cannot throw.
6519    // This is important because it allows the HGraphBuilder to remove
6520    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6521    return IsEnter();
6522  }
6523
6524  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6525  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6526
6527  DECLARE_INSTRUCTION(MonitorOperation);
6528
6529 private:
6530  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6531  static constexpr size_t kFieldOperationKindSize =
6532      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6533  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6534      kFieldOperationKind + kFieldOperationKindSize;
6535  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6536                "Too many packed fields.");
6537  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6538
6539 private:
6540  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6541};
6542
// Selects between `true_value` and `false_value` depending on `condition`;
// the moral equivalent of a conditional move.
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both value inputs must agree on their (phi-normalized) type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result may be null if either selectable value may be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6579
// A single source -> destination move, as manipulated by the parallel move
// resolver (see HParallelMove below).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination saved by MarkPending(), ending the
  // "in-progress" state.
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // Pending == destination cleared but source still valid (see MarkPending).
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6653
6654std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6655
// Initial capacity reserved for the moves of an HParallelMove.
static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must appear to execute simultaneously; the (external)
// parallel move resolver turns them into an equivalent sequential schedule.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that no two moves belong to the
  // same instruction (except for the spill-slot special case below) and that
  // no two destinations overlap.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6713
6714}  // namespace art
6715
6716#include "nodes_vector.h"
6717
6718#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6719#include "nodes_shared.h"
6720#endif
6721#ifdef ART_ENABLE_CODEGEN_arm
6722#include "nodes_arm.h"
6723#endif
6724#ifdef ART_ENABLE_CODEGEN_mips
6725#include "nodes_mips.h"
6726#endif
6727#ifdef ART_ENABLE_CODEGEN_x86
6728#include "nodes_x86.h"
6729#endif
6730
6731namespace art {
6732
// Base class for graph traversals. Each instruction is dispatched to its
// Visit<Name> function, which by default falls through to VisitInstruction.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback called by the default Visit<Name> implementations below.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6762
// Visitor variant in which each Visit<Name> delegates to Visit<Super> instead
// of directly to VisitInstruction, so handlers can be written per base class.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6779
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Position on the first block that is actually in the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Moves to the next graph block whose id is set in the loop's bit vector.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};
6812
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Position on the first RPO block that is actually in the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Moves to the next RPO block whose id is set in the loop's bit vector.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
6845
6846// Returns int64_t value of a properly typed constant.
6847inline int64_t Int64FromConstant(HConstant* constant) {
6848  if (constant->IsIntConstant()) {
6849    return constant->AsIntConstant()->GetValue();
6850  } else if (constant->IsLongConstant()) {
6851    return constant->AsLongConstant()->GetValue();
6852  } else {
6853    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6854    return 0;
6855  }
6856}
6857
6858// Returns true iff instruction is an integral constant (and sets value on success).
6859inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
6860  if (instruction->IsIntConstant()) {
6861    *value = instruction->AsIntConstant()->GetValue();
6862    return true;
6863  } else if (instruction->IsLongConstant()) {
6864    *value = instruction->AsLongConstant()->GetValue();
6865    return true;
6866  } else if (instruction->IsNullConstant()) {
6867    *value = 0;
6868    return true;
6869  }
6870  return false;
6871}
6872
6873#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6874  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6875  inline const H##type* HInstruction::As##type() const {                       \
6876    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6877  }                                                                            \
6878  inline H##type* HInstruction::As##type() {                                   \
6879    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6880  }
6881
6882  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6883#undef INSTRUCTION_TYPE_CHECK
6884
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting just after index `after`. Blocks following `after` are shifted
// towards the end of the (grown) vector. The freed slots keep their old,
// now-stale values and are expected to be overwritten by the caller.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift the tail [after + 1, old_size) so it ends at the new end().
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6896
6897/*
6898 * Hunt "under the hood" of array lengths (leading to array references),
6899 * null checks (also leading to array references), and new arrays
6900 * (leading to the actual length). This makes it more likely related
6901 * instructions become actually comparable.
6902 */
6903inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
6904  while (instruction->IsArrayLength() ||
6905         instruction->IsNullCheck() ||
6906         instruction->IsNewArray()) {
6907    instruction = instruction->IsNewArray()
6908        ? instruction->AsNewArray()->GetLength()
6909        : instruction->InputAt(0);
6910  }
6911  return instruction;
6912}
6913
6914}  // namespace art
6915
6916#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6917