// nodes.h revision 22aa54bf8469689c7c6c33f15ff4df2ffba8fa15
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "dex_file.h"
32#include "dex_file_types.h"
33#include "entrypoints/quick/quick_entrypoints_enum.h"
34#include "handle.h"
35#include "handle_scope.h"
36#include "invoke_type.h"
37#include "intrinsics_enum.h"
38#include "locations.h"
39#include "method_reference.h"
40#include "mirror/class.h"
41#include "offsets.h"
42#include "primitive.h"
43#include "utils/intrusive_forward_list.h"
44
45namespace art {
46
// Forward declarations. The IR node classes below are mutually referential,
// so everything is declared up front; full definitions follow in this file
// or in the corresponding .cc files.
class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror
71
72static const int kDefaultNumberOfBlocks = 8;
73static const int kDefaultNumberOfSuccessors = 2;
74static const int kDefaultNumberOfPredecessors = 2;
75static const int kDefaultNumberOfExceptionalPredecessors = 0;
76static const int kDefaultNumberOfDominatedBlocks = 1;
77static const int kDefaultNumberOfBackEdges = 1;
78
79// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
80static constexpr int32_t kMaxIntShiftDistance = 0x1f;
81// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
82static constexpr int32_t kMaxLongShiftDistance = 0x3f;
83
84static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
85static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
86
87static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
88
89static constexpr uint32_t kNoDexPc = -1;
90
91inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
92  // For the purposes of the compiler, the dex files must actually be the same object
93  // if we want to safely treat them as the same. This is especially important for JIT
94  // as custom class loaders can open the same underlying file (or memory) multiple
95  // times and provide different class resolution but no two class loaders should ever
96  // use the same DexFile object - doing so is an unsupported hack that can lead to
97  // all sorts of weird failures.
98  return &lhs == &rhs;
99}
100
// Comparison conditions. kCondFirst/kCondLast alias the first and last
// enumerators so that the whole range can be expressed in bounds checks.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
119
// Status codes returned by the graph analysis steps (see e.g.
// HGraph::BuildDominatorTree and HGraph::AnalyzeLoops).
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,     // A loop header was a catch block (see AnalyzeLoops).
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};
127
// Reinterprets `x` as the corresponding unsigned type of the same width,
// preserving the bit pattern (e.g. int32_t -1 becomes uint32_t 0xFFFFFFFF).
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
132
// A list of HInstructions. Only the first/last pointers are stored here; the
// next/previous links live in the instructions themselves, so this object
// owns no memory.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append/remove a single instruction.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }

  // Reset the end pointers. Note that the instructions themselves are not
  // unlinked or freed.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Add the instructions of `instruction_list` after/before `cursor`, or at
  // the end of this list.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
179
180class ReferenceTypeInfo : ValueObject {
181 public:
182  typedef Handle<mirror::Class> TypeHandle;
183
184  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
185
186  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
187    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
188  }
189
190  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
191    return ReferenceTypeInfo(type_handle, is_exact);
192  }
193
194  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
195
196  static bool IsValidHandle(TypeHandle handle) {
197    return handle.GetReference() != nullptr;
198  }
199
200  bool IsValid() const {
201    return IsValidHandle(type_handle_);
202  }
203
204  bool IsExact() const { return is_exact_; }
205
206  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
207    DCHECK(IsValid());
208    return GetTypeHandle()->IsObjectClass();
209  }
210
211  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
212    DCHECK(IsValid());
213    return GetTypeHandle()->IsStringClass();
214  }
215
216  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
217    DCHECK(IsValid());
218    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
219  }
220
221  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
222    DCHECK(IsValid());
223    return GetTypeHandle()->IsInterface();
224  }
225
226  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
227    DCHECK(IsValid());
228    return GetTypeHandle()->IsArrayClass();
229  }
230
231  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
232    DCHECK(IsValid());
233    return GetTypeHandle()->IsPrimitiveArray();
234  }
235
236  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
237    DCHECK(IsValid());
238    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
239  }
240
241  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
242    DCHECK(IsValid());
243    if (!IsExact()) return false;
244    if (!IsArrayClass()) return false;
245    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
246  }
247
248  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
249    DCHECK(IsValid());
250    if (!IsExact()) return false;
251    if (!IsArrayClass()) return false;
252    if (!rti.IsArrayClass()) return false;
253    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
254        rti.GetTypeHandle()->GetComponentType());
255  }
256
257  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
258
259  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
260    DCHECK(IsValid());
261    DCHECK(rti.IsValid());
262    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
263  }
264
265  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
266    DCHECK(IsValid());
267    DCHECK(rti.IsValid());
268    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
269        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
270  }
271
272  // Returns true if the type information provide the same amount of details.
273  // Note that it does not mean that the instructions have the same actual type
274  // (because the type can be the result of a merge).
275  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
276    if (!IsValid() && !rti.IsValid()) {
277      // Invalid types are equal.
278      return true;
279    }
280    if (!IsValid() || !rti.IsValid()) {
281      // One is valid, the other not.
282      return false;
283    }
284    return IsExact() == rti.IsExact()
285        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
286  }
287
288 private:
289  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
290  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
291      : type_handle_(type_handle), is_exact_(is_exact) { }
292
293  // The class of the object.
294  TypeHandle type_handle_;
295  // Whether or not the type is exact or a superclass of the actual type.
296  // Whether or not we have any information about this type.
297  bool is_exact_;
298};
299
// Pretty-printer for ReferenceTypeInfo; defined out-of-line.
std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
301
302// Control-flow graph of a method. Contains a list of basic blocks.
303class HGraph : public ArenaObject<kArenaAllocGraph> {
304 public:
305  HGraph(ArenaAllocator* arena,
306         const DexFile& dex_file,
307         uint32_t method_idx,
308         bool should_generate_constructor_barrier,
309         InstructionSet instruction_set,
310         InvokeType invoke_type = kInvalidInvokeType,
311         bool debuggable = false,
312         bool osr = false,
313         int start_instruction_id = 0)
314      : arena_(arena),
315        blocks_(arena->Adapter(kArenaAllocBlockList)),
316        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
317        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
318        entry_block_(nullptr),
319        exit_block_(nullptr),
320        maximum_number_of_out_vregs_(0),
321        number_of_vregs_(0),
322        number_of_in_vregs_(0),
323        temporaries_vreg_slots_(0),
324        has_bounds_checks_(false),
325        has_try_catch_(false),
326        has_irreducible_loops_(false),
327        debuggable_(debuggable),
328        current_instruction_id_(start_instruction_id),
329        dex_file_(dex_file),
330        method_idx_(method_idx),
331        invoke_type_(invoke_type),
332        in_ssa_form_(false),
333        should_generate_constructor_barrier_(should_generate_constructor_barrier),
334        number_of_cha_guards_(0),
335        instruction_set_(instruction_set),
336        cached_null_constant_(nullptr),
337        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
338        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
339        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
340        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_current_method_(nullptr),
342        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
343        osr_(osr),
344        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
345    blocks_.reserve(kDefaultNumberOfBlocks);
346  }
347
348  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
349  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
350
351  ArenaAllocator* GetArena() const { return arena_; }
352  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
353
354  bool IsInSsaForm() const { return in_ssa_form_; }
355  void SetInSsaForm() { in_ssa_form_ = true; }
356
357  HBasicBlock* GetEntryBlock() const { return entry_block_; }
358  HBasicBlock* GetExitBlock() const { return exit_block_; }
359  bool HasExitBlock() const { return exit_block_ != nullptr; }
360
361  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
362  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
363
364  void AddBlock(HBasicBlock* block);
365
366  void ComputeDominanceInformation();
367  void ClearDominanceInformation();
368  void ClearLoopInformation();
369  void FindBackEdges(ArenaBitVector* visited);
370  GraphAnalysisResult BuildDominatorTree();
371  void SimplifyCFG();
372  void SimplifyCatchBlocks();
373
374  // Analyze all natural loops in this graph. Returns a code specifying that it
375  // was successful or the reason for failure. The method will fail if a loop
376  // is a throw-catch loop, i.e. the header is a catch block.
377  GraphAnalysisResult AnalyzeLoops() const;
378
379  // Iterate over blocks to compute try block membership. Needs reverse post
380  // order and loop information.
381  void ComputeTryBlockInformation();
382
383  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
384  // Returns the instruction to replace the invoke expression or null if the
385  // invoke is for a void method. Note that the caller is responsible for replacing
386  // and removing the invoke instruction.
387  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
388
389  // Update the loop and try membership of `block`, which was spawned from `reference`.
390  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
391  // should be the new back edge.
392  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
393                                             HBasicBlock* reference,
394                                             bool replace_if_back_edge);
395
396  // Need to add a couple of blocks to test if the loop body is entered and
397  // put deoptimization instructions, etc.
398  void TransformLoopHeaderForBCE(HBasicBlock* header);
399
400  // Removes `block` from the graph. Assumes `block` has been disconnected from
401  // other blocks and has no instructions or phis.
402  void DeleteDeadEmptyBlock(HBasicBlock* block);
403
404  // Splits the edge between `block` and `successor` while preserving the
405  // indices in the predecessor/successor lists. If there are multiple edges
406  // between the blocks, the lowest indices are used.
407  // Returns the new block which is empty and has the same dex pc as `successor`.
408  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
409
410  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
411  void SimplifyLoop(HBasicBlock* header);
412
413  int32_t GetNextInstructionId() {
414    DCHECK_NE(current_instruction_id_, INT32_MAX);
415    return current_instruction_id_++;
416  }
417
418  int32_t GetCurrentInstructionId() const {
419    return current_instruction_id_;
420  }
421
422  void SetCurrentInstructionId(int32_t id) {
423    DCHECK_GE(id, current_instruction_id_);
424    current_instruction_id_ = id;
425  }
426
427  uint16_t GetMaximumNumberOfOutVRegs() const {
428    return maximum_number_of_out_vregs_;
429  }
430
431  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
432    maximum_number_of_out_vregs_ = new_value;
433  }
434
435  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
436    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
437  }
438
439  void UpdateTemporariesVRegSlots(size_t slots) {
440    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
441  }
442
443  size_t GetTemporariesVRegSlots() const {
444    DCHECK(!in_ssa_form_);
445    return temporaries_vreg_slots_;
446  }
447
448  void SetNumberOfVRegs(uint16_t number_of_vregs) {
449    number_of_vregs_ = number_of_vregs;
450  }
451
452  uint16_t GetNumberOfVRegs() const {
453    return number_of_vregs_;
454  }
455
456  void SetNumberOfInVRegs(uint16_t value) {
457    number_of_in_vregs_ = value;
458  }
459
460  uint16_t GetNumberOfInVRegs() const {
461    return number_of_in_vregs_;
462  }
463
464  uint16_t GetNumberOfLocalVRegs() const {
465    DCHECK(!in_ssa_form_);
466    return number_of_vregs_ - number_of_in_vregs_;
467  }
468
469  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
470    return reverse_post_order_;
471  }
472
473  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
474    DCHECK(GetReversePostOrder()[0] == entry_block_);
475    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
476  }
477
478  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
479    return ReverseRange(GetReversePostOrder());
480  }
481
482  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
483    return linear_order_;
484  }
485
486  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
487    return ReverseRange(GetLinearOrder());
488  }
489
490  bool HasBoundsChecks() const {
491    return has_bounds_checks_;
492  }
493
494  void SetHasBoundsChecks(bool value) {
495    has_bounds_checks_ = value;
496  }
497
498  bool ShouldGenerateConstructorBarrier() const {
499    return should_generate_constructor_barrier_;
500  }
501
502  bool IsDebuggable() const { return debuggable_; }
503
504  // Returns a constant of the given type and value. If it does not exist
505  // already, it is created and inserted into the graph. This method is only for
506  // integral types.
507  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
508
509  // TODO: This is problematic for the consistency of reference type propagation
510  // because it can be created anytime after the pass and thus it will be left
511  // with an invalid type.
512  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
513
514  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
515    return CreateConstant(value, &cached_int_constants_, dex_pc);
516  }
517  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
518    return CreateConstant(value, &cached_long_constants_, dex_pc);
519  }
520  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
521    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
522  }
523  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
524    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
525  }
526
527  HCurrentMethod* GetCurrentMethod();
528
529  const DexFile& GetDexFile() const {
530    return dex_file_;
531  }
532
533  uint32_t GetMethodIdx() const {
534    return method_idx_;
535  }
536
537  InvokeType GetInvokeType() const {
538    return invoke_type_;
539  }
540
541  InstructionSet GetInstructionSet() const {
542    return instruction_set_;
543  }
544
545  bool IsCompilingOsr() const { return osr_; }
546
547  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
548    return cha_single_implementation_list_;
549  }
550
551  void AddCHASingleImplementationDependency(ArtMethod* method) {
552    cha_single_implementation_list_.insert(method);
553  }
554
555  bool HasShouldDeoptimizeFlag() const {
556    return number_of_cha_guards_ != 0;
557  }
558
559  bool HasTryCatch() const { return has_try_catch_; }
560  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
561
562  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
563  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
564
565  ArtMethod* GetArtMethod() const { return art_method_; }
566  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
567
568  // Returns an instruction with the opposite Boolean value from 'cond'.
569  // The instruction has been inserted into the graph, either as a constant, or
570  // before cursor.
571  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
572
573  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
574
575  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
576  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
577  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
578
579 private:
580  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
581  void RemoveDeadBlocks(const ArenaBitVector& visited);
582
583  template <class InstructionType, typename ValueType>
584  InstructionType* CreateConstant(ValueType value,
585                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
586                                  uint32_t dex_pc = kNoDexPc) {
587    // Try to find an existing constant of the given value.
588    InstructionType* constant = nullptr;
589    auto cached_constant = cache->find(value);
590    if (cached_constant != cache->end()) {
591      constant = cached_constant->second;
592    }
593
594    // If not found or previously deleted, create and cache a new instruction.
595    // Don't bother reviving a previously deleted instruction, for simplicity.
596    if (constant == nullptr || constant->GetBlock() == nullptr) {
597      constant = new (arena_) InstructionType(value, dex_pc);
598      cache->Overwrite(value, constant);
599      InsertConstant(constant);
600    }
601    return constant;
602  }
603
604  void InsertConstant(HConstant* instruction);
605
606  // Cache a float constant into the graph. This method should only be
607  // called by the SsaBuilder when creating "equivalent" instructions.
608  void CacheFloatConstant(HFloatConstant* constant);
609
610  // See CacheFloatConstant comment.
611  void CacheDoubleConstant(HDoubleConstant* constant);
612
613  ArenaAllocator* const arena_;
614
615  // List of blocks in insertion order.
616  ArenaVector<HBasicBlock*> blocks_;
617
618  // List of blocks to perform a reverse post order tree traversal.
619  ArenaVector<HBasicBlock*> reverse_post_order_;
620
621  // List of blocks to perform a linear order tree traversal. Unlike the reverse
622  // post order, this order is not incrementally kept up-to-date.
623  ArenaVector<HBasicBlock*> linear_order_;
624
625  HBasicBlock* entry_block_;
626  HBasicBlock* exit_block_;
627
628  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
629  uint16_t maximum_number_of_out_vregs_;
630
631  // The number of virtual registers in this method. Contains the parameters.
632  uint16_t number_of_vregs_;
633
634  // The number of virtual registers used by parameters of this method.
635  uint16_t number_of_in_vregs_;
636
637  // Number of vreg size slots that the temporaries use (used in baseline compiler).
638  size_t temporaries_vreg_slots_;
639
640  // Has bounds checks. We can totally skip BCE if it's false.
641  bool has_bounds_checks_;
642
643  // Flag whether there are any try/catch blocks in the graph. We will skip
644  // try/catch-related passes if false.
645  bool has_try_catch_;
646
647  // Flag whether there are any irreducible loops in the graph.
648  bool has_irreducible_loops_;
649
650  // Indicates whether the graph should be compiled in a way that
651  // ensures full debuggability. If false, we can apply more
652  // aggressive optimizations that may limit the level of debugging.
653  const bool debuggable_;
654
655  // The current id to assign to a newly added instruction. See HInstruction.id_.
656  int32_t current_instruction_id_;
657
658  // The dex file from which the method is from.
659  const DexFile& dex_file_;
660
661  // The method index in the dex file.
662  const uint32_t method_idx_;
663
664  // If inlined, this encodes how the callee is being invoked.
665  const InvokeType invoke_type_;
666
667  // Whether the graph has been transformed to SSA form. Only used
668  // in debug mode to ensure we are not using properties only valid
669  // for non-SSA form (like the number of temporaries).
670  bool in_ssa_form_;
671
672  const bool should_generate_constructor_barrier_;
673
674  // Number of CHA guards in the graph. Used to short-circuit the
675  // CHA guard optimization pass when there is no CHA guard left.
676  uint32_t number_of_cha_guards_;
677
678  const InstructionSet instruction_set_;
679
680  // Cached constants.
681  HNullConstant* cached_null_constant_;
682  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
683  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
684  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
685  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
686
687  HCurrentMethod* cached_current_method_;
688
689  // The ArtMethod this graph is for. Note that for AOT, it may be null,
690  // for example for methods whose declaring class could not be resolved
691  // (such as when the superclass could not be found).
692  ArtMethod* art_method_;
693
694  // Keep the RTI of inexact Object to avoid having to pass stack handle
695  // collection pointer to passes which may create NullConstant.
696  ReferenceTypeInfo inexact_object_rti_;
697
698  // Whether we are compiling this graph for on stack replacement: this will
699  // make all loops seen as irreducible and emit special stack maps to mark
700  // compiled code entries which the interpreter can directly jump to.
701  const bool osr_;
702
703  // List of methods that are assumed to have single implementation.
704  ArenaSet<ArtMethod*> cha_single_implementation_list_;
705
706  friend class SsaBuilder;           // For caching constants.
707  friend class SsaLivenessAnalysis;  // For the linear order.
708  friend class HInliner;             // For the reverse post order.
709  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
710  DISALLOW_COPY_AND_ASSIGN(HGraph);
711};
712
// Loop metadata attached to a loop's header block: the header, the back
// edges, the set of contained blocks, the loop's suspend check, and
// irreducibility flags.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  // Add/remove `block` from the membership bit vector.
  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Populated means at least one bit is set in the block membership vector.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  // Membership bit vector, indexed by block id.
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
820
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  // Whether this is try-block information (try_entry_ set) as opposed to
  // catch-block information (catch_dex_file_ set). Exactly one of the two
  // constructors above determines which.
  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // A catch-all handler has no valid type index.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};
873
// Sentinel lifetime position meaning "liveness not computed" (wraps to the
// maximum size_t value).
static constexpr size_t kNoLifetime = -1;
// Sentinel block id used before a block is assigned an id by its graph.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
876
// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  // Creates an unlinked block owned by `graph`. The block id stays
  // kInvalidBlockId until assigned via SetBlockId().
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views over the successor list split into normal and exceptional
  // successors (defined out of line).
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears in the successor list, searching from
  // index `start_from` onwards.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge targeting this block, lazily
  // creating the loop information with this block as the loop header.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  // Note: the parameter is `int` while the id is stored as uint32_t
  // (kInvalidBlockId is uint32_t(-1)); callers rely on the implicit
  // conversion.
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges if this block is a loop header, 0 otherwise.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Adds the edge this -> `block`, updating both blocks' lists.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge to `existing` so it points to `new_block` instead,
  // keeping its index in the successor list. Both affected predecessor
  // lists are updated.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Mirror of ReplaceSuccessor: redirects the incoming edge from `existing`
  // to come from `new_block` instead, keeping its index.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes `block` from this block's predecessor list only; `block`'s
  // successor list is left untouched.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes `block` from this block's successor list only; `block`'s
  // predecessor list is left untouched.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  // One-sided: the former predecessors' successor lists are not updated.
  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds the edge `block` -> this, updating both blocks' lists.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Requires exactly two predecessors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Requires exactly two successors.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  // The block must have exactly one predecessor (DCHECKed).
  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  // The block must have exactly one successor (DCHECKed).
  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions in the linearized order; kNoLifetime until computed.
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Loop this block belongs to, or null. SetInLoop() keeps this pointing at
  // the innermost enclosing loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Null for blocks that are neither try nor catch blocks.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1232
1233// Iterates over the LoopInformation of all loops which contain 'block'
1234// from the innermost to the outermost.
1235class HLoopInformationOutwardIterator : public ValueObject {
1236 public:
1237  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1238      : current_(block.GetLoopInformation()) {}
1239
1240  bool Done() const { return current_ == nullptr; }
1241
1242  void Advance() {
1243    DCHECK(!Done());
1244    current_ = current_->GetPreHeader()->GetLoopInformation();
1245  }
1246
1247  HLoopInformation* Current() const {
1248    DCHECK(!Done());
1249    return current_;
1250  }
1251
1252 private:
1253  HLoopInformation* current_;
1254
1255  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1256};
1257
// X-macro listing every concrete instruction common to all architectures.
// Each entry is M(InstructionName, BaseClassNameWithoutHPrefix); callers
// expand it to generate forward declarations, visitor methods, kinds, etc.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

1341/*
1342 * Instructions, shared across several (not all) architectures.
1343 */
1344#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1345#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1346#else
1347#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1348  M(BitwiseNegatedRight, Instruction)                                   \
1349  M(MultiplyAccumulate, Instruction)                                    \
1350  M(IntermediateAddress, Instruction)
1351#endif
1352
1353#ifndef ART_ENABLE_CODEGEN_arm
1354#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1355#else
1356#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1357  M(ArmDexCacheArraysBase, Instruction)
1358#endif
1359
1360#ifndef ART_ENABLE_CODEGEN_arm64
1361#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1362#else
1363#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
1364  M(Arm64DataProcWithShifterOp, Instruction)
1365#endif
1366
1367#ifndef ART_ENABLE_CODEGEN_mips
1368#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1369#else
1370#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
1371  M(MipsComputeBaseMethodAddress, Instruction)                          \
1372  M(MipsDexCacheArraysBase, Instruction)                                \
1373  M(MipsPackedSwitch, Instruction)
1374#endif
1375
1376#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1377
1378#ifndef ART_ENABLE_CODEGEN_x86
1379#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1380#else
1381#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1382  M(X86ComputeBaseMethodAddress, Instruction)                           \
1383  M(X86LoadFromConstantTable, Instruction)                              \
1384  M(X86FPNeg, Instruction)                                              \
1385  M(X86PackedSwitch, Instruction)
1386#endif
1387
1388#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1389
1390#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1391  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1392  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1393  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1394  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1395  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1396  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1397  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1398  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1399
// Abstract (non-instantiable) instruction classes.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

// All instructions: concrete plus abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<instruction> class, then drop the helper macro.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides for a concrete instruction class body: kind, debug
// name, type equality and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate Is/As casts for an abstract instruction class body.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1427
// A node in the intrusive use list kept by a used instruction. Records the
// user `T` (e.g. HInstruction* or HEnvironment*) and the input index at
// which the use occurs.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Construction is restricted to HInstruction (see friend declaration).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1450
// Intrusive forward list of use nodes, one per use of an instruction.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1453
1454// This class is used by HEnvironment and HInstruction classes to record the
1455// instructions they use and pointers to the corresponding HUseListNodes kept
1456// by the used instructions.
1457template <typename T>
1458class HUserRecord : public ValueObject {
1459 public:
1460  HUserRecord() : instruction_(nullptr), before_use_node_() {}
1461  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1462
1463  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1464      : HUserRecord(old_record.instruction_, before_use_node) {}
1465  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1466      : instruction_(instruction), before_use_node_(before_use_node) {
1467    DCHECK(instruction_ != nullptr);
1468  }
1469
1470  HInstruction* GetInstruction() const { return instruction_; }
1471  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1472  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1473
1474 private:
1475  // Instruction used by the user.
1476  HInstruction* instruction_;
1477
1478  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1479  typename HUseList<T>::iterator before_use_node_;
1480};
1481
1482// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1483// This is used for HInstruction::GetInputs() to return a container wrapper providing
1484// HInstruction* values even though the underlying container has HUserRecord<>s.
1485struct HInputExtractor {
1486  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1487    return record.GetInstruction();
1488  }
1489  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1490    return record.GetInstruction();
1491  }
1492};
1493
// Mutable and const views over an instruction's inputs that yield
// HInstruction* values via HInputExtractor.
using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1496
1497/**
1498 * Side-effects representation.
1499 *
1500 * For write/read dependences on fields/arrays, the dependence analysis uses
1501 * type disambiguation (e.g. a float field write cannot modify the value of an
1502 * integer field read) and the access type (e.g.  a reference array write cannot
1503 * modify the value of a reference field read [although it may modify the
1504 * reference fetch prior to reading the field, which is represented by its own
1505 * write/read dependence]). The analysis makes conservative points-to
1506 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1507 * the same, and any reference read depends on any reference read without
1508 * further regard of its type).
1509 *
1510 * The internal representation uses 38-bit and is described in the table below.
1511 * The first line indicates the side effect, and for field/array accesses the
1512 * second line indicates the type of the access (in the order of the
1513 * Primitive::Type enum).
1514 * The two numbered lines below indicate the bit position in the bitfield (read
1515 * vertically).
1516 *
1517 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1518 *   +-------------+---------+---------+--------------+---------+---------+
1519 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1520 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1521 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1522 *
1523 * Note that, to ease the implementation, 'changes' bits are least significant
1524 * bits, while 'dependency' bits are most significant bits.
1525 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Most conservative summary: all 'change' and all 'depend on' bits set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  // All 'change' bits: every write kind plus 'can trigger GC'.
  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  // All 'depend on' bits: every read kind plus 'depends on GC'.
  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // Everything except the 'depends on GC' bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  // All field/array write and read bits, without either GC bit.
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Write to a field of the given type. A volatile access is modeled as
  // all writes and reads, since it acts as a memory barrier.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  // Read of a field of the given type; volatile reads are modeled like
  // volatile writes (see FieldWriteOfType above).
  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns the side effects of this with those of the other removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with the other's flags.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if every flag set in `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any 'change' bit is set (some write or GC trigger).
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any 'depend on' bit is set (some read or GC dependency).
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  // Returns true if every representable bit, including both GC bits, is set.
  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Align the 'depend on' half with the 'change' half before intersecting;
    // this works because the two halves use the same bit order (see the
    // static_assert below).
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk from the most significant used bit down to bit 0; the layout
    // matches the table in the class comment above.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Number of bits in one field/array access group: one bit per
  // Primitive::Type value (except void), per the class comment above.
  static constexpr int kFieldArrayAnalysisBits = 9;

  // Layout of the 'change' half (least significant bits).
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // Layout of the 'depend on' half (most significant bits).
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    // The type's enum value must fit within one 9-bit access group.
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1721
// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Creates an environment with `number_of_vregs` empty slots for the
  // instruction `holder`, describing `method` at `dex_pc`.
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               ArtMethod* method,
               uint32_t dex_pc,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       method_(method),
       dex_pc_(dex_pc),
       holder_(holder) {
  }

  // Allocates an environment with the same shape (size, method, dex pc) as
  // `to_copy`; the vreg values themselves are not copied here (see CopyFrom()).
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Recursively appends a deep copy of the `parent` chain at the end of this
  // environment's own parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw because this method does not update `instruction`'s env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Number of vreg slots in this environment (not counting parents).
  size_t Size() const { return vregs_.size(); }

  // Non-null iff this environment comes from an inlined invoke
  // (see IsFromInlinedInvoke below).
  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1818
// Abstract base class for all nodes (instructions) of the HGraph
// intermediate representation.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

  // One enumerator per instruction, generated from FOR_EACH_INSTRUCTION.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  // Intrusive links within the containing block's instruction list.
  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  // Returns whether `input` appears among this instruction's inputs.
  bool HasInput(HInstruction* input) const {
    for (const HInstruction* i : GetInputs()) {
      if (i == input) {
        return true;
      }
    }
    return false;
  }

  // Raw because this method does not update the def-use chains.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  // Reassembles the reference type info from the stored handle plus the
  // IsExact flag kept in the packed fields.
  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Registers `user`, whose input at `index` is this instruction, in the
  // use list and patches the affected users' cached use-node iterators.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Registers environment `user`, whose vreg at `index` is this instruction.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction from the use list of its input at `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Unlinks this instruction from the use lists of all of its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Returns whether this instruction could be removed from the graph if it
  // turned out to have no uses (see IsDeadAndRemovable below).
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Makes `environment` the innermost environment, chaining the previously
  // installed one as its parent. Raw: does not update the uses lists.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // As CopyEnvironmentFrom(), but loop phis of `block` are replaced by their
  // first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

  // Declares Is##type()/As##type() for every concrete instruction; the
  // definitions live with the instruction classes.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // For abstract instructions the As##type() defaults return nullptr and are
  // overridden by the matching classes.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  //       meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash over the kind and the ids of the inputs (for GVN-style tables).
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses start allocating their own packed bits at this index.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Raw because this method does not update the def-use chains.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After new use nodes were pushed to the front of `uses_`, rewrite the
  // input records of the affected users so that their cached 'before use
  // node' iterators stay correct.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After erasing the node following `before_use_node`, repoint the next
  // remaining use (if any) at its new predecessor.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseInsertion().
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  // Intrusive list links and the block containing this instruction.
  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
2302std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2303
2304// Iterates over the instructions, while preserving the next instruction
2305// in case the current instruction gets removed from the list by the user
2306// of this iterator.
2307class HInstructionIterator : public ValueObject {
2308 public:
2309  explicit HInstructionIterator(const HInstructionList& instructions)
2310      : instruction_(instructions.first_instruction_) {
2311    next_ = Done() ? nullptr : instruction_->GetNext();
2312  }
2313
2314  bool Done() const { return instruction_ == nullptr; }
2315  HInstruction* Current() const { return instruction_; }
2316  void Advance() {
2317    instruction_ = next_;
2318    next_ = Done() ? nullptr : instruction_->GetNext();
2319  }
2320
2321 private:
2322  HInstruction* instruction_;
2323  HInstruction* next_;
2324
2325  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2326};
2327
2328// Iterates over the instructions without saving the next instruction,
2329// therefore handling changes in the graph potentially made by the user
2330// of this iterator.
2331class HInstructionIteratorHandleChanges : public ValueObject {
2332 public:
2333  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2334      : instruction_(instructions.first_instruction_) {
2335  }
2336
2337  bool Done() const { return instruction_ == nullptr; }
2338  HInstruction* Current() const { return instruction_; }
2339  void Advance() {
2340    instruction_ = instruction_->GetNext();
2341  }
2342
2343 private:
2344  HInstruction* instruction_;
2345
2346  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2347};
2348
2349
2350class HBackwardInstructionIterator : public ValueObject {
2351 public:
2352  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2353      : instruction_(instructions.last_instruction_) {
2354    next_ = Done() ? nullptr : instruction_->GetPrevious();
2355  }
2356
2357  bool Done() const { return instruction_ == nullptr; }
2358  HInstruction* Current() const { return instruction_; }
2359  void Advance() {
2360    instruction_ = next_;
2361    next_ = Done() ? nullptr : instruction_->GetPrevious();
2362  }
2363
2364 private:
2365  HInstruction* instruction_;
2366  HInstruction* next_;
2367
2368  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2369};
2370
// Base class for instructions whose number of inputs can change over their
// lifetime (e.g. HPhi); inputs are kept in an arena-allocated vector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Exposes the input vector as a mutable array view.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  // Mutators for the input list (implemented out of line).
  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 protected:
  // `kind` tags the arena allocation for memory accounting purposes.
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2396
// Instruction with a statically known number of inputs N, stored inline in a
// std::array instead of an arena-allocated vector.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  // Fixed-size input storage.
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2414
2415// HTemplateInstruction specialization for N=0.
2416template<>
2417class HTemplateInstruction<0>: public HInstruction {
2418 public:
2419  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
2420      : HInstruction(side_effects, dex_pc) {}
2421
2422  virtual ~HTemplateInstruction() {}
2423
2424  using HInstruction::GetInputRecords;  // Keep the const version visible.
2425  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2426    return ArrayRef<HUserRecord<HInstruction*>>();
2427  }
2428
2429 private:
2430  friend class SsaBuilder;
2431};
2432
// Base class for instructions that produce a value of a given primitive type.
// The type is stored in the instruction's packed fields.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Packed field layout: the result type occupies kFieldTypeSize bits
  // immediately after HInstruction's generic packed bits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2455
2456// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2457// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Takes no inputs and has no side effects; only transfers control.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2470
2471// Represents dex's RETURN opcodes. A HReturn is a control flow
2472// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // The returned value is the single input.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2487
2488class HPhi FINAL : public HVariableInputSizeInstruction {
2489 public:
2490  HPhi(ArenaAllocator* arena,
2491       uint32_t reg_number,
2492       size_t number_of_inputs,
2493       Primitive::Type type,
2494       uint32_t dex_pc = kNoDexPc)
2495      : HVariableInputSizeInstruction(
2496            SideEffects::None(),
2497            dex_pc,
2498            arena,
2499            number_of_inputs,
2500            kArenaAllocPhiInputs),
2501        reg_number_(reg_number) {
2502    SetPackedField<TypeField>(ToPhiType(type));
2503    DCHECK_NE(GetType(), Primitive::kPrimVoid);
2504    // Phis are constructed live and marked dead if conflicting or unused.
2505    // Individual steps of SsaBuilder should assume that if a phi has been
2506    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
2507    SetPackedFlag<kFlagIsLive>(true);
2508    SetPackedFlag<kFlagCanBeNull>(true);
2509  }
2510
2511  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
2512  static Primitive::Type ToPhiType(Primitive::Type type) {
2513    return Primitive::PrimitiveKind(type);
2514  }
2515
2516  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
2517
2518  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
2519  void SetType(Primitive::Type new_type) {
2520    // Make sure that only valid type changes occur. The following are allowed:
2521    //  (1) int  -> float/ref (primitive type propagation),
2522    //  (2) long -> double (primitive type propagation).
2523    DCHECK(GetType() == new_type ||
2524           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
2525           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
2526           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
2527    SetPackedField<TypeField>(new_type);
2528  }
2529
2530  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
2531  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
2532
2533  uint32_t GetRegNumber() const { return reg_number_; }
2534
2535  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
2536  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
2537  bool IsDead() const { return !IsLive(); }
2538  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
2539
2540  bool IsVRegEquivalentOf(const HInstruction* other) const {
2541    return other != nullptr
2542        && other->IsPhi()
2543        && other->AsPhi()->GetBlock() == GetBlock()
2544        && other->AsPhi()->GetRegNumber() == GetRegNumber();
2545  }
2546
2547  // Returns the next equivalent phi (starting from the current one) or null if there is none.
2548  // An equivalent phi is a phi having the same dex register and type.
2549  // It assumes that phis with the same dex register are adjacent.
2550  HPhi* GetNextEquivalentPhiWithSameType() {
2551    HInstruction* next = GetNext();
2552    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
2553      if (next->GetType() == GetType()) {
2554        return next->AsPhi();
2555      }
2556      next = next->GetNext();
2557    }
2558    return nullptr;
2559  }
2560
2561  DECLARE_INSTRUCTION(Phi);
2562
2563 private:
2564  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2565  static constexpr size_t kFieldTypeSize =
2566      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
2567  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
2568  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
2569  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
2570  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
2571  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
2572
2573  const uint32_t reg_number_;
2574
2575  DISALLOW_COPY_AND_ASSIGN(HPhi);
2576};
2577
2578// The exit instruction is the only instruction of the exit block.
2579// Instructions aborting the method (HThrow and HReturn) must branch to the
2580// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2592
2593// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // A block ending with a goto has exactly one successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2609
// Base class for all constants. Constants have no side effects and are
// movable, which makes them eligible for GVN/code motion.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit representation of the constant, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2633
2634class HNullConstant FINAL : public HConstant {
2635 public:
2636  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2637    return true;
2638  }
2639
2640  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2641
2642  size_t ComputeHashCode() const OVERRIDE { return 0; }
2643
2644  // The null constant representation is a 0-bit pattern.
2645  virtual bool IsZeroBitPattern() const { return true; }
2646
2647  DECLARE_INSTRUCTION(NullConstant);
2648
2649 private:
2650  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2651
2652  friend class HGraph;
2653  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2654};
2655
2656// Constants of the type int. Those can be from Dex instructions, or
2657// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extends the 32-bit value (no sign extension).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Only HGraph can construct int constants (friend declaration below).
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2698
// Constants of type long.
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Only HGraph can construct long constants (friend declaration below).
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2728
// Constants of type float. Equality and the Is* predicates compare bit
// patterns (via bit_cast), not arithmetic values, so NaNs with identical bits
// compare equal and -0.0f is distinguished from +0.0f where relevant.
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero: true for both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Bit-pattern zero: true only for +0.0f (the all-zero encoding).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs from the raw 32-bit representation.
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2781
// Constants of type double. Equality and the Is* predicates compare bit
// patterns (via bit_cast), not arithmetic values, so NaNs with identical bits
// compare equal and -0.0 is distinguished from +0.0 where relevant.
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero: true for both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Bit-pattern zero: true only for +0.0 (the all-zero encoding).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs from the raw 64-bit representation.
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2832
2833// Conditional branch. A block ending with an HIf instruction must have
2834// two successors.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  // `input` is the boolean condition being branched on.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // By convention, the true successor is at index 0 ...
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  // ... and the false successor at index 1.
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2857
2858
2859// Abstract instruction which marks the beginning and/or end of a try block and
2860// links it to the respective exception handlers. Behaves the same as a Goto in
2861// non-exceptional control flow.
2862// Normal-flow successor is stored at index zero, exception handlers under
2863// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  // Whether this boundary marks the entry into or the exit out of a try block.
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // Exception handlers are all successors except the first.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  // Compares handler lists with another boundary (implemented out of line).
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed field layout: the boundary kind follows the generic packed bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2920
2921// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize FINAL : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // All deoptimizations on the same condition are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // An environment is needed to transfer state to the interpreter.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2943
2944// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
2945// The compiled code checks this flag value in a guard before devirtualized call and
2946// if it's true, starts to do deoptimization.
2947// It has a 4-byte slot on stack.
2948// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is a 4-byte int value.
  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
2970
2971// Represents the ArtMethod that was passed as a first argument to
2972// the method. It is used by instructions that depend on it, like
2973// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2984
2985// Fetches an ArtMethod from the virtual table or the interface method table
2986// of a class.
2987class HClassTableGet FINAL : public HExpression<1> {
2988 public:
2989  enum class TableKind {
2990    kVTable,
2991    kIMTable,
2992    kLast = kIMTable
2993  };
2994  HClassTableGet(HInstruction* cls,
2995                 Primitive::Type type,
2996                 TableKind kind,
2997                 size_t index,
2998                 uint32_t dex_pc)
2999      : HExpression(type, SideEffects::None(), dex_pc),
3000        index_(index) {
3001    SetPackedField<TableKindField>(kind);
3002    SetRawInputAt(0, cls);
3003  }
3004
3005  bool CanBeMoved() const OVERRIDE { return true; }
3006  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3007    return other->AsClassTableGet()->GetIndex() == index_ &&
3008        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3009  }
3010
3011  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3012  size_t GetIndex() const { return index_; }
3013
3014  DECLARE_INSTRUCTION(ClassTableGet);
3015
3016 private:
3017  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3018  static constexpr size_t kFieldTableKindSize =
3019      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3020  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3021  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3022                "Too many packed fields.");
3023  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3024
3025  // The index of the ArtMethod in the table.
3026  const size_t index_;
3027
3028  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3029};
3030
3031// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3032// have one successor for each entry in the switch table, and the final successor
3033// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  // Case values are the consecutive range [start_value, start_value + num_entries).
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;   // First case value of the table.
  const uint32_t num_entries_;  // Number of switch targets (excludes default).

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3064
// Base class for operations with a single input producing a value.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Unary operations of the same kind are fully defined by their input.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3096
// Base class for operations with two inputs producing a value.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Nothing to do if inputs are identical or the constant is already right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  // Binary operations of the same kind are fully defined by their inputs.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  // The (null, null) and (long, int) combinations fail by default; subclasses
  // for which they are meaningful override them.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3186
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias  // Only used to size the packed ComparisonBiasField (MinimumBitsToStore).
};
3195
3196std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3197
// Abstract base class for boolean-valued binary conditions (equality and the
// signed/unsigned ordering comparisons). The result type is always
// Primitive::kPrimBoolean and conditions have no side effects.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    // By default no HCompare has been merged in, so no NaN bias applies.
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The IfCondition kind (e.g. kCondEQ) implemented by the concrete subclass.
  virtual IfCondition GetCondition() const = 0;

  // The IfCondition kind of the logical negation of this condition.
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Conditions compare equal only if all packed fields (including the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Returns whether this floating-point condition evaluates to true when an
  // input is NaN: NE always does, EQ never does, and GT/GE do only under
  // gt bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Returns whether this floating-point condition evaluates to false when an
  // input is NaN: EQ always does, NE never does, and LT/LE do only under
  // gt bias.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way compare: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point; an unordered (NaN) pair yields
  // 1 or -1 according to the bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3277
3278// Instruction to check if two inputs are equal to each other.
3279class HEqual FINAL : public HCondition {
3280 public:
3281  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3282      : HCondition(first, second, dex_pc) {}
3283
3284  bool IsCommutative() const OVERRIDE { return true; }
3285
3286  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3287                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3288    return MakeConstantCondition(true, GetDexPc());
3289  }
3290  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3291    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3292  }
3293  // In the following Evaluate methods, a HCompare instruction has
3294  // been merged into this HEqual instruction; evaluate it as
3295  // `Compare(x, y) == 0`.
3296  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3297    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3298                                 GetDexPc());
3299  }
3300  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3301    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3302  }
3303  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3304    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3305  }
3306
3307  DECLARE_INSTRUCTION(Equal);
3308
3309  IfCondition GetCondition() const OVERRIDE {
3310    return kCondEQ;
3311  }
3312
3313  IfCondition GetOppositeCondition() const OVERRIDE {
3314    return kCondNE;
3315  }
3316
3317 private:
3318  template <typename T> static bool Compute(T x, T y) { return x == y; }
3319
3320  DISALLOW_COPY_AND_ASSIGN(HEqual);
3321};
3322
3323class HNotEqual FINAL : public HCondition {
3324 public:
3325  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3326      : HCondition(first, second, dex_pc) {}
3327
3328  bool IsCommutative() const OVERRIDE { return true; }
3329
3330  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3331                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3332    return MakeConstantCondition(false, GetDexPc());
3333  }
3334  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3335    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3336  }
3337  // In the following Evaluate methods, a HCompare instruction has
3338  // been merged into this HNotEqual instruction; evaluate it as
3339  // `Compare(x, y) != 0`.
3340  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3341    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3342  }
3343  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3344    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3345  }
3346  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3347    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3348  }
3349
3350  DECLARE_INSTRUCTION(NotEqual);
3351
3352  IfCondition GetCondition() const OVERRIDE {
3353    return kCondNE;
3354  }
3355
3356  IfCondition GetOppositeCondition() const OVERRIDE {
3357    return kCondEQ;
3358  }
3359
3360 private:
3361  template <typename T> static bool Compute(T x, T y) { return x != y; }
3362
3363  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3364};
3365
3366class HLessThan FINAL : public HCondition {
3367 public:
3368  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3369      : HCondition(first, second, dex_pc) {}
3370
3371  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3372    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3373  }
3374  // In the following Evaluate methods, a HCompare instruction has
3375  // been merged into this HLessThan instruction; evaluate it as
3376  // `Compare(x, y) < 0`.
3377  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3378    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3379  }
3380  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3381    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3382  }
3383  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3384    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3385  }
3386
3387  DECLARE_INSTRUCTION(LessThan);
3388
3389  IfCondition GetCondition() const OVERRIDE {
3390    return kCondLT;
3391  }
3392
3393  IfCondition GetOppositeCondition() const OVERRIDE {
3394    return kCondGE;
3395  }
3396
3397 private:
3398  template <typename T> static bool Compute(T x, T y) { return x < y; }
3399
3400  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3401};
3402
3403class HLessThanOrEqual FINAL : public HCondition {
3404 public:
3405  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3406      : HCondition(first, second, dex_pc) {}
3407
3408  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3409    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3410  }
3411  // In the following Evaluate methods, a HCompare instruction has
3412  // been merged into this HLessThanOrEqual instruction; evaluate it as
3413  // `Compare(x, y) <= 0`.
3414  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3415    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3416  }
3417  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3418    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3419  }
3420  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3421    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3422  }
3423
3424  DECLARE_INSTRUCTION(LessThanOrEqual);
3425
3426  IfCondition GetCondition() const OVERRIDE {
3427    return kCondLE;
3428  }
3429
3430  IfCondition GetOppositeCondition() const OVERRIDE {
3431    return kCondGT;
3432  }
3433
3434 private:
3435  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3436
3437  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3438};
3439
3440class HGreaterThan FINAL : public HCondition {
3441 public:
3442  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3443      : HCondition(first, second, dex_pc) {}
3444
3445  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3446    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3447  }
3448  // In the following Evaluate methods, a HCompare instruction has
3449  // been merged into this HGreaterThan instruction; evaluate it as
3450  // `Compare(x, y) > 0`.
3451  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3452    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3453  }
3454  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3455    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3456  }
3457  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3458    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3459  }
3460
3461  DECLARE_INSTRUCTION(GreaterThan);
3462
3463  IfCondition GetCondition() const OVERRIDE {
3464    return kCondGT;
3465  }
3466
3467  IfCondition GetOppositeCondition() const OVERRIDE {
3468    return kCondLE;
3469  }
3470
3471 private:
3472  template <typename T> static bool Compute(T x, T y) { return x > y; }
3473
3474  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3475};
3476
3477class HGreaterThanOrEqual FINAL : public HCondition {
3478 public:
3479  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3480      : HCondition(first, second, dex_pc) {}
3481
3482  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3483    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3484  }
3485  // In the following Evaluate methods, a HCompare instruction has
3486  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3487  // `Compare(x, y) >= 0`.
3488  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3489    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3490  }
3491  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3492    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3493  }
3494  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3495    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3496  }
3497
3498  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3499
3500  IfCondition GetCondition() const OVERRIDE {
3501    return kCondGE;
3502  }
3503
3504  IfCondition GetOppositeCondition() const OVERRIDE {
3505    return kCondLT;
3506  }
3507
3508 private:
3509  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3510
3511  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3512};
3513
3514class HBelow FINAL : public HCondition {
3515 public:
3516  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3517      : HCondition(first, second, dex_pc) {}
3518
3519  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3520    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3521  }
3522  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3523    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3524  }
3525  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3526                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3527    LOG(FATAL) << DebugName() << " is not defined for float values";
3528    UNREACHABLE();
3529  }
3530  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3531                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3532    LOG(FATAL) << DebugName() << " is not defined for double values";
3533    UNREACHABLE();
3534  }
3535
3536  DECLARE_INSTRUCTION(Below);
3537
3538  IfCondition GetCondition() const OVERRIDE {
3539    return kCondB;
3540  }
3541
3542  IfCondition GetOppositeCondition() const OVERRIDE {
3543    return kCondAE;
3544  }
3545
3546 private:
3547  template <typename T> static bool Compute(T x, T y) {
3548    return MakeUnsigned(x) < MakeUnsigned(y);
3549  }
3550
3551  DISALLOW_COPY_AND_ASSIGN(HBelow);
3552};
3553
3554class HBelowOrEqual FINAL : public HCondition {
3555 public:
3556  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3557      : HCondition(first, second, dex_pc) {}
3558
3559  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3560    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3561  }
3562  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3563    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3564  }
3565  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3566                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3567    LOG(FATAL) << DebugName() << " is not defined for float values";
3568    UNREACHABLE();
3569  }
3570  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3571                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3572    LOG(FATAL) << DebugName() << " is not defined for double values";
3573    UNREACHABLE();
3574  }
3575
3576  DECLARE_INSTRUCTION(BelowOrEqual);
3577
3578  IfCondition GetCondition() const OVERRIDE {
3579    return kCondBE;
3580  }
3581
3582  IfCondition GetOppositeCondition() const OVERRIDE {
3583    return kCondA;
3584  }
3585
3586 private:
3587  template <typename T> static bool Compute(T x, T y) {
3588    return MakeUnsigned(x) <= MakeUnsigned(y);
3589  }
3590
3591  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3592};
3593
3594class HAbove FINAL : public HCondition {
3595 public:
3596  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3597      : HCondition(first, second, dex_pc) {}
3598
3599  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3600    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3601  }
3602  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3603    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3604  }
3605  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3606                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3607    LOG(FATAL) << DebugName() << " is not defined for float values";
3608    UNREACHABLE();
3609  }
3610  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3611                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3612    LOG(FATAL) << DebugName() << " is not defined for double values";
3613    UNREACHABLE();
3614  }
3615
3616  DECLARE_INSTRUCTION(Above);
3617
3618  IfCondition GetCondition() const OVERRIDE {
3619    return kCondA;
3620  }
3621
3622  IfCondition GetOppositeCondition() const OVERRIDE {
3623    return kCondBE;
3624  }
3625
3626 private:
3627  template <typename T> static bool Compute(T x, T y) {
3628    return MakeUnsigned(x) > MakeUnsigned(y);
3629  }
3630
3631  DISALLOW_COPY_AND_ASSIGN(HAbove);
3632};
3633
3634class HAboveOrEqual FINAL : public HCondition {
3635 public:
3636  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3637      : HCondition(first, second, dex_pc) {}
3638
3639  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3640    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3641  }
3642  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3643    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3644  }
3645  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3646                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3647    LOG(FATAL) << DebugName() << " is not defined for float values";
3648    UNREACHABLE();
3649  }
3650  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3651                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3652    LOG(FATAL) << DebugName() << " is not defined for double values";
3653    UNREACHABLE();
3654  }
3655
3656  DECLARE_INSTRUCTION(AboveOrEqual);
3657
3658  IfCondition GetCondition() const OVERRIDE {
3659    return kCondAE;
3660  }
3661
3662  IfCondition GetOppositeCondition() const OVERRIDE {
3663    return kCondB;
3664  }
3665
3666 private:
3667  template <typename T> static bool Compute(T x, T y) {
3668    return MakeUnsigned(x) >= MakeUnsigned(y);
3669  }
3670
3671  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3672};
3673
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must agree with the declared comparison type.
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way compare: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point; an unordered (NaN) pair yields
  // 1 or -1 according to the bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Compares are considered equal only if all packed fields (including the bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // Packed field holding the ComparisonBias, laid out the same way as in HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3764
// Allocates a new instance of the class referenced by its single input,
// which is a HLoadClass possibly wrapped in a HClinitCheck (see GetLoadClass()).
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Whether allocation goes through the entrypoint that performs the
  // instantiable/access checks itself.
  bool NeedsChecks() const {
    return entrypoint_ == kQuickAllocObjectWithChecks;
  }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  // Returns the HLoadClass input, looking through an intervening HClinitCheck.
  HLoadClass* GetLoadClass() const {
    HInstruction* input = InputAt(0);
    if (input->IsClinitCheck()) {
      input = input->InputAt(0);
    }
    DCHECK(input->IsLoadClass());
    return input->AsLoadClass();
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;  // Type index of the class being allocated.
  const DexFile& dex_file_;  // Dex file in which that type index is valid.
  QuickEntrypointEnum entrypoint_;  // Runtime allocation entrypoint to call.

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3829
// Whether an intrinsic requires an environment and/or a dex cache;
// passed to HInvoke::SetIntrinsic().
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};
3834
// Heap side effects of an intrinsic; passed to HInvoke::SetIntrinsic().
enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};
3841
// Exception behavior of an intrinsic; passed to HInvoke::SetIntrinsic().
enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3846
// Abstract base class for method invocation instructions. The inputs are the
// call arguments, optionally followed by extra non-argument inputs appended by
// subclasses (see GetNumberOfArguments()).
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic and record its environment,
  // side-effect and exception properties.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only intrinsics that do not write to the heap are safe to move.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes compare equal only when both are the same recognized
  // intrinsic; non-intrinsic invokes never compare equal.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  // May be null, e.g. for unresolved invokes (see HInvokeUnresolved).
  ArtMethod* GetResolvedMethod() const { return resolved_method_; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed field layout: invoke type, then return type, then the can-throw flag.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    // Conservatively assume any invoke can throw; SetCanThrow() can relax this.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;  // Argument inputs; extra inputs may follow.
  ArtMethod* const resolved_method_;  // May be null (unresolved invokes).
  const uint32_t dex_method_index_;  // Method index in the dex file.
  Intrinsics intrinsic_;  // Intrinsics::kNone unless SetIntrinsic() was called.

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3955
3956class HInvokeUnresolved FINAL : public HInvoke {
3957 public:
3958  HInvokeUnresolved(ArenaAllocator* arena,
3959                    uint32_t number_of_arguments,
3960                    Primitive::Type return_type,
3961                    uint32_t dex_pc,
3962                    uint32_t dex_method_index,
3963                    InvokeType invoke_type)
3964      : HInvoke(arena,
3965                number_of_arguments,
3966                0u /* number_of_other_inputs */,
3967                return_type,
3968                dex_pc,
3969                dex_method_index,
3970                nullptr,
3971                invoke_type) {
3972  }
3973
3974  DECLARE_INSTRUCTION(InvokeUnresolved);
3975
3976 private:
3977  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
3978};
3979
3980class HInvokePolymorphic FINAL : public HInvoke {
3981 public:
3982  HInvokePolymorphic(ArenaAllocator* arena,
3983                     uint32_t number_of_arguments,
3984                     Primitive::Type return_type,
3985                     uint32_t dex_pc,
3986                     uint32_t dex_method_index)
3987      : HInvoke(arena,
3988                number_of_arguments,
3989                0u /* number_of_other_inputs */,
3990                return_type,
3991                dex_pc,
3992                dex_method_index,
3993                nullptr,
3994                kVirtual) {}
3995
3996  DECLARE_INSTRUCTION(InvokePolymorphic);
3997
3998 private:
3999  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
4000};
4001
// An invoke-static or invoke-direct call, with a fully determined dispatch
// (how to load the target ArtMethod* and where to find the code pointer).
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod input,
                // and potentially one other if the clinit check is explicit.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Replaces the dispatch info, removing the HCurrentMethod input if the new
  // method load kind no longer needs it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  // Inserts `input` at the special input index (right after the arguments,
  // but before an explicit clinit check if there is one).
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Debug-build invariant check: when an explicit clinit check is present it
  // must be the last input, and be either HClinitCheck or HLoadClass.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  // Only a reference-returning, non-String-init call can produce null
  // (String init always produces a fresh non-null string).
  bool CanBeNull() const OVERRIDE {
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  // NOTE(review): implicitly narrows the 64-bit method_load_data; assumes dex
  // cache array offsets always fit in 32 bits — confirm.
  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // The clinit check requirement is packed right after the HInvoke packed bits.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
// Stream output operators for the dispatch-related enums.
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4252
// An invoke-virtual call, dispatched through the receiver's vtable.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    // These intrinsics are known to never return null; for anything else,
    // defer to HInvoke::CanBeNull().
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  // An implicit null check is possible only on the receiver (input 0) and
  // only when the call has not been intrinsified.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4300
4301class HInvokeInterface FINAL : public HInvoke {
4302 public:
4303  HInvokeInterface(ArenaAllocator* arena,
4304                   uint32_t number_of_arguments,
4305                   Primitive::Type return_type,
4306                   uint32_t dex_pc,
4307                   uint32_t dex_method_index,
4308                   ArtMethod* resolved_method,
4309                   uint32_t imt_index)
4310      : HInvoke(arena,
4311                number_of_arguments,
4312                0u,
4313                return_type,
4314                dex_pc,
4315                dex_method_index,
4316                resolved_method,
4317                kInterface),
4318        imt_index_(imt_index) {}
4319
4320  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4321    // TODO: Add implicit null checks in intrinsics.
4322    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4323  }
4324
4325  uint32_t GetImtIndex() const { return imt_index_; }
4326  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4327
4328  DECLARE_INSTRUCTION(InvokeInterface);
4329
4330 private:
4331  // Cached value of the resolved method, to avoid needing the mutator lock.
4332  const uint32_t imt_index_;
4333
4334  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4335};
4336
4337class HNeg FINAL : public HUnaryOperation {
4338 public:
4339  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4340      : HUnaryOperation(result_type, input, dex_pc) {
4341    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4342  }
4343
4344  template <typename T> static T Compute(T x) { return -x; }
4345
4346  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4347    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4348  }
4349  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4350    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4351  }
4352  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4353    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4354  }
4355  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4356    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4357  }
4358
4359  DECLARE_INSTRUCTION(Neg);
4360
4361 private:
4362  DISALLOW_COPY_AND_ASSIGN(HNeg);
4363};
4364
4365class HNewArray FINAL : public HExpression<2> {
4366 public:
4367  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4368      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc) {
4369    SetRawInputAt(0, cls);
4370    SetRawInputAt(1, length);
4371  }
4372
4373  // Calls runtime so needs an environment.
4374  bool NeedsEnvironment() const OVERRIDE { return true; }
4375
4376  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4377  bool CanThrow() const OVERRIDE { return true; }
4378
4379  bool CanBeNull() const OVERRIDE { return false; }
4380
4381  HLoadClass* GetLoadClass() const {
4382    DCHECK(InputAt(0)->IsLoadClass());
4383    return InputAt(0)->AsLoadClass();
4384  }
4385
4386  HInstruction* GetLength() const {
4387    return InputAt(1);
4388  }
4389
4390  DECLARE_INSTRUCTION(NewArray);
4391
4392 private:
4393  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4394};
4395
4396class HAdd FINAL : public HBinaryOperation {
4397 public:
4398  HAdd(Primitive::Type result_type,
4399       HInstruction* left,
4400       HInstruction* right,
4401       uint32_t dex_pc = kNoDexPc)
4402      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4403
4404  bool IsCommutative() const OVERRIDE { return true; }
4405
4406  template <typename T> static T Compute(T x, T y) { return x + y; }
4407
4408  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4409    return GetBlock()->GetGraph()->GetIntConstant(
4410        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4411  }
4412  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4413    return GetBlock()->GetGraph()->GetLongConstant(
4414        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4415  }
4416  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4417    return GetBlock()->GetGraph()->GetFloatConstant(
4418        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4419  }
4420  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4421    return GetBlock()->GetGraph()->GetDoubleConstant(
4422        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4423  }
4424
4425  DECLARE_INSTRUCTION(Add);
4426
4427 private:
4428  DISALLOW_COPY_AND_ASSIGN(HAdd);
4429};
4430
4431class HSub FINAL : public HBinaryOperation {
4432 public:
4433  HSub(Primitive::Type result_type,
4434       HInstruction* left,
4435       HInstruction* right,
4436       uint32_t dex_pc = kNoDexPc)
4437      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4438
4439  template <typename T> static T Compute(T x, T y) { return x - y; }
4440
4441  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4442    return GetBlock()->GetGraph()->GetIntConstant(
4443        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4444  }
4445  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4446    return GetBlock()->GetGraph()->GetLongConstant(
4447        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4448  }
4449  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4450    return GetBlock()->GetGraph()->GetFloatConstant(
4451        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4452  }
4453  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4454    return GetBlock()->GetGraph()->GetDoubleConstant(
4455        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4456  }
4457
4458  DECLARE_INSTRUCTION(Sub);
4459
4460 private:
4461  DISALLOW_COPY_AND_ASSIGN(HSub);
4462};
4463
4464class HMul FINAL : public HBinaryOperation {
4465 public:
4466  HMul(Primitive::Type result_type,
4467       HInstruction* left,
4468       HInstruction* right,
4469       uint32_t dex_pc = kNoDexPc)
4470      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4471
4472  bool IsCommutative() const OVERRIDE { return true; }
4473
4474  template <typename T> static T Compute(T x, T y) { return x * y; }
4475
4476  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4477    return GetBlock()->GetGraph()->GetIntConstant(
4478        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4479  }
4480  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4481    return GetBlock()->GetGraph()->GetLongConstant(
4482        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4483  }
4484  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4485    return GetBlock()->GetGraph()->GetFloatConstant(
4486        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4487  }
4488  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4489    return GetBlock()->GetGraph()->GetDoubleConstant(
4490        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4491  }
4492
4493  DECLARE_INSTRUCTION(Mul);
4494
4495 private:
4496  DISALLOW_COPY_AND_ASSIGN(HMul);
4497};
4498
// Division. Integral division special-cases a divisor of -1 to avoid the
// hardware trap on `min_int / -1` overflow; zero divisors are excluded
// before constant folding (see HDivZeroCheck).
class HDiv FINAL : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};
4545
// Remainder. Integral remainder special-cases a divisor of -1 (the result is
// always 0) to avoid the hardware trap on `min_int % -1`; the floating-point
// variant uses std::fmod.
class HRem FINAL : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};
4592
4593class HDivZeroCheck FINAL : public HExpression<1> {
4594 public:
4595  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
4596  // constructor.
4597  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
4598      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4599    SetRawInputAt(0, value);
4600  }
4601
4602  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }
4603
4604  bool CanBeMoved() const OVERRIDE { return true; }
4605
4606  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4607    return true;
4608  }
4609
4610  bool NeedsEnvironment() const OVERRIDE { return true; }
4611  bool CanThrow() const OVERRIDE { return true; }
4612
4613  DECLARE_INSTRUCTION(DivZeroCheck);
4614
4615 private:
4616  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
4617};
4618
// Left shift. The shift distance is always an int; only its low bits
// (selected by masking with `max_shift_distance`) take effect.
class HShl FINAL : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Masks `distance` with `max_shift_distance` before shifting, so
  // out-of-range distances wrap instead of being undefined behavior.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value << (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining overloads are invalid operand combinations; keep them
  // defined so the overload set stays total, but abort if ever reached.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};
4664
// Signed (arithmetic) right shift. The shift distance is always an int; only
// its low bits (selected by masking with `max_shift_distance`) take effect.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Note: relies on the compiler implementing `>>` on signed types as an
  // arithmetic (sign-extending) shift, which is implementation-defined in C++.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining overloads are invalid operand combinations; keep them
  // defined so the overload set stays total, but abort if ever reached.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4710
// Unsigned (logical) right shift. The shift distance is always an int; only
// its low bits (selected by masking with `max_shift_distance`) take effect.
class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Shifts in the unsigned domain to guarantee a logical (zero-filling)
  // shift regardless of the sign of `value`.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining overloads are invalid operand combinations; keep them
  // defined so the overload set stays total, but abort if ever reached.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};
4758
4759class HAnd FINAL : public HBinaryOperation {
4760 public:
4761  HAnd(Primitive::Type result_type,
4762       HInstruction* left,
4763       HInstruction* right,
4764       uint32_t dex_pc = kNoDexPc)
4765      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4766
4767  bool IsCommutative() const OVERRIDE { return true; }
4768
4769  template <typename T> static T Compute(T x, T y) { return x & y; }
4770
4771  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4772    return GetBlock()->GetGraph()->GetIntConstant(
4773        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4774  }
4775  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4776    return GetBlock()->GetGraph()->GetLongConstant(
4777        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4778  }
4779  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4780                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4781    LOG(FATAL) << DebugName() << " is not defined for float values";
4782    UNREACHABLE();
4783  }
4784  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4785                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4786    LOG(FATAL) << DebugName() << " is not defined for double values";
4787    UNREACHABLE();
4788  }
4789
4790  DECLARE_INSTRUCTION(And);
4791
4792 private:
4793  DISALLOW_COPY_AND_ASSIGN(HAnd);
4794};
4795
4796class HOr FINAL : public HBinaryOperation {
4797 public:
4798  HOr(Primitive::Type result_type,
4799      HInstruction* left,
4800      HInstruction* right,
4801      uint32_t dex_pc = kNoDexPc)
4802      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4803
4804  bool IsCommutative() const OVERRIDE { return true; }
4805
4806  template <typename T> static T Compute(T x, T y) { return x | y; }
4807
4808  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4809    return GetBlock()->GetGraph()->GetIntConstant(
4810        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4811  }
4812  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4813    return GetBlock()->GetGraph()->GetLongConstant(
4814        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4815  }
4816  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4817                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4818    LOG(FATAL) << DebugName() << " is not defined for float values";
4819    UNREACHABLE();
4820  }
4821  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4822                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4823    LOG(FATAL) << DebugName() << " is not defined for double values";
4824    UNREACHABLE();
4825  }
4826
4827  DECLARE_INSTRUCTION(Or);
4828
4829 private:
4830  DISALLOW_COPY_AND_ASSIGN(HOr);
4831};
4832
4833class HXor FINAL : public HBinaryOperation {
4834 public:
4835  HXor(Primitive::Type result_type,
4836       HInstruction* left,
4837       HInstruction* right,
4838       uint32_t dex_pc = kNoDexPc)
4839      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4840
4841  bool IsCommutative() const OVERRIDE { return true; }
4842
4843  template <typename T> static T Compute(T x, T y) { return x ^ y; }
4844
4845  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4846    return GetBlock()->GetGraph()->GetIntConstant(
4847        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4848  }
4849  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4850    return GetBlock()->GetGraph()->GetLongConstant(
4851        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4852  }
4853  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4854                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4855    LOG(FATAL) << DebugName() << " is not defined for float values";
4856    UNREACHABLE();
4857  }
4858  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4859                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4860    LOG(FATAL) << DebugName() << " is not defined for double values";
4861    UNREACHABLE();
4862  }
4863
4864  DECLARE_INSTRUCTION(Xor);
4865
4866 private:
4867  DISALLOW_COPY_AND_ASSIGN(HXor);
4868};
4869
4870class HRor FINAL : public HBinaryOperation {
4871 public:
4872  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4873    : HBinaryOperation(result_type, value, distance) {
4874    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4875    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4876  }
4877
4878  template <typename T>
4879  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
4880    typedef typename std::make_unsigned<T>::type V;
4881    V ux = static_cast<V>(value);
4882    if ((distance & max_shift_value) == 0) {
4883      return static_cast<T>(ux);
4884    } else {
4885      const V reg_bits = sizeof(T) * 8;
4886      return static_cast<T>(ux >> (distance & max_shift_value)) |
4887                           (value << (reg_bits - (distance & max_shift_value)));
4888    }
4889  }
4890
4891  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4892    return GetBlock()->GetGraph()->GetIntConstant(
4893        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4894  }
4895  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4896    return GetBlock()->GetGraph()->GetLongConstant(
4897        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4898  }
4899  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4900                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4901    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4902    UNREACHABLE();
4903  }
4904  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4905                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4906    LOG(FATAL) << DebugName() << " is not defined for float values";
4907    UNREACHABLE();
4908  }
4909  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4910                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4911    LOG(FATAL) << DebugName() << " is not defined for double values";
4912    UNREACHABLE();
4913  }
4914
4915  DECLARE_INSTRUCTION(Ror);
4916
4917 private:
4918  DISALLOW_COPY_AND_ASSIGN(HRor);
4919};
4920
// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument can never be null; all other parameters
    // are conservatively assumed nullable until SetCanBeNull() says otherwise.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  // Allows callers to refine the nullability recorded at construction time.
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  // Whether the parameter may hold a null reference.
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4964
4965class HNot FINAL : public HUnaryOperation {
4966 public:
4967  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4968      : HUnaryOperation(result_type, input, dex_pc) {}
4969
4970  bool CanBeMoved() const OVERRIDE { return true; }
4971  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4972    return true;
4973  }
4974
4975  template <typename T> static T Compute(T x) { return ~x; }
4976
4977  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4978    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4979  }
4980  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4981    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4982  }
4983  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4984    LOG(FATAL) << DebugName() << " is not defined for float values";
4985    UNREACHABLE();
4986  }
4987  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4988    LOG(FATAL) << DebugName() << " is not defined for double values";
4989    UNREACHABLE();
4990  }
4991
4992  DECLARE_INSTRUCTION(Not);
4993
4994 private:
4995  DISALLOW_COPY_AND_ASSIGN(HNot);
4996};
4997
4998class HBooleanNot FINAL : public HUnaryOperation {
4999 public:
5000  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5001      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
5002
5003  bool CanBeMoved() const OVERRIDE { return true; }
5004  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5005    return true;
5006  }
5007
5008  template <typename T> static bool Compute(T x) {
5009    DCHECK(IsUint<1>(x)) << x;
5010    return !x;
5011  }
5012
5013  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
5014    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5015  }
5016  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5017    LOG(FATAL) << DebugName() << " is not defined for long values";
5018    UNREACHABLE();
5019  }
5020  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5021    LOG(FATAL) << DebugName() << " is not defined for float values";
5022    UNREACHABLE();
5023  }
5024  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5025    LOG(FATAL) << DebugName() << " is not defined for double values";
5026    UNREACHABLE();
5027  }
5028
5029  DECLARE_INSTRUCTION(BooleanNot);
5030
5031 private:
5032  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
5033};
5034
// Conversion between primitive types (e.g. int-to-long, float-to-int).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // No side effects; two conversions with the same input and result type
  // are interchangeable.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5063
// Sentinel register number meaning "no register assigned" (all bits set).
// The cast makes the intentional signed-to-unsigned wrap explicit and keeps
// -Wsign-conversion style warnings quiet.
static constexpr uint32_t kNoRegNumber = static_cast<uint32_t>(-1);
5065
// Checks that its input is non-null, throwing NullPointerException otherwise.
// Its "result" is the checked value itself, known to be non-null afterwards.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // May throw, so an environment is required.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // After a successful check the value is by definition non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5092
// Embeds an ArtField and all the information required by the compiler. We cache
// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  // The resolved field; inspecting it directly may require the mutator lock
  // (hence the cached copies below).
  ArtField* const field_;
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // NOTE(review): presumably the dex field index within `dex_file_` — confirm.
  const uint32_t index_;
  // NOTE(review): presumably the class def index of the declaring class in
  // `dex_file_` — confirm at construction sites.
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5129
// Read of an instance field; input 0 is the object reference.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Only non-volatile reads may be moved or merged.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are interchangeable when they read from the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An implicit null check is possible only on the object input and only
  // when the field offset is small enough to fault in the guard page.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5179
// Write of an instance field; input 0 is the object, input 1 is the value.
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored reference may be null until
    // ClearValueCanBeNull() says otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // An implicit null check is possible only on the object input and only
  // when the field offset is small enough to fault in the guard page.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  // Whether the stored value may be a null reference.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5229
// Read of an array element: input 0 is the array, input 1 is the index.
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Two gets at the same dex_pc are "equivalent": same block, array and
  // index, with one typed int/long and the other typed as the same-width
  // float/double view of the same memory (verified by the debug checks).
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5292
// Write of `value` (input 2) into `array` (input 0) at `index` (input 1).
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Reference stores need a type check until proven unnecessary.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called when analysis proves the stored reference's type is compatible.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  // Called when analysis proves the stored value is never null.
  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type recorded at build time, before any refinement.
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes the side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  // Reference stores may call into the runtime, which can trigger GC.
  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed-field layout: component type, then the three flags below.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5392
// Length of the array given as input 0; always an int result.
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Reading the length dereferences the array, so it can serve as an
  // implicit null check on it.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5428
5429class HBoundsCheck FINAL : public HExpression<2> {
5430 public:
5431  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5432  // constructor.
5433  HBoundsCheck(HInstruction* index,
5434               HInstruction* length,
5435               uint32_t dex_pc,
5436               bool string_char_at = false)
5437      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5438    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5439    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
5440    SetRawInputAt(0, index);
5441    SetRawInputAt(1, length);
5442  }
5443
5444  bool CanBeMoved() const OVERRIDE { return true; }
5445  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5446    return true;
5447  }
5448
5449  bool NeedsEnvironment() const OVERRIDE { return true; }
5450
5451  bool CanThrow() const OVERRIDE { return true; }
5452
5453  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5454
5455  HInstruction* GetIndex() const { return InputAt(0); }
5456
5457  DECLARE_INSTRUCTION(BoundsCheck);
5458
5459 private:
5460  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5461
5462  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5463};
5464
// Safepoint instruction: marks a point where the thread may be suspended.
// Has CanTriggerGC side effects and always needs an environment.
class HSuspendCheck FINAL : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};
5486
// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variables at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}

  // The environment captures the local-variable state the debugger reads.
  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
};
5503
5504/**
5505 * Instruction to load a Class object.
5506 */
5507class HLoadClass FINAL : public HInstruction {
5508 public:
5509  // Determines how to load the Class.
5510  enum class LoadKind {
5511    // Use the Class* from the method's own ArtMethod*.
5512    kReferrersClass,
5513
5514    // Use boot image Class* address that will be known at link time.
5515    // Used for boot image classes referenced by boot image code in non-PIC mode.
5516    kBootImageLinkTimeAddress,
5517
5518    // Use PC-relative boot image Class* address that will be known at link time.
5519    // Used for boot image classes referenced by boot image code in PIC mode.
5520    kBootImageLinkTimePcRelative,
5521
5522    // Use a known boot image Class* address, embedded in the code by the codegen.
5523    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5524    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5525    // GetIncludePatchInformation().
5526    kBootImageAddress,
5527
5528    // Load from an entry in the .bss section using a PC-relative load.
5529    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
5530    kBssEntry,
5531
5532    // Load from the root table associated with the JIT compiled method.
5533    kJitTableAddress,
5534
5535    // Load from resolved types array accessed through the class loaded from
5536    // the compiled method's own ArtMethod*. This is the default access type when
5537    // all other types are unavailable.
5538    kDexCacheViaMethod,
5539
5540    kLast = kDexCacheViaMethod
5541  };
5542
5543  HLoadClass(HCurrentMethod* current_method,
5544             dex::TypeIndex type_index,
5545             const DexFile& dex_file,
5546             Handle<mirror::Class> klass,
5547             bool is_referrers_class,
5548             uint32_t dex_pc,
5549             bool needs_access_check)
5550      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5551        special_input_(HUserRecord<HInstruction*>(current_method)),
5552        type_index_(type_index),
5553        dex_file_(dex_file),
5554        klass_(klass),
5555        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5556    // Referrers class should not need access check. We never inline unverified
5557    // methods so we can't possibly end up in this situation.
5558    DCHECK(!is_referrers_class || !needs_access_check);
5559
5560    SetPackedField<LoadKindField>(
5561        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5562    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5563    SetPackedFlag<kFlagIsInBootImage>(false);
5564    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5565  }
5566
5567  void SetLoadKind(LoadKind load_kind) {
5568    SetLoadKindInternal(load_kind);
5569  }
5570
5571  void SetLoadKindWithTypeReference(LoadKind load_kind,
5572                                    const DexFile& dex_file,
5573                                    dex::TypeIndex type_index) {
5574    DCHECK(HasTypeReference(load_kind));
5575    DCHECK(IsSameDexFile(dex_file_, dex_file));
5576    DCHECK_EQ(type_index_, type_index);
5577    SetLoadKindInternal(load_kind);
5578  }
5579
5580  LoadKind GetLoadKind() const {
5581    return GetPackedField<LoadKindField>();
5582  }
5583
5584  bool CanBeMoved() const OVERRIDE { return true; }
5585
5586  bool InstructionDataEquals(const HInstruction* other) const;
5587
5588  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5589
5590  bool CanBeNull() const OVERRIDE { return false; }
5591
5592  bool NeedsEnvironment() const OVERRIDE {
5593    return CanCallRuntime();
5594  }
5595
5596  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5597    // The entrypoint the code generator is going to call does not do
5598    // clinit of the class.
5599    DCHECK(!NeedsAccessCheck());
5600    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5601  }
5602
5603  bool CanCallRuntime() const {
5604    return NeedsAccessCheck() ||
5605           MustGenerateClinitCheck() ||
5606           GetLoadKind() == LoadKind::kDexCacheViaMethod ||
5607           GetLoadKind() == LoadKind::kBssEntry;
5608  }
5609
5610  bool CanThrow() const OVERRIDE {
5611    return NeedsAccessCheck() ||
5612           MustGenerateClinitCheck() ||
5613           // If the class is in the boot image, the lookup in the runtime call cannot throw.
5614           // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
5615           // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
5616           ((GetLoadKind() == LoadKind::kDexCacheViaMethod ||
5617             GetLoadKind() == LoadKind::kBssEntry) &&
5618            !IsInBootImage());
5619  }
5620
5621  ReferenceTypeInfo GetLoadedClassRTI() {
5622    return loaded_class_rti_;
5623  }
5624
5625  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5626    // Make sure we only set exact types (the loaded class should never be merged).
5627    DCHECK(rti.IsExact());
5628    loaded_class_rti_ = rti;
5629  }
5630
5631  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5632  const DexFile& GetDexFile() const { return dex_file_; }
5633
5634  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5635    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
5636  }
5637
5638  static SideEffects SideEffectsForArchRuntimeCalls() {
5639    return SideEffects::CanTriggerGC();
5640  }
5641
5642  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5643  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5644  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5645  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5646
5647  void MarkInBootImage() {
5648    SetPackedFlag<kFlagIsInBootImage>(true);
5649  }
5650
5651  void AddSpecialInput(HInstruction* special_input);
5652
5653  using HInstruction::GetInputRecords;  // Keep the const version visible.
5654  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5655    return ArrayRef<HUserRecord<HInstruction*>>(
5656        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5657  }
5658
5659  Primitive::Type GetType() const OVERRIDE {
5660    return Primitive::kPrimNot;
5661  }
5662
5663  Handle<mirror::Class> GetClass() const {
5664    return klass_;
5665  }
5666
5667  DECLARE_INSTRUCTION(LoadClass);
5668
5669 private:
5670  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5671  static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
5672  // Whether this instruction must generate the initialization check.
5673  // Used for code generation.
5674  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5675  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5676  static constexpr size_t kFieldLoadKindSize =
5677      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5678  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5679  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5680  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5681
5682  static bool HasTypeReference(LoadKind load_kind) {
5683    return load_kind == LoadKind::kReferrersClass ||
5684        load_kind == LoadKind::kBootImageLinkTimeAddress ||
5685        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5686        load_kind == LoadKind::kBssEntry ||
5687        load_kind == LoadKind::kDexCacheViaMethod;
5688  }
5689
5690  void SetLoadKindInternal(LoadKind load_kind);
5691
5692  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5693  // For other load kinds it's empty or possibly some architecture-specific instruction
5694  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5695  HUserRecord<HInstruction*> special_input_;
5696
5697  const dex::TypeIndex type_index_;
5698  const DexFile& dex_file_;
5699
5700  Handle<mirror::Class> klass_;
5701
5702  ReferenceTypeInfo loaded_class_rti_;
5703
5704  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5705};
5706std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5707
5708// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5709inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5710  // The special input is used for PC-relative loads on some architectures,
5711  // including literal pool loads, which are PC-relative too.
5712  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5713         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5714         GetLoadKind() == LoadKind::kBootImageAddress ||
5715         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
5716  DCHECK(special_input_.GetInstruction() == nullptr);
5717  special_input_ = HUserRecord<HInstruction*>(special_input);
5718  special_input->AddUseAt(this, 0);
5719}
5720
// Instruction to load a String object.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    // Note: codegen needs to emit a linker patch if indicated by compiler options'
    // GetIncludePatchInformation().
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    kLast = kDexCacheViaMethod,
  };

  // The load kind starts out as kDexCacheViaMethod (the most generic kind)
  // and may later be sharpened via SetLoadKind().
  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
  }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  // Hash on the string index; equality is refined by InstructionDataEquals().
  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  // Can throw exactly when the runtime call is needed (see NeedsEnvironment()).
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  // Packed field layout: the load kind is stored after the generic bits.
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  // The resolved String, if known; set via SetString().
  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5853std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5854
5855// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5856inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5857  // The special input is used for PC-relative loads on some architectures,
5858  // including literal pool loads, which are PC-relative too.
5859  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5860         GetLoadKind() == LoadKind::kBssEntry ||
5861         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5862         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5863  // HLoadString::GetInputRecords() returns an empty array at this point,
5864  // so use the GetInputRecords() from the base class to set the input record.
5865  DCHECK(special_input_.GetInstruction() == nullptr);
5866  special_input_ = HUserRecord<HInstruction*>(special_input);
5867  special_input->AddUseAt(this, 0);
5868}
5869
5870/**
5871 * Performs an initialization check on its Class object input.
5872 */
5873class HClinitCheck FINAL : public HExpression<1> {
5874 public:
5875  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5876      : HExpression(
5877            Primitive::kPrimNot,
5878            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5879            dex_pc) {
5880    SetRawInputAt(0, constant);
5881  }
5882
5883  bool CanBeMoved() const OVERRIDE { return true; }
5884  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5885    return true;
5886  }
5887
5888  bool NeedsEnvironment() const OVERRIDE {
5889    // May call runtime to initialize the class.
5890    return true;
5891  }
5892
5893  bool CanThrow() const OVERRIDE { return true; }
5894
5895  HLoadClass* GetLoadClass() const {
5896    DCHECK(InputAt(0)->IsLoadClass());
5897    return InputAt(0)->AsLoadClass();
5898  }
5899
5900  DECLARE_INSTRUCTION(ClinitCheck);
5901
5902 private:
5903  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5904};
5905
5906class HStaticFieldGet FINAL : public HExpression<1> {
5907 public:
5908  HStaticFieldGet(HInstruction* cls,
5909                  ArtField* field,
5910                  Primitive::Type field_type,
5911                  MemberOffset field_offset,
5912                  bool is_volatile,
5913                  uint32_t field_idx,
5914                  uint16_t declaring_class_def_index,
5915                  const DexFile& dex_file,
5916                  uint32_t dex_pc)
5917      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
5918        field_info_(field,
5919                    field_offset,
5920                    field_type,
5921                    is_volatile,
5922                    field_idx,
5923                    declaring_class_def_index,
5924                    dex_file) {
5925    SetRawInputAt(0, cls);
5926  }
5927
5928
5929  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
5930
5931  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
5932    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
5933    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5934  }
5935
5936  size_t ComputeHashCode() const OVERRIDE {
5937    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5938  }
5939
5940  const FieldInfo& GetFieldInfo() const { return field_info_; }
5941  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5942  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5943  bool IsVolatile() const { return field_info_.IsVolatile(); }
5944
5945  DECLARE_INSTRUCTION(StaticFieldGet);
5946
5947 private:
5948  const FieldInfo field_info_;
5949
5950  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
5951};
5952
5953class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
5954 public:
5955  HStaticFieldSet(HInstruction* cls,
5956                  HInstruction* value,
5957                  ArtField* field,
5958                  Primitive::Type field_type,
5959                  MemberOffset field_offset,
5960                  bool is_volatile,
5961                  uint32_t field_idx,
5962                  uint16_t declaring_class_def_index,
5963                  const DexFile& dex_file,
5964                  uint32_t dex_pc)
5965      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
5966        field_info_(field,
5967                    field_offset,
5968                    field_type,
5969                    is_volatile,
5970                    field_idx,
5971                    declaring_class_def_index,
5972                    dex_file) {
5973    SetPackedFlag<kFlagValueCanBeNull>(true);
5974    SetRawInputAt(0, cls);
5975    SetRawInputAt(1, value);
5976  }
5977
5978  const FieldInfo& GetFieldInfo() const { return field_info_; }
5979  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5980  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5981  bool IsVolatile() const { return field_info_.IsVolatile(); }
5982
5983  HInstruction* GetValue() const { return InputAt(1); }
5984  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5985  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5986
5987  DECLARE_INSTRUCTION(StaticFieldSet);
5988
5989 private:
5990  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
5991  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
5992  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
5993                "Too many packed fields.");
5994
5995  const FieldInfo field_info_;
5996
5997  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
5998};
5999
6000class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
6001 public:
6002  HUnresolvedInstanceFieldGet(HInstruction* obj,
6003                              Primitive::Type field_type,
6004                              uint32_t field_index,
6005                              uint32_t dex_pc)
6006      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6007        field_index_(field_index) {
6008    SetRawInputAt(0, obj);
6009  }
6010
6011  bool NeedsEnvironment() const OVERRIDE { return true; }
6012  bool CanThrow() const OVERRIDE { return true; }
6013
6014  Primitive::Type GetFieldType() const { return GetType(); }
6015  uint32_t GetFieldIndex() const { return field_index_; }
6016
6017  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6018
6019 private:
6020  const uint32_t field_index_;
6021
6022  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6023};
6024
6025class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6026 public:
6027  HUnresolvedInstanceFieldSet(HInstruction* obj,
6028                              HInstruction* value,
6029                              Primitive::Type field_type,
6030                              uint32_t field_index,
6031                              uint32_t dex_pc)
6032      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6033        field_index_(field_index) {
6034    SetPackedField<FieldTypeField>(field_type);
6035    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6036    SetRawInputAt(0, obj);
6037    SetRawInputAt(1, value);
6038  }
6039
6040  bool NeedsEnvironment() const OVERRIDE { return true; }
6041  bool CanThrow() const OVERRIDE { return true; }
6042
6043  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6044  uint32_t GetFieldIndex() const { return field_index_; }
6045
6046  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6047
6048 private:
6049  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6050  static constexpr size_t kFieldFieldTypeSize =
6051      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6052  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6053      kFieldFieldType + kFieldFieldTypeSize;
6054  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6055                "Too many packed fields.");
6056  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6057
6058  const uint32_t field_index_;
6059
6060  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6061};
6062
6063class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
6064 public:
6065  HUnresolvedStaticFieldGet(Primitive::Type field_type,
6066                            uint32_t field_index,
6067                            uint32_t dex_pc)
6068      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6069        field_index_(field_index) {
6070  }
6071
6072  bool NeedsEnvironment() const OVERRIDE { return true; }
6073  bool CanThrow() const OVERRIDE { return true; }
6074
6075  Primitive::Type GetFieldType() const { return GetType(); }
6076  uint32_t GetFieldIndex() const { return field_index_; }
6077
6078  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
6079
6080 private:
6081  const uint32_t field_index_;
6082
6083  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
6084};
6085
6086class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
6087 public:
6088  HUnresolvedStaticFieldSet(HInstruction* value,
6089                            Primitive::Type field_type,
6090                            uint32_t field_index,
6091                            uint32_t dex_pc)
6092      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6093        field_index_(field_index) {
6094    SetPackedField<FieldTypeField>(field_type);
6095    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6096    SetRawInputAt(0, value);
6097  }
6098
6099  bool NeedsEnvironment() const OVERRIDE { return true; }
6100  bool CanThrow() const OVERRIDE { return true; }
6101
6102  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6103  uint32_t GetFieldIndex() const { return field_index_; }
6104
6105  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
6106
6107 private:
6108  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6109  static constexpr size_t kFieldFieldTypeSize =
6110      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6111  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6112      kFieldFieldType + kFieldFieldTypeSize;
6113  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6114                "Too many packed fields.");
6115  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6116
6117  const uint32_t field_index_;
6118
6119  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
6120};
6121
6122// Implement the move-exception DEX instruction.
6123class HLoadException FINAL : public HExpression<0> {
6124 public:
6125  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
6126      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}
6127
6128  bool CanBeNull() const OVERRIDE { return false; }
6129
6130  DECLARE_INSTRUCTION(LoadException);
6131
6132 private:
6133  DISALLOW_COPY_AND_ASSIGN(HLoadException);
6134};
6135
6136// Implicit part of move-exception which clears thread-local exception storage.
6137// Must not be removed because the runtime expects the TLS to get cleared.
6138class HClearException FINAL : public HTemplateInstruction<0> {
6139 public:
6140  explicit HClearException(uint32_t dex_pc = kNoDexPc)
6141      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}
6142
6143  DECLARE_INSTRUCTION(ClearException);
6144
6145 private:
6146  DISALLOW_COPY_AND_ASSIGN(HClearException);
6147};
6148
6149class HThrow FINAL : public HTemplateInstruction<1> {
6150 public:
6151  HThrow(HInstruction* exception, uint32_t dex_pc)
6152      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6153    SetRawInputAt(0, exception);
6154  }
6155
6156  bool IsControlFlow() const OVERRIDE { return true; }
6157
6158  bool NeedsEnvironment() const OVERRIDE { return true; }
6159
6160  bool CanThrow() const OVERRIDE { return true; }
6161
6162
6163  DECLARE_INSTRUCTION(Throw);
6164
6165 private:
6166  DISALLOW_COPY_AND_ASSIGN(HThrow);
6167};
6168
6169/**
6170 * Implementation strategies for the code generator of a HInstanceOf
6171 * or `HCheckCast`.
6172 */
6173enum class TypeCheckKind {
6174  kUnresolvedCheck,       // Check against an unresolved type.
6175  kExactCheck,            // Can do a single class compare.
6176  kClassHierarchyCheck,   // Can just walk the super class chain.
6177  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6178  kInterfaceCheck,        // No optimization yet when checking against an interface.
6179  kArrayObjectCheck,      // Can just check if the array is not primitive.
6180  kArrayCheck,            // No optimization yet when checking against a generic array.
6181  kLast = kArrayCheck
6182};
6183
6184std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6185
6186class HInstanceOf FINAL : public HExpression<2> {
6187 public:
6188  HInstanceOf(HInstruction* object,
6189              HLoadClass* constant,
6190              TypeCheckKind check_kind,
6191              uint32_t dex_pc)
6192      : HExpression(Primitive::kPrimBoolean,
6193                    SideEffectsForArchRuntimeCalls(check_kind),
6194                    dex_pc) {
6195    SetPackedField<TypeCheckKindField>(check_kind);
6196    SetPackedFlag<kFlagMustDoNullCheck>(true);
6197    SetRawInputAt(0, object);
6198    SetRawInputAt(1, constant);
6199  }
6200
6201  bool CanBeMoved() const OVERRIDE { return true; }
6202
6203  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6204    return true;
6205  }
6206
6207  bool NeedsEnvironment() const OVERRIDE {
6208    return CanCallRuntime(GetTypeCheckKind());
6209  }
6210
6211  // Used only in code generation.
6212  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
6213  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
6214  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
6215  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
6216
6217  static bool CanCallRuntime(TypeCheckKind check_kind) {
6218    // Mips currently does runtime calls for any other checks.
6219    return check_kind != TypeCheckKind::kExactCheck;
6220  }
6221
6222  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
6223    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
6224  }
6225
6226  DECLARE_INSTRUCTION(InstanceOf);
6227
6228 private:
6229  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
6230  static constexpr size_t kFieldTypeCheckKindSize =
6231      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
6232  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
6233  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
6234  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6235  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
6236
6237  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
6238};
6239
// Narrows the reference type of its single input, e.g. inside an
// `if (x instanceof ClassX)` branch. Used by reference type propagation.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    // Start fully permissive; reference type propagation tightens these.
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // can_be_null must never exceed the upper bound's nullability.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6283
// Implements the `check-cast` operation: throws a ClassCastException at
// runtime if `object` is non-null and not an instance of the class loaded
// by `constant`.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    // Assume a null check is needed until proven otherwise (see ClearMustDoNullCheck()).
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed field layout: the type-check kind followed by the null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6328
6329/**
6330 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6331 * @details We define the combined barrier types that are actually required
6332 * by the Java Memory Model, rather than using exactly the terminology from
6333 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6334 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6335 * store atomicity issues, and the recipes there are not always entirely sufficient.
6336 * The current recipe is as follows:
6337 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6338 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6339 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6340 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6341 *    class has final fields.
6342 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6343 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6344 */
enum MemBarrierKind {
  kAnyStore,     // ~= (LoadStore | StoreStore); release barrier before a volatile store.
  kLoadAny,      // ~= (LoadLoad | LoadStore); acquire barrier after a volatile load.
  kStoreStore,   // Orders earlier stores before later ones, e.g. before returning from
                 // a constructor whose class has final fields.
  kAnyAny,       // Full barrier; used after a volatile store.
  kNTStoreStore, // Orders non-temporal stores w.r.t. later store-to-memory instructions.
  kLastBarrierKind = kNTStoreStore
};
6353std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6354
6355class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6356 public:
6357  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6358      : HTemplateInstruction(
6359            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6360    SetPackedField<BarrierKindField>(barrier_kind);
6361  }
6362
6363  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6364
6365  DECLARE_INSTRUCTION(MemoryBarrier);
6366
6367 private:
6368  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6369  static constexpr size_t kFieldBarrierKindSize =
6370      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6371  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6372      kFieldBarrierKind + kFieldBarrierKindSize;
6373  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6374                "Too many packed fields.");
6375  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6376
6377  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6378};
6379
6380class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6381 public:
6382  enum class OperationKind {
6383    kEnter,
6384    kExit,
6385    kLast = kExit
6386  };
6387
6388  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6389    : HTemplateInstruction(
6390          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6391          dex_pc) {
6392    SetPackedField<OperationKindField>(kind);
6393    SetRawInputAt(0, object);
6394  }
6395
6396  // Instruction may go into runtime, so we need an environment.
6397  bool NeedsEnvironment() const OVERRIDE { return true; }
6398
6399  bool CanThrow() const OVERRIDE {
6400    // Verifier guarantees that monitor-exit cannot throw.
6401    // This is important because it allows the HGraphBuilder to remove
6402    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6403    return IsEnter();
6404  }
6405
6406  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6407  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6408
6409  DECLARE_INSTRUCTION(MonitorOperation);
6410
6411 private:
6412  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6413  static constexpr size_t kFieldOperationKindSize =
6414      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6415  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6416      kFieldOperationKind + kFieldOperationKindSize;
6417  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6418                "Too many packed fields.");
6419  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6420
6421 private:
6422  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6423};
6424
6425class HSelect FINAL : public HExpression<3> {
6426 public:
6427  HSelect(HInstruction* condition,
6428          HInstruction* true_value,
6429          HInstruction* false_value,
6430          uint32_t dex_pc)
6431      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
6432    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
6433
6434    // First input must be `true_value` or `false_value` to allow codegens to
6435    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
6436    // that architectures which implement HSelect as a conditional move also
6437    // will not need to invert the condition.
6438    SetRawInputAt(0, false_value);
6439    SetRawInputAt(1, true_value);
6440    SetRawInputAt(2, condition);
6441  }
6442
6443  HInstruction* GetFalseValue() const { return InputAt(0); }
6444  HInstruction* GetTrueValue() const { return InputAt(1); }
6445  HInstruction* GetCondition() const { return InputAt(2); }
6446
6447  bool CanBeMoved() const OVERRIDE { return true; }
6448  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6449    return true;
6450  }
6451
6452  bool CanBeNull() const OVERRIDE {
6453    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
6454  }
6455
6456  DECLARE_INSTRUCTION(Select);
6457
6458 private:
6459  DISALLOW_COPY_AND_ASSIGN(HSelect);
6460};
6461
// A single source->destination move handled by the parallel move resolver.
// The resolver encodes transient states by invalidating locations:
//   pending    = destination invalid, source valid (see MarkPending());
//   eliminated = both invalid (see Eliminate()).
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restore the destination cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    // Invariant: a valid destination implies a valid source.
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    // Invariant: an invalid source implies an invalid destination.
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6535
6536std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6537
6538static constexpr size_t kDefaultNumberOfMoves = 4;
6539
// A set of moves that must appear to execute simultaneously, produced by
// the register allocator and resolved by the parallel move resolver.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that no two moves in this
  // parallel move are for the same instruction (except the spill-slot special
  // case below) and that no two destinations overlap.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6595
6596}  // namespace art
6597
6598#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6599#include "nodes_shared.h"
6600#endif
6601#ifdef ART_ENABLE_CODEGEN_arm
6602#include "nodes_arm.h"
6603#endif
6604#ifdef ART_ENABLE_CODEGEN_arm64
6605#include "nodes_arm64.h"
6606#endif
6607#ifdef ART_ENABLE_CODEGEN_mips
6608#include "nodes_mips.h"
6609#endif
6610#ifdef ART_ENABLE_CODEGEN_x86
6611#include "nodes_x86.h"
6612#endif
6613
6614namespace art {
6615
// Base class for graph visitors. FOR_EACH_INSTRUCTION generates one virtual
// Visit##name method per instruction class; each defaults to
// VisitInstruction, so subclasses override only what they care about.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6645
// Visitor whose generated Visit##name methods delegate to the visit method
// of the instruction's super class (Visit##super) instead of directly to
// VisitInstruction, allowing handling whole instruction categories at once.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6662
// Iterator over the blocks that are part of the loop. Includes blocks that are
// part of an inner loop. The order in which the blocks are iterated is on their
// block id.
6666class HBlocksInLoopIterator : public ValueObject {
6667 public:
6668  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6669      : blocks_in_loop_(info.GetBlocks()),
6670        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6671        index_(0) {
6672    if (!blocks_in_loop_.IsBitSet(index_)) {
6673      Advance();
6674    }
6675  }
6676
6677  bool Done() const { return index_ == blocks_.size(); }
6678  HBasicBlock* Current() const { return blocks_[index_]; }
6679  void Advance() {
6680    ++index_;
6681    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6682      if (blocks_in_loop_.IsBitSet(index_)) {
6683        break;
6684      }
6685    }
6686  }
6687
6688 private:
6689  const BitVector& blocks_in_loop_;
6690  const ArenaVector<HBasicBlock*>& blocks_;
6691  size_t index_;
6692
6693  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6694};
6695
// Iterator over the blocks that are part of the loop. Includes blocks that are
// part of an inner loop. The order in which the blocks are iterated is reverse
// post order.
6699class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6700 public:
6701  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6702      : blocks_in_loop_(info.GetBlocks()),
6703        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
6704        index_(0) {
6705    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6706      Advance();
6707    }
6708  }
6709
6710  bool Done() const { return index_ == blocks_.size(); }
6711  HBasicBlock* Current() const { return blocks_[index_]; }
6712  void Advance() {
6713    ++index_;
6714    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6715      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6716        break;
6717      }
6718    }
6719  }
6720
6721 private:
6722  const BitVector& blocks_in_loop_;
6723  const ArenaVector<HBasicBlock*>& blocks_;
6724  size_t index_;
6725
6726  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
6727};
6728
6729inline int64_t Int64FromConstant(HConstant* constant) {
6730  if (constant->IsIntConstant()) {
6731    return constant->AsIntConstant()->GetValue();
6732  } else if (constant->IsLongConstant()) {
6733    return constant->AsLongConstant()->GetValue();
6734  } else {
6735    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6736    return 0;
6737  }
6738}
6739
6740#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6741  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6742  inline const H##type* HInstruction::As##type() const {                       \
6743    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6744  }                                                                            \
6745  inline H##type* HInstruction::As##type() {                                   \
6746    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6747  }
6748
6749  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6750#undef INSTRUCTION_TYPE_CHECK
6751
// Create space in `blocks` for adding `number_of_new_blocks` entries
// immediately after index `after`. Blocks past `after` are moved accordingly.
// (The vacated entries keep their old values until overwritten by the caller.)
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift [after + 1, old_size) towards the new end of the vector.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6763
6764/*
6765 * Hunt "under the hood" of array lengths (leading to array references),
6766 * null checks (also leading to array references), and new arrays
6767 * (leading to the actual length). This makes it more likely related
6768 * instructions become actually comparable.
6769 */
6770inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
6771  while (instruction->IsArrayLength() ||
6772         instruction->IsNullCheck() ||
6773         instruction->IsNewArray()) {
6774    instruction = instruction->IsNewArray()
6775        ? instruction->AsNewArray()->GetLength()
6776        : instruction->InputAt(0);
6777  }
6778  return instruction;
6779}
6780
6781}  // namespace art
6782
6783#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6784