nodes.h revision 02e3092f8d98f339588e48691db77f227b48ac1e
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/iteration_range.h"
29#include "base/stl_util.h"
30#include "base/transform_array_ref.h"
31#include "dex_file.h"
32#include "dex_file_types.h"
33#include "entrypoints/quick/quick_entrypoints_enum.h"
34#include "handle.h"
35#include "handle_scope.h"
36#include "invoke_type.h"
37#include "intrinsics_enum.h"
38#include "locations.h"
39#include "method_reference.h"
40#include "mirror/class.h"
41#include "offsets.h"
42#include "primitive.h"
43#include "utils/intrusive_forward_list.h"
44
45namespace art {
46
47class GraphChecker;
48class HBasicBlock;
49class HCurrentMethod;
50class HDoubleConstant;
51class HEnvironment;
52class HFloatConstant;
53class HGraphBuilder;
54class HGraphVisitor;
55class HInstruction;
56class HIntConstant;
57class HInvoke;
58class HLongConstant;
59class HNullConstant;
60class HPhi;
61class HSuspendCheck;
62class HTryBoundary;
63class LiveInterval;
64class LocationSummary;
65class SlowPathCode;
66class SsaBuilder;
67
68namespace mirror {
69class DexCache;
70}  // namespace mirror
71
// Default initial capacities reserved in the ArenaVector containers used by
// HGraph / HBasicBlock / HLoopInformation below.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinel values marking a field index / class-def index that is not known.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

// Sentinel InvokeType; used as the default for HGraph's invoke_type_.
static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions that do not correspond to a dex instruction.
static constexpr uint32_t kNoDexPc = -1;
90
91inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
92  // For the purposes of the compiler, the dex files must actually be the same object
93  // if we want to safely treat them as the same. This is especially important for JIT
94  // as custom class loaders can open the same underlying file (or memory) multiple
95  // times and provide different class resolution but no two class loaders should ever
96  // use the same DexFile object - doing so is an unsupported hack that can lead to
97  // all sorts of weird failures.
98  return &lhs == &rhs;
99}
100
// The kind of comparison performed by a conditional branch/compare.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
119
// Result codes for graph-level analyses such as HGraph::BuildDominatorTree()
// and HGraph::AnalyzeLoops(). Any value other than kAnalysisSuccess indicates
// the analysis (and hence compilation of the method) bailed out.
enum GraphAnalysisResult {
  kAnalysisSkipped,              // Analysis was not attempted.
  kAnalysisInvalidBytecode,      // Input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,   // A loop header is a catch block (throw-catch loop).
  kAnalysisFailAmbiguousArrayOp, // An array operation's type could not be disambiguated.
  kAnalysisSuccess,
};
127
// Converts `x` to the unsigned type of the same width, preserving the bit
// pattern (e.g. -1 becomes the all-ones value of the unsigned counterpart).
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  using UnsignedT = typename std::make_unsigned<T>::type;
  return static_cast<UnsignedT>(x);
}
132
// Doubly linked list of the instructions in a basic block, tracked through
// first/last pointers. The list does not own the instructions; they are
// arena-allocated.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Append `instruction` at the end / remove it from the list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops all instructions from the list (does not free them; arena-allocated).
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Splice the given list into this one after/before `cursor`, or at the end.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
179
// Static type information for a reference value: a handle to the class plus a
// flag saying whether the type is exact or only an upper bound. A
// default-constructed (invalid) instance carries no type information.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  // Creates a validated instance; the unchecked variant below skips validation.
  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    // A type that cannot be assigned from any other type is necessarily exact.
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  // Returns an instance with a null type handle, i.e. IsValid() == false.
  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  // True for arrays whose component type is exactly java.lang.Object.
  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Whether an array of this (exact) type can hold a value of type `rti`.
  // Conservatively returns false unless this is known to be an exact array class.
  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Whether an array of this (exact) type can hold every element of an array
  // of type `rti`, i.e. component types are assignable.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  // True if `rti`'s class is this class or a subclass of it.
  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Like IsSupertypeOf(), but false when the two classes are the same.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object. A null handle means "no information" (invalid).
  TypeHandle type_handle_;
  // Whether the type is exact, as opposed to a superclass of the actual type.
  bool is_exact_;
};
299
300std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
301
302// Control-flow graph of a method. Contains a list of basic blocks.
303class HGraph : public ArenaObject<kArenaAllocGraph> {
304 public:
305  HGraph(ArenaAllocator* arena,
306         const DexFile& dex_file,
307         uint32_t method_idx,
308         bool should_generate_constructor_barrier,
309         InstructionSet instruction_set,
310         InvokeType invoke_type = kInvalidInvokeType,
311         bool debuggable = false,
312         bool osr = false,
313         int start_instruction_id = 0)
314      : arena_(arena),
315        blocks_(arena->Adapter(kArenaAllocBlockList)),
316        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
317        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
318        entry_block_(nullptr),
319        exit_block_(nullptr),
320        maximum_number_of_out_vregs_(0),
321        number_of_vregs_(0),
322        number_of_in_vregs_(0),
323        temporaries_vreg_slots_(0),
324        has_bounds_checks_(false),
325        has_try_catch_(false),
326        has_irreducible_loops_(false),
327        debuggable_(debuggable),
328        current_instruction_id_(start_instruction_id),
329        dex_file_(dex_file),
330        method_idx_(method_idx),
331        invoke_type_(invoke_type),
332        in_ssa_form_(false),
333        should_generate_constructor_barrier_(should_generate_constructor_barrier),
334        number_of_cha_guards_(0),
335        instruction_set_(instruction_set),
336        cached_null_constant_(nullptr),
337        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
338        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
339        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
340        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
341        cached_current_method_(nullptr),
342        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
343        osr_(osr),
344        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
345    blocks_.reserve(kDefaultNumberOfBlocks);
346  }
347
348  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
349  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
350
351  ArenaAllocator* GetArena() const { return arena_; }
352  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
353
354  bool IsInSsaForm() const { return in_ssa_form_; }
355  void SetInSsaForm() { in_ssa_form_ = true; }
356
357  HBasicBlock* GetEntryBlock() const { return entry_block_; }
358  HBasicBlock* GetExitBlock() const { return exit_block_; }
359  bool HasExitBlock() const { return exit_block_ != nullptr; }
360
361  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
362  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
363
364  void AddBlock(HBasicBlock* block);
365
366  void ComputeDominanceInformation();
367  void ClearDominanceInformation();
368  void ClearLoopInformation();
369  void FindBackEdges(ArenaBitVector* visited);
370  GraphAnalysisResult BuildDominatorTree();
371  void SimplifyCFG();
372  void SimplifyCatchBlocks();
373
374  // Analyze all natural loops in this graph. Returns a code specifying that it
375  // was successful or the reason for failure. The method will fail if a loop
376  // is a throw-catch loop, i.e. the header is a catch block.
377  GraphAnalysisResult AnalyzeLoops() const;
378
379  // Iterate over blocks to compute try block membership. Needs reverse post
380  // order and loop information.
381  void ComputeTryBlockInformation();
382
383  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
384  // Returns the instruction to replace the invoke expression or null if the
385  // invoke is for a void method. Note that the caller is responsible for replacing
386  // and removing the invoke instruction.
387  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
388
389  // Update the loop and try membership of `block`, which was spawned from `reference`.
390  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
391  // should be the new back edge.
392  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
393                                             HBasicBlock* reference,
394                                             bool replace_if_back_edge);
395
396  // Need to add a couple of blocks to test if the loop body is entered and
397  // put deoptimization instructions, etc.
398  void TransformLoopHeaderForBCE(HBasicBlock* header);
399
400  // Removes `block` from the graph. Assumes `block` has been disconnected from
401  // other blocks and has no instructions or phis.
402  void DeleteDeadEmptyBlock(HBasicBlock* block);
403
404  // Splits the edge between `block` and `successor` while preserving the
405  // indices in the predecessor/successor lists. If there are multiple edges
406  // between the blocks, the lowest indices are used.
407  // Returns the new block which is empty and has the same dex pc as `successor`.
408  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
409
410  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
411  void SimplifyLoop(HBasicBlock* header);
412
413  int32_t GetNextInstructionId() {
414    DCHECK_NE(current_instruction_id_, INT32_MAX);
415    return current_instruction_id_++;
416  }
417
418  int32_t GetCurrentInstructionId() const {
419    return current_instruction_id_;
420  }
421
422  void SetCurrentInstructionId(int32_t id) {
423    DCHECK_GE(id, current_instruction_id_);
424    current_instruction_id_ = id;
425  }
426
427  uint16_t GetMaximumNumberOfOutVRegs() const {
428    return maximum_number_of_out_vregs_;
429  }
430
431  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
432    maximum_number_of_out_vregs_ = new_value;
433  }
434
435  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
436    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
437  }
438
439  void UpdateTemporariesVRegSlots(size_t slots) {
440    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
441  }
442
443  size_t GetTemporariesVRegSlots() const {
444    DCHECK(!in_ssa_form_);
445    return temporaries_vreg_slots_;
446  }
447
448  void SetNumberOfVRegs(uint16_t number_of_vregs) {
449    number_of_vregs_ = number_of_vregs;
450  }
451
452  uint16_t GetNumberOfVRegs() const {
453    return number_of_vregs_;
454  }
455
456  void SetNumberOfInVRegs(uint16_t value) {
457    number_of_in_vregs_ = value;
458  }
459
460  uint16_t GetNumberOfInVRegs() const {
461    return number_of_in_vregs_;
462  }
463
464  uint16_t GetNumberOfLocalVRegs() const {
465    DCHECK(!in_ssa_form_);
466    return number_of_vregs_ - number_of_in_vregs_;
467  }
468
469  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
470    return reverse_post_order_;
471  }
472
473  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
474    DCHECK(GetReversePostOrder()[0] == entry_block_);
475    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
476  }
477
478  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
479    return ReverseRange(GetReversePostOrder());
480  }
481
482  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
483    return linear_order_;
484  }
485
486  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
487    return ReverseRange(GetLinearOrder());
488  }
489
490  bool HasBoundsChecks() const {
491    return has_bounds_checks_;
492  }
493
494  void SetHasBoundsChecks(bool value) {
495    has_bounds_checks_ = value;
496  }
497
498  bool ShouldGenerateConstructorBarrier() const {
499    return should_generate_constructor_barrier_;
500  }
501
502  bool IsDebuggable() const { return debuggable_; }
503
504  // Returns a constant of the given type and value. If it does not exist
505  // already, it is created and inserted into the graph. This method is only for
506  // integral types.
507  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
508
509  // TODO: This is problematic for the consistency of reference type propagation
510  // because it can be created anytime after the pass and thus it will be left
511  // with an invalid type.
512  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
513
514  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
515    return CreateConstant(value, &cached_int_constants_, dex_pc);
516  }
517  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
518    return CreateConstant(value, &cached_long_constants_, dex_pc);
519  }
520  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
521    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
522  }
523  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
524    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
525  }
526
527  HCurrentMethod* GetCurrentMethod();
528
529  const DexFile& GetDexFile() const {
530    return dex_file_;
531  }
532
533  uint32_t GetMethodIdx() const {
534    return method_idx_;
535  }
536
537  InvokeType GetInvokeType() const {
538    return invoke_type_;
539  }
540
541  InstructionSet GetInstructionSet() const {
542    return instruction_set_;
543  }
544
545  bool IsCompilingOsr() const { return osr_; }
546
547  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
548    return cha_single_implementation_list_;
549  }
550
551  void AddCHASingleImplementationDependency(ArtMethod* method) {
552    cha_single_implementation_list_.insert(method);
553  }
554
555  bool HasShouldDeoptimizeFlag() const {
556    return number_of_cha_guards_ != 0;
557  }
558
559  bool HasTryCatch() const { return has_try_catch_; }
560  void SetHasTryCatch(bool value) { has_try_catch_ = value; }
561
562  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
563  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
564
565  ArtMethod* GetArtMethod() const { return art_method_; }
566  void SetArtMethod(ArtMethod* method) { art_method_ = method; }
567
568  // Returns an instruction with the opposite boolean value from 'cond'.
569  // The instruction has been inserted into the graph, either as a constant, or
570  // before cursor.
571  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
572
573  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
574
575  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
576  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
577  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
578
579 private:
580  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
581  void RemoveDeadBlocks(const ArenaBitVector& visited);
582
583  template <class InstructionType, typename ValueType>
584  InstructionType* CreateConstant(ValueType value,
585                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
586                                  uint32_t dex_pc = kNoDexPc) {
587    // Try to find an existing constant of the given value.
588    InstructionType* constant = nullptr;
589    auto cached_constant = cache->find(value);
590    if (cached_constant != cache->end()) {
591      constant = cached_constant->second;
592    }
593
594    // If not found or previously deleted, create and cache a new instruction.
595    // Don't bother reviving a previously deleted instruction, for simplicity.
596    if (constant == nullptr || constant->GetBlock() == nullptr) {
597      constant = new (arena_) InstructionType(value, dex_pc);
598      cache->Overwrite(value, constant);
599      InsertConstant(constant);
600    }
601    return constant;
602  }
603
604  void InsertConstant(HConstant* instruction);
605
606  // Cache a float constant into the graph. This method should only be
607  // called by the SsaBuilder when creating "equivalent" instructions.
608  void CacheFloatConstant(HFloatConstant* constant);
609
610  // See CacheFloatConstant comment.
611  void CacheDoubleConstant(HDoubleConstant* constant);
612
613  ArenaAllocator* const arena_;
614
615  // List of blocks in insertion order.
616  ArenaVector<HBasicBlock*> blocks_;
617
618  // List of blocks to perform a reverse post order tree traversal.
619  ArenaVector<HBasicBlock*> reverse_post_order_;
620
621  // List of blocks to perform a linear order tree traversal. Unlike the reverse
622  // post order, this order is not incrementally kept up-to-date.
623  ArenaVector<HBasicBlock*> linear_order_;
624
625  HBasicBlock* entry_block_;
626  HBasicBlock* exit_block_;
627
628  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
629  uint16_t maximum_number_of_out_vregs_;
630
631  // The number of virtual registers in this method. Contains the parameters.
632  uint16_t number_of_vregs_;
633
634  // The number of virtual registers used by parameters of this method.
635  uint16_t number_of_in_vregs_;
636
637  // Number of vreg size slots that the temporaries use (used in baseline compiler).
638  size_t temporaries_vreg_slots_;
639
640  // Has bounds checks. We can totally skip BCE if it's false.
641  bool has_bounds_checks_;
642
643  // Flag whether there are any try/catch blocks in the graph. We will skip
644  // try/catch-related passes if false.
645  bool has_try_catch_;
646
647  // Flag whether there are any irreducible loops in the graph.
648  bool has_irreducible_loops_;
649
650  // Indicates whether the graph should be compiled in a way that
651  // ensures full debuggability. If false, we can apply more
652  // aggressive optimizations that may limit the level of debugging.
653  const bool debuggable_;
654
655  // The current id to assign to a newly added instruction. See HInstruction.id_.
656  int32_t current_instruction_id_;
657
658  // The dex file from which the method is from.
659  const DexFile& dex_file_;
660
661  // The method index in the dex file.
662  const uint32_t method_idx_;
663
664  // If inlined, this encodes how the callee is being invoked.
665  const InvokeType invoke_type_;
666
667  // Whether the graph has been transformed to SSA form. Only used
668  // in debug mode to ensure we are not using properties only valid
669  // for non-SSA form (like the number of temporaries).
670  bool in_ssa_form_;
671
672  const bool should_generate_constructor_barrier_;
673
674  // Number of CHA guards in the graph. Used to short-circuit the
675  // CHA guard optimization pass when there is no CHA guard left.
676  uint32_t number_of_cha_guards_;
677
678  const InstructionSet instruction_set_;
679
680  // Cached constants.
681  HNullConstant* cached_null_constant_;
682  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
683  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
684  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
685  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
686
687  HCurrentMethod* cached_current_method_;
688
689  // The ArtMethod this graph is for. Note that for AOT, it may be null,
690  // for example for methods whose declaring class could not be resolved
691  // (such as when the superclass could not be found).
692  ArtMethod* art_method_;
693
694  // Keep the RTI of inexact Object to avoid having to pass stack handle
695  // collection pointer to passes which may create NullConstant.
696  ReferenceTypeInfo inexact_object_rti_;
697
698  // Whether we are compiling this graph for on stack replacement: this will
699  // make all loops seen as irreducible and emit special stack maps to mark
700  // compiled code entries which the interpreter can directly jump to.
701  const bool osr_;
702
703  // List of methods that are assumed to have single implementation.
704  ArenaSet<ArtMethod*> cha_single_implementation_list_;
705
706  friend class SsaBuilder;           // For caching constants.
707  friend class SsaLivenessAnalysis;  // For the linear order.
708  friend class HInliner;             // For the reverse post order.
709  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
710  DISALLOW_COPY_AND_ASSIGN(HGraph);
711};
712
// Per-loop metadata: the header block, back edges, the set of blocks that
// belong to the loop, and irreducibility flags.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The loop's HSuspendCheck instruction, if any.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector of block ids belonging to this loop.
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Populated means at least one block (the header) has been added.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
820
821// Stores try/catch information for basic blocks.
822// Note that HGraph is constructed so that catch blocks cannot simultaneously
823// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  // A block is a try block iff it was built with the try constructor above.
  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // A catch-all handler has no valid type index.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};
873
// Sentinel lifetime position meaning "not assigned yet" (see lifetime_start_/lifetime_end_).
static constexpr size_t kNoLifetime = -1;
// Sentinel id for blocks that have not yet been numbered by the graph.
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a doubly linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views of the successor list split by edge kind (normal vs. exceptional).
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears among the successors, considering only
  // entries at index `start_from` and beyond.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Records `back_edge` as a back edge of the loop headed by `this`,
  // lazily creating the loop information on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Adds `block` as a successor and symmetrically registers `this` as its predecessor.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Redirects the edge this->existing to this->new_block, keeping the
  // successor index stable and updating both predecessor lists.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Redirects the edge existing->this to new_block->this, keeping the
  // predecessor index stable and updating both successor lists.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes the first occurrence of `block`; `block` must be present.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes the first occurrence of `block`; `block` must be present.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds `block` as a predecessor and symmetrically registers `this` as its successor.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  // Index of the first occurrence of `predecessor`; it must be present.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Index of the first occurrence of `successor`; it must be present.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop this block belongs to (or heads); null when not in a loop.
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  // Lifetime interval bounds; kNoLifetime until assigned.
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Try/catch information; null for blocks that are neither try nor catch blocks.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1229
1230// Iterates over the LoopInformation of all loops which contain 'block'
1231// from the innermost to the outermost.
1232class HLoopInformationOutwardIterator : public ValueObject {
1233 public:
1234  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1235      : current_(block.GetLoopInformation()) {}
1236
1237  bool Done() const { return current_ == nullptr; }
1238
1239  void Advance() {
1240    DCHECK(!Done());
1241    current_ = current_->GetPreHeader()->GetLoopInformation();
1242  }
1243
1244  HLoopInformation* Current() const {
1245    DCHECK(!Done());
1246    return current_;
1247  }
1248
1249 private:
1250  HLoopInformation* current_;
1251
1252  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1253};
1254
// X-macro listing the concrete (instantiable) HInstruction subclasses common
// to all target architectures. M(type, super) is expanded once per entry.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(ShouldDeoptimizeFlag, Instruction)                                  \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(InvokePolymorphic, Invoke)                                          \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

1338/*
1339 * Instructions, shared across several (not all) architectures.
1340 */
1341#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1342#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1343#else
1344#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1345  M(BitwiseNegatedRight, Instruction)                                   \
1346  M(MultiplyAccumulate, Instruction)                                    \
1347  M(IntermediateAddress, Instruction)
1348#endif
1349
1350#ifndef ART_ENABLE_CODEGEN_arm
1351#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1352#else
1353#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1354  M(ArmDexCacheArraysBase, Instruction)
1355#endif
1356
1357#ifndef ART_ENABLE_CODEGEN_arm64
1358#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1359#else
1360#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
1361  M(Arm64DataProcWithShifterOp, Instruction)
1362#endif
1363
1364#ifndef ART_ENABLE_CODEGEN_mips
1365#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1366#else
1367#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
1368  M(MipsComputeBaseMethodAddress, Instruction)                          \
1369  M(MipsDexCacheArraysBase, Instruction)                                \
1370  M(MipsPackedSwitch, Instruction)
1371#endif
1372
1373#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1374
1375#ifndef ART_ENABLE_CODEGEN_x86
1376#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1377#else
1378#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1379  M(X86ComputeBaseMethodAddress, Instruction)                           \
1380  M(X86LoadFromConstantTable, Instruction)                              \
1381  M(X86FPNeg, Instruction)                                              \
1382  M(X86PackedSwitch, Instruction)
1383#endif
1384
1385#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1386
1387#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1388  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1389  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1390  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1391  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1392  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1393  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1394  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1395  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1396
// Abstract (non-instantiable) HInstruction base classes.
#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

// Every instruction class: concrete first, then abstract.
#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare H<type> for every instruction in the list above.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate members every concrete instruction class must override:
// kind, debug name, same-type comparison, and visitor dispatch.
#define DECLARE_INSTRUCTION(type)                                         \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
  const char* DebugName() const OVERRIDE { return #type; }                \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
    return other->Is##type();                                             \
  }                                                                       \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate Is/As helpers for abstract instruction base classes.
#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
  bool Is##type() const { return As##type() != nullptr; }               \
  const H##type* As##type() const { return this; }                      \
  H##type* As##type() { return this; }
1424
// A node in an instruction's use list: records the user (of type T, e.g.
// HInstruction* or HEnvironment* — see HUserRecord below) and the index of
// the input at which the instruction is used.
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  T GetUser() const { return user_; }
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  // Construction is restricted to HInstruction (a friend below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

// Use list of an instruction, kept as an intrusive singly-linked list of nodes.
template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1450
// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  // Rebinds an existing record to a new before-use-node position.
  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  // Iterator just before this record's node in the instruction's use list.
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  // Iterator to the use node itself (one step past the before-node).
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
1478
1479// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1480// This is used for HInstruction::GetInputs() to return a container wrapper providing
1481// HInstruction* values even though the underlying container has HUserRecord<>s.
1482struct HInputExtractor {
1483  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1484    return record.GetInstruction();
1485  }
1486  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1487    return record.GetInstruction();
1488  }
1489};
1490
1491using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1492using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1493
1494/**
1495 * Side-effects representation.
1496 *
1497 * For write/read dependences on fields/arrays, the dependence analysis uses
1498 * type disambiguation (e.g. a float field write cannot modify the value of an
1499 * integer field read) and the access type (e.g.  a reference array write cannot
1500 * modify the value of a reference field read [although it may modify the
1501 * reference fetch prior to reading the field, which is represented by its own
1502 * write/read dependence]). The analysis makes conservative points-to
1503 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1504 * the same, and any reference read depends on any reference read without
1505 * further regard of its type).
1506 *
 * The internal representation uses 38 bits and is described in the table below.
1508 * The first line indicates the side effect, and for field/array accesses the
1509 * second line indicates the type of the access (in the order of the
1510 * Primitive::Type enum).
1511 * The two numbered lines below indicate the bit position in the bitfield (read
1512 * vertically).
1513 *
1514 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1515 *   +-------------+---------+---------+--------------+---------+---------+
1516 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1517 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1518 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1519 *
1520 * Note that, to ease the implementation, 'changes' bits are least significant
1521 * bits, while 'dependency' bits are most significant bits.
1522 */
// Value type describing which memory locations an instruction may write
// ('change' bits, least significant half) and which writes it may observe
// ('depend on' bits, most significant half). See the layout table above.
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  // No side effects and no dependencies.
  static SideEffects None() {
    return SideEffects(0);
  }

  // Every 'change' bit and every 'depend on' bit set.
  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  // All 'change' bits (field/array writes plus 'can trigger GC').
  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  // All 'depend on' bits (field/array reads plus 'depends on GC').
  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  // Everything except the single 'depends on GC' dependency bit.
  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  // All field/array writes and reads, but no GC bits.
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Write of a field of the given type. A volatile access acts as a full
  // read/write barrier, hence the conservative AllWritesAndReads().
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  // Read of a field of the given type; volatile reads are treated like
  // volatile writes (full barrier).
  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this minus any bits present in `other`.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // In-place union with `other`.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if every bit of `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift our 'depend on' bits down so they line up with `other`'s
    // 'change' bits; any overlap is a potential dependence.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    // Walk the bits from most significant (kLastBit) down to 0 so the output
    // matches the left-to-right layout in the class comment.
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        // (kDebug[kCanTriggerGCBit] is never read: that bit is handled above.)
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // One bit per Primitive::Type value (9 non-void types) in each group.
  static constexpr int kFieldArrayAnalysisBits = 9;

  // 'Change' half: field writes [0,8], array writes [9,17], GC trigger bit 18.
  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  // 'Depend on' half: field reads [19,27], array reads [28,36], GC bit 37.
  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    // `shift` only feeds the DCHECKs: it verifies the type index fits in one
    // 9-bit field group. The actual bit position is `type + offset`.
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1718
1719// A HEnvironment object contains the values of virtual registers at a given location.
1720class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
1721 public:
1722  HEnvironment(ArenaAllocator* arena,
1723               size_t number_of_vregs,
1724               const DexFile& dex_file,
1725               uint32_t method_idx,
1726               uint32_t dex_pc,
1727               InvokeType invoke_type,
1728               HInstruction* holder)
1729     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
1730       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
1731       parent_(nullptr),
1732       dex_file_(dex_file),
1733       method_idx_(method_idx),
1734       dex_pc_(dex_pc),
1735       invoke_type_(invoke_type),
1736       holder_(holder) {
1737  }
1738
1739  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
1740      : HEnvironment(arena,
1741                     to_copy.Size(),
1742                     to_copy.GetDexFile(),
1743                     to_copy.GetMethodIdx(),
1744                     to_copy.GetDexPc(),
1745                     to_copy.GetInvokeType(),
1746                     holder) {}
1747
1748  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
1749    if (parent_ != nullptr) {
1750      parent_->SetAndCopyParentChain(allocator, parent);
1751    } else {
1752      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
1753      parent_->CopyFrom(parent);
1754      if (parent->GetParent() != nullptr) {
1755        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
1756      }
1757    }
1758  }
1759
1760  void CopyFrom(const ArenaVector<HInstruction*>& locals);
1761  void CopyFrom(HEnvironment* environment);
1762
1763  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
1764  // input to the loop phi instead. This is for inserting instructions that
1765  // require an environment (like HDeoptimization) in the loop pre-header.
1766  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
1767
1768  void SetRawEnvAt(size_t index, HInstruction* instruction) {
1769    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
1770  }
1771
1772  HInstruction* GetInstructionAt(size_t index) const {
1773    return vregs_[index].GetInstruction();
1774  }
1775
1776  void RemoveAsUserOfInput(size_t index) const;
1777
1778  size_t Size() const { return vregs_.size(); }
1779
1780  HEnvironment* GetParent() const { return parent_; }
1781
1782  void SetLocationAt(size_t index, Location location) {
1783    locations_[index] = location;
1784  }
1785
1786  Location GetLocationAt(size_t index) const {
1787    return locations_[index];
1788  }
1789
1790  uint32_t GetDexPc() const {
1791    return dex_pc_;
1792  }
1793
1794  uint32_t GetMethodIdx() const {
1795    return method_idx_;
1796  }
1797
1798  InvokeType GetInvokeType() const {
1799    return invoke_type_;
1800  }
1801
1802  const DexFile& GetDexFile() const {
1803    return dex_file_;
1804  }
1805
1806  HInstruction* GetHolder() const {
1807    return holder_;
1808  }
1809
1810
1811  bool IsFromInlinedInvoke() const {
1812    return GetParent() != nullptr;
1813  }
1814
1815 private:
1816  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
1817  ArenaVector<Location> locations_;
1818  HEnvironment* parent_;
1819  const DexFile& dex_file_;
1820  const uint32_t method_idx_;
1821  const uint32_t dex_pc_;
1822  const InvokeType invoke_type_;
1823
1824  // The instruction that holds this environment.
1825  HInstruction* const holder_;
1826
1827  friend class HInstruction;
1828
1829  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
1830};
1831
// Abstract base class of all IR instructions. Owns the intrusive use lists
// linking an instruction to its users and to environments that reference it.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// Generates a kFoo enumerator for every instruction type.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  // Intra-block linked-list navigation.
  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  // Subclasses expose their (possibly fixed-size) input record storage here.
  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  // Linear scan over the inputs; returns true if `input` is among them.
  bool HasInput(HInstruction* input) const {
    for (const HInstruction* i : GetInputs()) {
      if (i == input) {
        return true;
      }
    }
    return false;
  }

  // Raw because the use list of `input` is not updated.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    // Reassemble the RTI from the stored handle and the packed IsExact flag.
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` uses this instruction as input number `index`,
  // prepending a node to the use list and fixing up the affected records.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Environment counterpart of AddUseAt().
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Unlinks this instruction from the use list of its `input`-th input.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Unlinks this instruction from the use lists of all of its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Whether this instruction may be deleted if its (non-environment) uses
  // are otherwise taken care of; see IsDeadAndRemovable() below.
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing environment chain; the previous
  // environment becomes its parent. Raw: use lists are not updated.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom(), but loop phis for `block` are replaced by
  // their first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declares IsFoo()/AsFoo() for every concrete instruction; defined elsewhere.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract instructions the defaults return nullptr; concrete subclasses
// override the virtuals.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  //       meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash combining the kind and the ids of all inputs (used e.g. for
  // value-numbering-style lookups).
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  // Liveness information, set by the liveness analysis.
  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses start allocating their own packed bits from here.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Raw because the use list of the new input is not updated.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After nodes were prepended to `uses_`, rewrite the input records of the
  // affected users so their stored 'before' iterators stay correct.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After a node following `before_use_node` was erased, refresh the input
  // record of the new successor (if any) to point at `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseInsertion().
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
// Pretty-printer for instruction kinds; defined outside this header.
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2316
2317// Iterates over the instructions, while preserving the next instruction
2318// in case the current instruction gets removed from the list by the user
2319// of this iterator.
2320class HInstructionIterator : public ValueObject {
2321 public:
2322  explicit HInstructionIterator(const HInstructionList& instructions)
2323      : instruction_(instructions.first_instruction_) {
2324    next_ = Done() ? nullptr : instruction_->GetNext();
2325  }
2326
2327  bool Done() const { return instruction_ == nullptr; }
2328  HInstruction* Current() const { return instruction_; }
2329  void Advance() {
2330    instruction_ = next_;
2331    next_ = Done() ? nullptr : instruction_->GetNext();
2332  }
2333
2334 private:
2335  HInstruction* instruction_;
2336  HInstruction* next_;
2337
2338  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2339};
2340
2341// Iterates over the instructions without saving the next instruction,
2342// therefore handling changes in the graph potentially made by the user
2343// of this iterator.
2344class HInstructionIteratorHandleChanges : public ValueObject {
2345 public:
2346  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2347      : instruction_(instructions.first_instruction_) {
2348  }
2349
2350  bool Done() const { return instruction_ == nullptr; }
2351  HInstruction* Current() const { return instruction_; }
2352  void Advance() {
2353    instruction_ = instruction_->GetNext();
2354  }
2355
2356 private:
2357  HInstruction* instruction_;
2358
2359  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2360};
2361
2362
2363class HBackwardInstructionIterator : public ValueObject {
2364 public:
2365  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2366      : instruction_(instructions.last_instruction_) {
2367    next_ = Done() ? nullptr : instruction_->GetPrevious();
2368  }
2369
2370  bool Done() const { return instruction_ == nullptr; }
2371  HInstruction* Current() const { return instruction_; }
2372  void Advance() {
2373    instruction_ = next_;
2374    next_ = Done() ? nullptr : instruction_->GetPrevious();
2375  }
2376
2377 private:
2378  HInstruction* instruction_;
2379  HInstruction* next_;
2380
2381  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2382};
2383
// Base class for instructions whose number of inputs can vary, backed by an
// arena-allocated vector rather than the fixed-size array used by
// HTemplateInstruction.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  // Mutators for the input list (defined out of line).
  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 protected:
  // `kind` selects the arena allocation bucket used for `inputs_`.
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};
2409
// Base class for instructions with a fixed number of inputs (N), stored
// inline in a std::array instead of an arena vector.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};
2427
// HTemplateInstruction specialization for N=0: no input storage at all,
// so GetInputRecords() returns an empty array ref.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};
2445
// An instruction with N inputs that produces a value; the result type is
// encoded in the generic packed fields, right after HInstruction's bits.
// NOTE(review): the template parameter is intptr_t while the base
// HTemplateInstruction takes size_t; the value converts implicitly, but
// unifying the types would be cleaner — confirm nothing relies on intptr_t.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Layout of the type bit field within the packed fields.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};
2468
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  // Takes no inputs; only carries the dex_pc.
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2483
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  // The returned value is input 0.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2500
// Represents an SSA phi function: one input per predecessor of its block,
// all corresponding to the same dex register `reg_number_`.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges values for.
  uint32_t GetRegNumber() const { return reg_number_; }

  // Liveness as decided by SsaBuilder/SsaPhiElimination (see ctor comment).
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  // NOTE(review): GetNext() results are used via AsPhi() without a null
  // check — relies on successors in this list being phis; confirm.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type bits, then two flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2590
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  // No inputs and no side effects; purely a control-flow marker.
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2605
// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The enclosing block has a single successor: the jump target.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2622
// Base class for compile-time constants of any primitive type.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  // For floating point this includes -0.0, unlike IsZeroBitPattern().
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // Raw bit representation, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2646
2647class HNullConstant FINAL : public HConstant {
2648 public:
2649  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2650    return true;
2651  }
2652
2653  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2654
2655  size_t ComputeHashCode() const OVERRIDE { return 0; }
2656
2657  // The null constant representation is a 0-bit pattern.
2658  virtual bool IsZeroBitPattern() const { return true; }
2659
2660  DECLARE_INSTRUCTION(NullConstant);
2661
2662 private:
2663  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2664
2665  friend class HGraph;
2666  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2667};
2668
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  // Zero-extend the 32-bit pattern (avoid sign extension of negatives).
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Construction is restricted to HGraph so constants can be canonicalized.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2711
// Constants of the type long (64-bit signed integer).
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Construction is restricted to HGraph so constants can be canonicalized.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2741
// Constants of the type float. Value comparisons are done on the bit
// pattern (via bit_cast), so NaN payloads and the sign of zero matter.
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  // Note: truncates the float value to an integer hash.
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Exact bit comparison: only +0.0f qualifies, not -0.0f.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs from a raw 32-bit pattern (e.g. from dex bytecode).
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2794
// Constants of the type double. Like HFloatConstant, comparisons are done
// on the 64-bit pattern (via bit_cast), not with floating-point equality.
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  // Note: truncates the double value to an integer hash.
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Exact bit comparison: only +0.0 qualifies, not -0.0.
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs from a raw 64-bit pattern (e.g. from dex bytecode).
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2845
// Conditional branch. A block ending with an HIf instruction must have
// two successors: index 0 is taken when the input is true, index 1 when
// it is false.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2870
2871
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // All successors except index zero, i.e. the exception handlers.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  // Compares handler sets; defined out of line.
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // BoundaryKind is packed right after the generic HInstruction bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2933
// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize FINAL : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Needs an environment to reconstruct interpreter state, and transfers
  // control out of compiled code, hence CanThrow.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2956
// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
// The compiled code checks this flag value in a guard before devirtualized call and
// if it's true, starts to do deoptimization.
// It has a 4-byte slot on stack.
// TODO: allocate a register for this flag.
class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
 public:
  // CHA guards are only optimized in a separate pass and it has no side effects
  // with regard to other passes.
  HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc)
      : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) {
  }

  // The flag is a 32-bit value (see the 4-byte stack slot above).
  Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; }

  // We do all CHA guard elimination/motion in a single pass, after which there is no
  // further guard elimination/motion since a guard might have been used for justification
  // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
  // to avoid other optimizations trying to move it.
  bool CanBeMoved() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
};
2983
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  // `type` is the pointer-sized primitive type for the current architecture.
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2997
2998// Fetches an ArtMethod from the virtual table or the interface method table
2999// of a class.
3000class HClassTableGet FINAL : public HExpression<1> {
3001 public:
3002  enum class TableKind {
3003    kVTable,
3004    kIMTable,
3005    kLast = kIMTable
3006  };
3007  HClassTableGet(HInstruction* cls,
3008                 Primitive::Type type,
3009                 TableKind kind,
3010                 size_t index,
3011                 uint32_t dex_pc)
3012      : HExpression(type, SideEffects::None(), dex_pc),
3013        index_(index) {
3014    SetPackedField<TableKindField>(kind);
3015    SetRawInputAt(0, cls);
3016  }
3017
3018  bool CanBeMoved() const OVERRIDE { return true; }
3019  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3020    return other->AsClassTableGet()->GetIndex() == index_ &&
3021        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3022  }
3023
3024  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3025  size_t GetIndex() const { return index_; }
3026
3027  DECLARE_INSTRUCTION(ClassTableGet);
3028
3029 private:
3030  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3031  static constexpr size_t kFieldTableKindSize =
3032      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3033  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3034  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3035                "Too many packed fields.");
3036  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3037
3038  // The index of the ArtMethod in the table.
3039  const size_t index_;
3040
3041  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
3042};
3043
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  // Cases cover the contiguous range [start_value, start_value + num_entries).
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
3077
// Base class for operations with a single input that produce a value.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`. Subclasses implement constant folding
  // for each supported input type.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
3109
// Base class for operations with two inputs (input 0 = left, input 1 =
// right) that produce a value.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: identical inputs, or constant already on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. The (null, null) and (long, int)
  // combinations fatally abort unless a subclass overrides them.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3199
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (e.g. for a non-floating-point operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};
3208
3209std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3210
class HCondition : public HBinaryOperation {
 public:
  // A condition produces a boolean and has no side effects.  It starts with
  // no comparison bias; a bias is only set when a floating-point HCompare is
  // merged into the condition.
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  // The condition kind tested by this instruction (e.g. kCondEQ for HEqual).
  virtual IfCondition GetCondition() const = 0;

  // The logical negation of GetCondition().
  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  // Two conditions are interchangeable only when all packed fields (including
  // the comparison bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Returns whether this floating-point condition evaluates to true when an
  // input is NaN: NE always does, EQ never does, and GT/GE do exactly when
  // NaN is biased to compare as greater (gt bias).
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Dual of IsFPConditionTrueIfNaN(): EQ is always false on NaN, NE never
  // is, and LT/LE are false exactly when NaN is biased to compare as greater.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point inputs; an unordered result
  // (a NaN is involved) yields 1 under gt bias and -1 under lt bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3290
3291// Instruction to check if two inputs are equal to each other.
3292class HEqual FINAL : public HCondition {
3293 public:
3294  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3295      : HCondition(first, second, dex_pc) {}
3296
3297  bool IsCommutative() const OVERRIDE { return true; }
3298
3299  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3300                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3301    return MakeConstantCondition(true, GetDexPc());
3302  }
3303  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3304    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3305  }
3306  // In the following Evaluate methods, a HCompare instruction has
3307  // been merged into this HEqual instruction; evaluate it as
3308  // `Compare(x, y) == 0`.
3309  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3310    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3311                                 GetDexPc());
3312  }
3313  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3314    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3315  }
3316  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3317    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3318  }
3319
3320  DECLARE_INSTRUCTION(Equal);
3321
3322  IfCondition GetCondition() const OVERRIDE {
3323    return kCondEQ;
3324  }
3325
3326  IfCondition GetOppositeCondition() const OVERRIDE {
3327    return kCondNE;
3328  }
3329
3330 private:
3331  template <typename T> static bool Compute(T x, T y) { return x == y; }
3332
3333  DISALLOW_COPY_AND_ASSIGN(HEqual);
3334};
3335
3336class HNotEqual FINAL : public HCondition {
3337 public:
3338  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3339      : HCondition(first, second, dex_pc) {}
3340
3341  bool IsCommutative() const OVERRIDE { return true; }
3342
3343  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3344                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3345    return MakeConstantCondition(false, GetDexPc());
3346  }
3347  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3348    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3349  }
3350  // In the following Evaluate methods, a HCompare instruction has
3351  // been merged into this HNotEqual instruction; evaluate it as
3352  // `Compare(x, y) != 0`.
3353  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3354    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3355  }
3356  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3357    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3358  }
3359  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3360    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3361  }
3362
3363  DECLARE_INSTRUCTION(NotEqual);
3364
3365  IfCondition GetCondition() const OVERRIDE {
3366    return kCondNE;
3367  }
3368
3369  IfCondition GetOppositeCondition() const OVERRIDE {
3370    return kCondEQ;
3371  }
3372
3373 private:
3374  template <typename T> static bool Compute(T x, T y) { return x != y; }
3375
3376  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3377};
3378
3379class HLessThan FINAL : public HCondition {
3380 public:
3381  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3382      : HCondition(first, second, dex_pc) {}
3383
3384  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3385    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3386  }
3387  // In the following Evaluate methods, a HCompare instruction has
3388  // been merged into this HLessThan instruction; evaluate it as
3389  // `Compare(x, y) < 0`.
3390  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3391    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3392  }
3393  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3394    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3395  }
3396  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3397    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3398  }
3399
3400  DECLARE_INSTRUCTION(LessThan);
3401
3402  IfCondition GetCondition() const OVERRIDE {
3403    return kCondLT;
3404  }
3405
3406  IfCondition GetOppositeCondition() const OVERRIDE {
3407    return kCondGE;
3408  }
3409
3410 private:
3411  template <typename T> static bool Compute(T x, T y) { return x < y; }
3412
3413  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3414};
3415
3416class HLessThanOrEqual FINAL : public HCondition {
3417 public:
3418  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3419      : HCondition(first, second, dex_pc) {}
3420
3421  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3422    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3423  }
3424  // In the following Evaluate methods, a HCompare instruction has
3425  // been merged into this HLessThanOrEqual instruction; evaluate it as
3426  // `Compare(x, y) <= 0`.
3427  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3428    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3429  }
3430  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3431    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3432  }
3433  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3434    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3435  }
3436
3437  DECLARE_INSTRUCTION(LessThanOrEqual);
3438
3439  IfCondition GetCondition() const OVERRIDE {
3440    return kCondLE;
3441  }
3442
3443  IfCondition GetOppositeCondition() const OVERRIDE {
3444    return kCondGT;
3445  }
3446
3447 private:
3448  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3449
3450  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3451};
3452
3453class HGreaterThan FINAL : public HCondition {
3454 public:
3455  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3456      : HCondition(first, second, dex_pc) {}
3457
3458  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3459    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3460  }
3461  // In the following Evaluate methods, a HCompare instruction has
3462  // been merged into this HGreaterThan instruction; evaluate it as
3463  // `Compare(x, y) > 0`.
3464  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3465    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3466  }
3467  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3468    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3469  }
3470  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3471    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3472  }
3473
3474  DECLARE_INSTRUCTION(GreaterThan);
3475
3476  IfCondition GetCondition() const OVERRIDE {
3477    return kCondGT;
3478  }
3479
3480  IfCondition GetOppositeCondition() const OVERRIDE {
3481    return kCondLE;
3482  }
3483
3484 private:
3485  template <typename T> static bool Compute(T x, T y) { return x > y; }
3486
3487  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3488};
3489
3490class HGreaterThanOrEqual FINAL : public HCondition {
3491 public:
3492  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3493      : HCondition(first, second, dex_pc) {}
3494
3495  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3496    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3497  }
3498  // In the following Evaluate methods, a HCompare instruction has
3499  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3500  // `Compare(x, y) >= 0`.
3501  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3502    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3503  }
3504  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3505    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3506  }
3507  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3508    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3509  }
3510
3511  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3512
3513  IfCondition GetCondition() const OVERRIDE {
3514    return kCondGE;
3515  }
3516
3517  IfCondition GetOppositeCondition() const OVERRIDE {
3518    return kCondLT;
3519  }
3520
3521 private:
3522  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3523
3524  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3525};
3526
3527class HBelow FINAL : public HCondition {
3528 public:
3529  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3530      : HCondition(first, second, dex_pc) {}
3531
3532  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3533    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3534  }
3535  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3536    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3537  }
3538  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3539                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3540    LOG(FATAL) << DebugName() << " is not defined for float values";
3541    UNREACHABLE();
3542  }
3543  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3544                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3545    LOG(FATAL) << DebugName() << " is not defined for double values";
3546    UNREACHABLE();
3547  }
3548
3549  DECLARE_INSTRUCTION(Below);
3550
3551  IfCondition GetCondition() const OVERRIDE {
3552    return kCondB;
3553  }
3554
3555  IfCondition GetOppositeCondition() const OVERRIDE {
3556    return kCondAE;
3557  }
3558
3559 private:
3560  template <typename T> static bool Compute(T x, T y) {
3561    return MakeUnsigned(x) < MakeUnsigned(y);
3562  }
3563
3564  DISALLOW_COPY_AND_ASSIGN(HBelow);
3565};
3566
3567class HBelowOrEqual FINAL : public HCondition {
3568 public:
3569  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3570      : HCondition(first, second, dex_pc) {}
3571
3572  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3573    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3574  }
3575  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3576    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3577  }
3578  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3579                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3580    LOG(FATAL) << DebugName() << " is not defined for float values";
3581    UNREACHABLE();
3582  }
3583  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3584                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3585    LOG(FATAL) << DebugName() << " is not defined for double values";
3586    UNREACHABLE();
3587  }
3588
3589  DECLARE_INSTRUCTION(BelowOrEqual);
3590
3591  IfCondition GetCondition() const OVERRIDE {
3592    return kCondBE;
3593  }
3594
3595  IfCondition GetOppositeCondition() const OVERRIDE {
3596    return kCondA;
3597  }
3598
3599 private:
3600  template <typename T> static bool Compute(T x, T y) {
3601    return MakeUnsigned(x) <= MakeUnsigned(y);
3602  }
3603
3604  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3605};
3606
3607class HAbove FINAL : public HCondition {
3608 public:
3609  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3610      : HCondition(first, second, dex_pc) {}
3611
3612  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3613    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3614  }
3615  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3616    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3617  }
3618  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3619                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3620    LOG(FATAL) << DebugName() << " is not defined for float values";
3621    UNREACHABLE();
3622  }
3623  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3624                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3625    LOG(FATAL) << DebugName() << " is not defined for double values";
3626    UNREACHABLE();
3627  }
3628
3629  DECLARE_INSTRUCTION(Above);
3630
3631  IfCondition GetCondition() const OVERRIDE {
3632    return kCondA;
3633  }
3634
3635  IfCondition GetOppositeCondition() const OVERRIDE {
3636    return kCondBE;
3637  }
3638
3639 private:
3640  template <typename T> static bool Compute(T x, T y) {
3641    return MakeUnsigned(x) > MakeUnsigned(y);
3642  }
3643
3644  DISALLOW_COPY_AND_ASSIGN(HAbove);
3645};
3646
3647class HAboveOrEqual FINAL : public HCondition {
3648 public:
3649  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3650      : HCondition(first, second, dex_pc) {}
3651
3652  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3653    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3654  }
3655  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3656    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3657  }
3658  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3659                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3660    LOG(FATAL) << DebugName() << " is not defined for float values";
3661    UNREACHABLE();
3662  }
3663  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3664                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3665    LOG(FATAL) << DebugName() << " is not defined for double values";
3666    UNREACHABLE();
3667  }
3668
3669  DECLARE_INSTRUCTION(AboveOrEqual);
3670
3671  IfCondition GetCondition() const OVERRIDE {
3672    return kCondAE;
3673  }
3674
3675  IfCondition GetOppositeCondition() const OVERRIDE {
3676    return kCondB;
3677  }
3678
3679 private:
3680  template <typename T> static bool Compute(T x, T y) {
3681    return MakeUnsigned(x) >= MakeUnsigned(y);
3682  }
3683
3684  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3685};
3686
3687// Instruction to check how two inputs compare to each other.
3688// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way comparison: 1 if x > y, -1 if x < y, 0 if equal.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point inputs; an unordered result
  // (a NaN is involved) yields 1 under gt bias and -1 under lt bias.
  template <typename T>
  int32_t ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are interchangeable only when their packed fields (notably
  // the comparison bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // Comparison-bias packed field, laid out after HExpression's packed bits.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3777
// Allocates a new instance of the class produced by the `cls` input.
class HNewInstance FINAL : public HExpression<1> {
 public:
  HNewInstance(HInstruction* cls,
               uint32_t dex_pc,
               dex::TypeIndex type_index,
               const DexFile& dex_file,
               bool needs_access_check,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
  }

  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Needs to call into runtime to make sure it's instantiable/accessible.
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  // The quick entrypoint used to perform the allocation; may be replaced
  // later via SetEntrypoint().
  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // Packed flags, laid out after HExpression's packed bits.
  static constexpr size_t kFlagNeedsAccessCheck = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagFinalizable = kFlagNeedsAccessCheck + 1;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const dex::TypeIndex type_index_;
  const DexFile& dex_file_;
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3835
// Properties passed to HInvoke::SetIntrinsic() describing an intrinsic's
// requirements and effects.
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};

enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};

enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3852
// Abstract base class for all invoke-like instructions.
class HInvoke : public HVariableInputSizeInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  // The return type is stored in the packed fields rather than derived
  // from the instruction kind.
  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetInvokeType() const {
    return GetPackedField<InvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Marks this invoke as a recognized intrinsic; the extra parameters
  // refine its environment needs, side effects and throwing behavior.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only non-writing intrinsics are safe to move (e.g. for GVN/code motion).
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }

  // Two invokes are interchangeable only if they are the same intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  // May be null (see HInvokeUnresolved).
  ArtMethod* GetResolvedMethod() const { return resolved_method_; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: invoke type, then return type, then the can-throw
  // flag, on top of the generic packed bits.
  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldInvokeType + kFieldInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  // `number_of_other_inputs` non-argument inputs are appended after the
  // `number_of_arguments` argument inputs.  Invokes conservatively assume
  // they can throw and touch all fields/arrays until refined.
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          ArtMethod* resolved_method,
          InvokeType invoke_type)
    : HVariableInputSizeInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc,
          arena,
          number_of_arguments + number_of_other_inputs,
          kArenaAllocInvokeInputs),
      number_of_arguments_(number_of_arguments),
      resolved_method_(resolved_method),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<InvokeTypeField>(invoke_type);
    SetPackedFlag<kFlagCanThrow>(true);
  }

  uint32_t number_of_arguments_;
  ArtMethod* const resolved_method_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3962
3963class HInvokeUnresolved FINAL : public HInvoke {
3964 public:
3965  HInvokeUnresolved(ArenaAllocator* arena,
3966                    uint32_t number_of_arguments,
3967                    Primitive::Type return_type,
3968                    uint32_t dex_pc,
3969                    uint32_t dex_method_index,
3970                    InvokeType invoke_type)
3971      : HInvoke(arena,
3972                number_of_arguments,
3973                0u /* number_of_other_inputs */,
3974                return_type,
3975                dex_pc,
3976                dex_method_index,
3977                nullptr,
3978                invoke_type) {
3979  }
3980
3981  DECLARE_INSTRUCTION(InvokeUnresolved);
3982
3983 private:
3984  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
3985};
3986
3987class HInvokePolymorphic FINAL : public HInvoke {
3988 public:
3989  HInvokePolymorphic(ArenaAllocator* arena,
3990                     uint32_t number_of_arguments,
3991                     Primitive::Type return_type,
3992                     uint32_t dex_pc,
3993                     uint32_t dex_method_index)
3994      : HInvoke(arena,
3995                number_of_arguments,
3996                0u /* number_of_other_inputs */,
3997                return_type,
3998                dex_pc,
3999                dex_method_index,
4000                nullptr,
4001                kVirtual) {}
4002
4003  DECLARE_INSTRUCTION(InvokePolymorphic);
4004
4005 private:
4006  DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
4007};
4008
class HInvokeStaticOrDirect FINAL : public HInvoke {
 public:
  // Requirements of this method call regarding the class
  // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {
    kNone,      // Class already initialized.
    kExplicit,  // Static call having explicit clinit check as last input.
    kImplicit,  // Static call implicitly requiring a clinit check.
    kLast = kImplicit
  };

  // Determines how to load the target ArtMethod*.
  enum class MethodLoadKind {
    // Use a String init ArtMethod* loaded from Thread entrypoints.
    kStringInit,

    // Use the method's own ArtMethod* loaded by the register allocator.
    kRecursive,

    // Use ArtMethod* at a known address, embed the direct address in the code.
    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
    kDirectAddress,

    // Load from resolved methods array in the dex cache using a PC-relative load.
    // Used when we need to use the dex cache, for example for invoke-static that
    // may cause class initialization (the entry may point to a resolution method),
    // and we know that we can access the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
    // or unimplemented or impractical (i.e. slow) on a particular architecture.
    kDexCacheViaMethod,
  };

  // Determines the location of the code pointer.
  enum class CodePtrLocation {
    // Recursive call, use local PC-relative call instruction.
    kCallSelf,

    // Use code pointer from the ArtMethod*.
    // Used when we don't know the target code. This is also the last-resort-kind used when
    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
    kCallArtMethod,
  };

  struct DispatchInfo {
    MethodLoadKind method_load_kind;
    CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
    //     Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kDirectAddress
    //   - the dex cache arrays offset for kDexCachePcRel.
    uint64_t method_load_data;
  };

  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        ArtMethod* resolved_method,
                        DispatchInfo dispatch_info,
                        InvokeType invoke_type,
                        MethodReference target_method,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                resolved_method,
                invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }

  // Installs a new dispatch info, dropping the now-redundant current-method
  // input when the new method load kind no longer needs it.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }

  // Inserts `input` at the special input index, i.e. right after the call
  // arguments and before an explicit clinit check input if there is one.
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Debug-build-checked access to the input records: when an explicit clinit
  // check is present, the last input must be the check (or its replacement).
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    // Only reference-returning invokes may yield null; string init is excluded.
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  // The string init entrypoint, stored in `method_load_data` for kStringInit.
  QuickEntrypointEnum GetStringInitEntryPoint() const {
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  // The direct method address, stored in `method_load_data` for kDirectAddress.
  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  // The dex cache arrays offset, stored in `method_load_data` for kDexCachePcRelative.
  uint32_t GetDexCacheArrayOffset() const {
    DCHECK(HasPcRelativeDexCache());
    return dispatch_info_.method_load_data;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values derived from the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
4255std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4256std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4257
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    // A few recognized intrinsics never return null; otherwise fall back to
    // the generic HInvoke answer.
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached vtable index of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};
4305
class HInvokeInterface FINAL : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   ArtMethod* resolved_method,
                   uint32_t imt_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  // Cached IMT index of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};
4341
4342class HNeg FINAL : public HUnaryOperation {
4343 public:
4344  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4345      : HUnaryOperation(result_type, input, dex_pc) {
4346    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4347  }
4348
4349  template <typename T> static T Compute(T x) { return -x; }
4350
4351  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4352    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4353  }
4354  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4355    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4356  }
4357  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4358    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4359  }
4360  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4361    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4362  }
4363
4364  DECLARE_INSTRUCTION(Neg);
4365
4366 private:
4367  DISALLOW_COPY_AND_ASSIGN(HNeg);
4368};
4369
4370class HNewArray FINAL : public HExpression<2> {
4371 public:
4372  HNewArray(HInstruction* length,
4373            HCurrentMethod* current_method,
4374            uint32_t dex_pc,
4375            dex::TypeIndex type_index,
4376            const DexFile& dex_file,
4377            QuickEntrypointEnum entrypoint)
4378      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
4379        type_index_(type_index),
4380        dex_file_(dex_file),
4381        entrypoint_(entrypoint) {
4382    SetRawInputAt(0, length);
4383    SetRawInputAt(1, current_method);
4384  }
4385
4386  dex::TypeIndex GetTypeIndex() const { return type_index_; }
4387  const DexFile& GetDexFile() const { return dex_file_; }
4388
4389  // Calls runtime so needs an environment.
4390  bool NeedsEnvironment() const OVERRIDE { return true; }
4391
4392  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4393  bool CanThrow() const OVERRIDE { return true; }
4394
4395  bool CanBeNull() const OVERRIDE { return false; }
4396
4397  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4398
4399  DECLARE_INSTRUCTION(NewArray);
4400
4401 private:
4402  const dex::TypeIndex type_index_;
4403  const DexFile& dex_file_;
4404  const QuickEntrypointEnum entrypoint_;
4405
4406  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4407};
4408
4409class HAdd FINAL : public HBinaryOperation {
4410 public:
4411  HAdd(Primitive::Type result_type,
4412       HInstruction* left,
4413       HInstruction* right,
4414       uint32_t dex_pc = kNoDexPc)
4415      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4416
4417  bool IsCommutative() const OVERRIDE { return true; }
4418
4419  template <typename T> static T Compute(T x, T y) { return x + y; }
4420
4421  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4422    return GetBlock()->GetGraph()->GetIntConstant(
4423        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4424  }
4425  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4426    return GetBlock()->GetGraph()->GetLongConstant(
4427        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4428  }
4429  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4430    return GetBlock()->GetGraph()->GetFloatConstant(
4431        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4432  }
4433  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4434    return GetBlock()->GetGraph()->GetDoubleConstant(
4435        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4436  }
4437
4438  DECLARE_INSTRUCTION(Add);
4439
4440 private:
4441  DISALLOW_COPY_AND_ASSIGN(HAdd);
4442};
4443
4444class HSub FINAL : public HBinaryOperation {
4445 public:
4446  HSub(Primitive::Type result_type,
4447       HInstruction* left,
4448       HInstruction* right,
4449       uint32_t dex_pc = kNoDexPc)
4450      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4451
4452  template <typename T> static T Compute(T x, T y) { return x - y; }
4453
4454  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4455    return GetBlock()->GetGraph()->GetIntConstant(
4456        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4457  }
4458  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4459    return GetBlock()->GetGraph()->GetLongConstant(
4460        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4461  }
4462  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4463    return GetBlock()->GetGraph()->GetFloatConstant(
4464        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4465  }
4466  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4467    return GetBlock()->GetGraph()->GetDoubleConstant(
4468        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4469  }
4470
4471  DECLARE_INSTRUCTION(Sub);
4472
4473 private:
4474  DISALLOW_COPY_AND_ASSIGN(HSub);
4475};
4476
4477class HMul FINAL : public HBinaryOperation {
4478 public:
4479  HMul(Primitive::Type result_type,
4480       HInstruction* left,
4481       HInstruction* right,
4482       uint32_t dex_pc = kNoDexPc)
4483      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4484
4485  bool IsCommutative() const OVERRIDE { return true; }
4486
4487  template <typename T> static T Compute(T x, T y) { return x * y; }
4488
4489  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4490    return GetBlock()->GetGraph()->GetIntConstant(
4491        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4492  }
4493  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4494    return GetBlock()->GetGraph()->GetLongConstant(
4495        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4496  }
4497  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4498    return GetBlock()->GetGraph()->GetFloatConstant(
4499        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4500  }
4501  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4502    return GetBlock()->GetGraph()->GetDoubleConstant(
4503        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4504  }
4505
4506  DECLARE_INSTRUCTION(Mul);
4507
4508 private:
4509  DISALLOW_COPY_AND_ASSIGN(HMul);
4510};
4511
class HDiv FINAL : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // Integral division used for constant folding; `y` must be non-zero.
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  // Floating-point division used for constant folding.
  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};
4558
class HRem FINAL : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  // Integral remainder used for constant folding; `y` must be non-zero.
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  // Floating-point remainder used for constant folding.
  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};
4605
4606class HDivZeroCheck FINAL : public HExpression<1> {
4607 public:
4608  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
4609  // constructor.
4610  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
4611      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4612    SetRawInputAt(0, value);
4613  }
4614
4615  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }
4616
4617  bool CanBeMoved() const OVERRIDE { return true; }
4618
4619  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4620    return true;
4621  }
4622
4623  bool NeedsEnvironment() const OVERRIDE { return true; }
4624  bool CanThrow() const OVERRIDE { return true; }
4625
4626  DECLARE_INSTRUCTION(DivZeroCheck);
4627
4628 private:
4629  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
4630};
4631
class HShl FINAL : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Left shift; only the low bits of `distance` are used, via masking with
  // `max_shift_distance` (31 for int, 63 for long).
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value << (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Shift distances are always int-typed; the remaining overloads are invalid.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};
4677
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Signed (arithmetic) right shift; only the low bits of `distance` are
  // used, via masking with `max_shift_distance` (31 for int, 63 for long).
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // Shift distances are always int-typed; the remaining overloads are invalid.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};
4723
// Unsigned (logical) shift right: like HShr but zero-extends instead of
// sign-extending. Only the low bits of the distance are used.
class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    // The shift distance is always an int, even when shifting a long value.
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  // Folds the shift at compile time. The value is shifted as its unsigned
  // counterpart so that the right shift brings in zero bits, then cast back.
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  // The remaining Evaluate overloads exist only to satisfy the HBinaryOperation
  // interface; shifts are never applied to these operand combinations.
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};
4771
4772class HAnd FINAL : public HBinaryOperation {
4773 public:
4774  HAnd(Primitive::Type result_type,
4775       HInstruction* left,
4776       HInstruction* right,
4777       uint32_t dex_pc = kNoDexPc)
4778      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4779
4780  bool IsCommutative() const OVERRIDE { return true; }
4781
4782  template <typename T> static T Compute(T x, T y) { return x & y; }
4783
4784  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4785    return GetBlock()->GetGraph()->GetIntConstant(
4786        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4787  }
4788  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4789    return GetBlock()->GetGraph()->GetLongConstant(
4790        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4791  }
4792  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4793                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4794    LOG(FATAL) << DebugName() << " is not defined for float values";
4795    UNREACHABLE();
4796  }
4797  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4798                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4799    LOG(FATAL) << DebugName() << " is not defined for double values";
4800    UNREACHABLE();
4801  }
4802
4803  DECLARE_INSTRUCTION(And);
4804
4805 private:
4806  DISALLOW_COPY_AND_ASSIGN(HAnd);
4807};
4808
4809class HOr FINAL : public HBinaryOperation {
4810 public:
4811  HOr(Primitive::Type result_type,
4812      HInstruction* left,
4813      HInstruction* right,
4814      uint32_t dex_pc = kNoDexPc)
4815      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4816
4817  bool IsCommutative() const OVERRIDE { return true; }
4818
4819  template <typename T> static T Compute(T x, T y) { return x | y; }
4820
4821  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4822    return GetBlock()->GetGraph()->GetIntConstant(
4823        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4824  }
4825  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4826    return GetBlock()->GetGraph()->GetLongConstant(
4827        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4828  }
4829  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4830                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4831    LOG(FATAL) << DebugName() << " is not defined for float values";
4832    UNREACHABLE();
4833  }
4834  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4835                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4836    LOG(FATAL) << DebugName() << " is not defined for double values";
4837    UNREACHABLE();
4838  }
4839
4840  DECLARE_INSTRUCTION(Or);
4841
4842 private:
4843  DISALLOW_COPY_AND_ASSIGN(HOr);
4844};
4845
4846class HXor FINAL : public HBinaryOperation {
4847 public:
4848  HXor(Primitive::Type result_type,
4849       HInstruction* left,
4850       HInstruction* right,
4851       uint32_t dex_pc = kNoDexPc)
4852      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4853
4854  bool IsCommutative() const OVERRIDE { return true; }
4855
4856  template <typename T> static T Compute(T x, T y) { return x ^ y; }
4857
4858  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4859    return GetBlock()->GetGraph()->GetIntConstant(
4860        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4861  }
4862  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4863    return GetBlock()->GetGraph()->GetLongConstant(
4864        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4865  }
4866  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4867                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4868    LOG(FATAL) << DebugName() << " is not defined for float values";
4869    UNREACHABLE();
4870  }
4871  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4872                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4873    LOG(FATAL) << DebugName() << " is not defined for double values";
4874    UNREACHABLE();
4875  }
4876
4877  DECLARE_INSTRUCTION(Xor);
4878
4879 private:
4880  DISALLOW_COPY_AND_ASSIGN(HXor);
4881};
4882
4883class HRor FINAL : public HBinaryOperation {
4884 public:
4885  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4886    : HBinaryOperation(result_type, value, distance) {
4887    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4888    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4889  }
4890
4891  template <typename T>
4892  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
4893    typedef typename std::make_unsigned<T>::type V;
4894    V ux = static_cast<V>(value);
4895    if ((distance & max_shift_value) == 0) {
4896      return static_cast<T>(ux);
4897    } else {
4898      const V reg_bits = sizeof(T) * 8;
4899      return static_cast<T>(ux >> (distance & max_shift_value)) |
4900                           (value << (reg_bits - (distance & max_shift_value)));
4901    }
4902  }
4903
4904  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4905    return GetBlock()->GetGraph()->GetIntConstant(
4906        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4907  }
4908  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4909    return GetBlock()->GetGraph()->GetLongConstant(
4910        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4911  }
4912  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4913                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4914    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4915    UNREACHABLE();
4916  }
4917  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4918                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4919    LOG(FATAL) << DebugName() << " is not defined for float values";
4920    UNREACHABLE();
4921  }
4922  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4923                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4924    LOG(FATAL) << DebugName() << " is not defined for double values";
4925    UNREACHABLE();
4926  }
4927
4928  DECLARE_INSTRUCTION(Ror);
4929
4930 private:
4931  DISALLOW_COPY_AND_ASSIGN(HRor);
4932};
4933
4934// The value of a parameter in this method. Its location depends on
4935// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument of an instance method can never be null;
    // any other reference parameter may be.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  // Allows analyses (e.g. null propagation) to refine the nullability.
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4977
4978class HNot FINAL : public HUnaryOperation {
4979 public:
4980  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4981      : HUnaryOperation(result_type, input, dex_pc) {}
4982
4983  bool CanBeMoved() const OVERRIDE { return true; }
4984  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4985    return true;
4986  }
4987
4988  template <typename T> static T Compute(T x) { return ~x; }
4989
4990  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4991    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4992  }
4993  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4994    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4995  }
4996  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4997    LOG(FATAL) << DebugName() << " is not defined for float values";
4998    UNREACHABLE();
4999  }
5000  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
5001    LOG(FATAL) << DebugName() << " is not defined for double values";
5002    UNREACHABLE();
5003  }
5004
5005  DECLARE_INSTRUCTION(Not);
5006
5007 private:
5008  DISALLOW_COPY_AND_ASSIGN(HNot);
5009};
5010
// Logical negation of a boolean (0/1) value. Only defined for int inputs that
// are already known to hold 0 or 1.
class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Precondition (checked in debug builds): x is 0 or 1.
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  // Booleans are represented as ints; the other overloads are unreachable.
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};
5047
// Converts the input value to a different primitive type (e.g. int-to-long,
// double-to-int).
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(Primitive::kPrimBoolean, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
  Primitive::Type GetResultType() const { return GetType(); }

  // Pure: the conversion depends only on its input value.
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result.  If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 private:
  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
};
5076
// Sentinel register number; -1 converts to the maximum uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
5078
// Checks that its input is not null, throwing NullPointerException otherwise.
// Produces the (now known non-null) input value as its result.
class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment to be able to deoptimize/throw with a stack map.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  // If the check passes, the value it produces is by definition non-null.
  bool CanBeNull() const OVERRIDE { return false; }


  DECLARE_INSTRUCTION(NullCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
};
5105
5106// Embeds an ArtField and all the information required by the compiler. We cache
5107// that information to avoid requiring the mutator lock every time we need it.
// Embeds an ArtField and all the information required by the compiler. We cache
// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  // The resolved field, cached so later passes do not need to resolve again.
  ArtField* const field_;
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // The field index in the dex file.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};
5142
// Reads an instance field from the object given as input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile loads carry ordering constraints and must not be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two gets are equivalent when they read the same offset; the object
  // identity is already covered by input comparison.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Implicit null checks rely on the fault handler, so the offset must be
    // small enough that the access faults within the protected page.
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5192
// Writes `value` (input 1) into an instance field of `object` (input 0).
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Assume the stored value may be null until an analysis proves otherwise.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // Implicit null checks rely on the fault handler, so the offset must be
    // small enough that the access faults within the protected page.
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  // Lets codegen skip the write barrier's null filtering when proven non-null.
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5242
// Reads an element from an array: inputs are the array (0) and the index (1).
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Returns true if `other` is the int/float (or long/double) "equivalent" of
  // this get: same dex location and operands, differing only in the type used
  // to reinterpret the element.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5305
// Stores `value` (input 2) into the array (input 0) at `index` (input 1).
// Reference stores may require a runtime type check and can therefore throw
// ArrayStoreException.
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Only reference stores can need a type check.
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called when analysis proves the stored reference is compatible.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes the side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    // Reference stores may call into the runtime, which can trigger GC.
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed-field layout: component type first, then the three flags.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5405
// Reads the length of the array given as input 0. Always produces an int.
class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // The length load itself can serve as the implicit null check of the array.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};
5441
// Checks that `index` (input 0) is within `length` (input 1), throwing
// IndexOutOfBoundsException otherwise. Produces the checked index.
class HBoundsCheck FINAL : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
  HBoundsCheck(HInstruction* index,
               HInstruction* length,
               uint32_t dex_pc,
               uint32_t string_char_at_method_index = DexFile::kDexNoIndex)
      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc),
        string_char_at_method_index_(string_char_at_method_index) {
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Needs an environment to be able to throw with a stack map.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool IsStringCharAt() const { return GetStringCharAtMethodIndex() != DexFile::kDexNoIndex; }
  uint32_t GetStringCharAtMethodIndex() const { return string_char_at_method_index_; }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  // We treat a String as an array, creating the HBoundsCheck from String.charAt()
  // intrinsic in the instruction simplifier. We want to include the String.charAt()
  // in the stack trace if we actually throw the StringIndexOutOfBoundsException,
  // so we need to create an HEnvironment which will be translated to an InlineInfo
  // indicating the extra stack frame. Since we add this HEnvironment quite late,
  // in the PrepareForRegisterAllocation pass, we need to remember the method index
  // from the invoke as we don't want to look again at the dex bytecode.
  uint32_t string_char_at_method_index_;  // DexFile::kDexNoIndex if regular array.

  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};
5485
5486class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5487 public:
5488  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5489      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5490
5491  bool NeedsEnvironment() const OVERRIDE {
5492    return true;
5493  }
5494
5495  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5496  SlowPathCode* GetSlowPath() const { return slow_path_; }
5497
5498  DECLARE_INSTRUCTION(SuspendCheck);
5499
5500 private:
5501  // Only used for code generation, in order to share the same slow path between back edges
5502  // of a same loop.
5503  SlowPathCode* slow_path_;
5504
5505  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5506};
5507
5508// Pseudo-instruction which provides the native debugger with mapping information.
5509// It ensures that we can generate line number and local variables at this point.
5510class HNativeDebugInfo : public HTemplateInstruction<0> {
5511 public:
5512  explicit HNativeDebugInfo(uint32_t dex_pc)
5513      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5514
5515  bool NeedsEnvironment() const OVERRIDE {
5516    return true;
5517  }
5518
5519  DECLARE_INSTRUCTION(NativeDebugInfo);
5520
5521 private:
5522  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5523};
5524
5525/**
5526 * Instruction to load a Class object.
5527 */
5528class HLoadClass FINAL : public HInstruction {
5529 public:
5530  // Determines how to load the Class.
5531  enum class LoadKind {
5532    // Use the Class* from the method's own ArtMethod*.
5533    kReferrersClass,
5534
5535    // Use boot image Class* address that will be known at link time.
5536    // Used for boot image classes referenced by boot image code in non-PIC mode.
5537    kBootImageLinkTimeAddress,
5538
5539    // Use PC-relative boot image Class* address that will be known at link time.
5540    // Used for boot image classes referenced by boot image code in PIC mode.
5541    kBootImageLinkTimePcRelative,
5542
5543    // Use a known boot image Class* address, embedded in the code by the codegen.
5544    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5545    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5546    // GetIncludePatchInformation().
5547    kBootImageAddress,
5548
5549    // Load from the root table associated with the JIT compiled method.
5550    kJitTableAddress,
5551
5552    // Load from resolved types array in the dex cache using a PC-relative load.
5553    // Used for classes outside boot image when we know that we can access
5554    // the dex cache arrays using a PC-relative load.
5555    kDexCachePcRelative,
5556
5557    // Load from resolved types array accessed through the class loaded from
5558    // the compiled method's own ArtMethod*. This is the default access type when
5559    // all other types are unavailable.
5560    kDexCacheViaMethod,
5561
5562    kLast = kDexCacheViaMethod
5563  };
5564
5565  HLoadClass(HCurrentMethod* current_method,
5566             dex::TypeIndex type_index,
5567             const DexFile& dex_file,
5568             bool is_referrers_class,
5569             uint32_t dex_pc,
5570             bool needs_access_check)
5571      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5572        special_input_(HUserRecord<HInstruction*>(current_method)),
5573        type_index_(type_index),
5574        dex_file_(dex_file),
5575        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5576    // Referrers class should not need access check. We never inline unverified
5577    // methods so we can't possibly end up in this situation.
5578    DCHECK(!is_referrers_class || !needs_access_check);
5579
5580    SetPackedField<LoadKindField>(
5581        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5582    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5583    SetPackedFlag<kFlagIsInDexCache>(false);
5584    SetPackedFlag<kFlagIsInBootImage>(false);
5585    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5586  }
5587
5588  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5589    DCHECK(HasAddress(load_kind));
5590    load_data_.address = address;
5591    SetLoadKindInternal(load_kind);
5592  }
5593
5594  void SetLoadKindWithTypeReference(LoadKind load_kind,
5595                                    const DexFile& dex_file,
5596                                    dex::TypeIndex type_index) {
5597    DCHECK(HasTypeReference(load_kind));
5598    DCHECK(IsSameDexFile(dex_file_, dex_file));
5599    DCHECK_EQ(type_index_, type_index);
5600    SetLoadKindInternal(load_kind);
5601  }
5602
5603  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5604                                        const DexFile& dex_file,
5605                                        uint32_t element_index) {
5606    DCHECK(HasDexCacheReference(load_kind));
5607    DCHECK(IsSameDexFile(dex_file_, dex_file));
5608    load_data_.dex_cache_element_index = element_index;
5609    SetLoadKindInternal(load_kind);
5610  }
5611
5612  LoadKind GetLoadKind() const {
5613    return GetPackedField<LoadKindField>();
5614  }
5615
5616  bool CanBeMoved() const OVERRIDE { return true; }
5617
5618  bool InstructionDataEquals(const HInstruction* other) const;
5619
5620  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }
5621
5622  bool CanBeNull() const OVERRIDE { return false; }
5623
5624  bool NeedsEnvironment() const OVERRIDE {
5625    return CanCallRuntime();
5626  }
5627
5628  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5629    // The entrypoint the code generator is going to call does not do
5630    // clinit of the class.
5631    DCHECK(!NeedsAccessCheck());
5632    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5633  }
5634
5635  bool CanCallRuntime() const {
5636    return MustGenerateClinitCheck() ||
5637           (!IsReferrersClass() && !IsInDexCache()) ||
5638           NeedsAccessCheck();
5639  }
5640
5641  bool CanThrow() const OVERRIDE {
5642    return CanCallRuntime();
5643  }
5644
5645  ReferenceTypeInfo GetLoadedClassRTI() {
5646    return loaded_class_rti_;
5647  }
5648
5649  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5650    // Make sure we only set exact types (the loaded class should never be merged).
5651    DCHECK(rti.IsExact());
5652    loaded_class_rti_ = rti;
5653  }
5654
5655  dex::TypeIndex GetTypeIndex() const { return type_index_; }
5656  const DexFile& GetDexFile() const { return dex_file_; }
5657
5658  uint32_t GetDexCacheElementOffset() const;
5659
5660  uint64_t GetAddress() const {
5661    DCHECK(HasAddress(GetLoadKind()));
5662    return load_data_.address;
5663  }
5664
5665  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5666    return !IsReferrersClass();
5667  }
5668
5669  static SideEffects SideEffectsForArchRuntimeCalls() {
5670    return SideEffects::CanTriggerGC();
5671  }
5672
5673  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5674  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5675  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5676  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5677  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5678
5679  void MarkInDexCache() {
5680    SetPackedFlag<kFlagIsInDexCache>(true);
5681    DCHECK(!NeedsEnvironment());
5682    RemoveEnvironment();
5683    SetSideEffects(SideEffects::None());
5684  }
5685
5686  void MarkInBootImage() {
5687    SetPackedFlag<kFlagIsInBootImage>(true);
5688  }
5689
5690  void AddSpecialInput(HInstruction* special_input);
5691
5692  using HInstruction::GetInputRecords;  // Keep the const version visible.
5693  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5694    return ArrayRef<HUserRecord<HInstruction*>>(
5695        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5696  }
5697
5698  Primitive::Type GetType() const OVERRIDE {
5699    return Primitive::kPrimNot;
5700  }
5701
5702  DECLARE_INSTRUCTION(LoadClass);
5703
5704 private:
5705  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5706  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
5707  static constexpr size_t kFlagIsInBootImage       = kFlagIsInDexCache + 1;
5708  // Whether this instruction must generate the initialization check.
5709  // Used for code generation.
5710  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5711  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5712  static constexpr size_t kFieldLoadKindSize =
5713      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5714  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5715  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5716  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5717
5718  static bool HasTypeReference(LoadKind load_kind) {
5719    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5720        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5721        load_kind == LoadKind::kDexCacheViaMethod ||
5722        load_kind == LoadKind::kReferrersClass;
5723  }
5724
5725  static bool HasAddress(LoadKind load_kind) {
5726    return load_kind == LoadKind::kBootImageAddress ||
5727        load_kind == LoadKind::kJitTableAddress;
5728  }
5729
5730  static bool HasDexCacheReference(LoadKind load_kind) {
5731    return load_kind == LoadKind::kDexCachePcRelative;
5732  }
5733
5734  void SetLoadKindInternal(LoadKind load_kind);
5735
5736  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5737  // For other load kinds it's empty or possibly some architecture-specific instruction
5738  // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
5739  HUserRecord<HInstruction*> special_input_;
5740
5741  const dex::TypeIndex type_index_;
5742  const DexFile& dex_file_;
5743
5744  union {
5745    uint32_t dex_cache_element_index;   // Only for dex cache reference.
5746    uint64_t address;  // Up to 64-bit, needed for kJitTableAddress on 64-bit targets.
5747  } load_data_;
5748
5749  ReferenceTypeInfo loaded_class_rti_;
5750
5751  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5752};
5753std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5754
5755// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5756inline uint32_t HLoadClass::GetDexCacheElementOffset() const {
5757  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5758  return load_data_.dex_cache_element_index;
5759}
5760
5761// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5762inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5763  // The special input is used for PC-relative loads on some architectures,
5764  // including literal pool loads, which are PC-relative too.
5765  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5766         GetLoadKind() == LoadKind::kDexCachePcRelative ||
5767         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5768         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5769  DCHECK(special_input_.GetInstruction() == nullptr);
5770  special_input_ = HUserRecord<HInstruction*>(special_input);
5771  special_input->AddUseAt(this, 0);
5772}
5773
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    // Note: codegen needs to emit a linker patch if indicated by compiler options'
    // GetIncludePatchInformation().
    kBootImageAddress,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    kLast = kJitTableAddress,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index) {
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
    load_data_.dex_file_ = &dex_file;
  }

  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
    DCHECK(HasAddress(load_kind));
    load_data_.address = address;
    SetLoadKindInternal(load_kind);
  }

  void SetLoadKindWithStringReference(LoadKind load_kind,
                                      const DexFile& dex_file,
                                      dex::StringIndex string_index) {
    DCHECK(HasStringReference(load_kind));
    load_data_.dex_file_ = &dex_file;
    string_index_ = string_index;
    SetLoadKindInternal(load_kind);
  }

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const;

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  uint64_t GetAddress() const {
    DCHECK(HasAddress(GetLoadKind()));
    return load_data_.address;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  static bool HasStringReference(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBssEntry ||
        load_kind == LoadKind::kDexCacheViaMethod ||
        load_kind == LoadKind::kJitTableAddress;
  }

  static bool HasAddress(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageAddress;
  }

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  // String index serves also as the hash code and it's also needed for slow-paths,
  // so it must not be overwritten with other load data.
  dex::StringIndex string_index_;

  union {
    const DexFile* dex_file_;            // For string reference.
    uint64_t address;  // Up to 64-bit, needed for kBootImageAddress on 64-bit targets.
  } load_data_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5930std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5931
5932// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5933inline const DexFile& HLoadString::GetDexFile() const {
5934  DCHECK(HasStringReference(GetLoadKind())) << GetLoadKind();
5935  return *load_data_.dex_file_;
5936}
5937
5938// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5939inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5940  // The special input is used for PC-relative loads on some architectures,
5941  // including literal pool loads, which are PC-relative too.
5942  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5943         GetLoadKind() == LoadKind::kBssEntry ||
5944         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5945         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5946  // HLoadString::GetInputRecords() returns an empty array at this point,
5947  // so use the GetInputRecords() from the base class to set the input record.
5948  DCHECK(special_input_.GetInstruction() == nullptr);
5949  special_input_ = HUserRecord<HInstruction*>(special_input);
5950  special_input->AddUseAt(this, 0);
5951}
5952
5953/**
5954 * Performs an initialization check on its Class object input.
5955 */
5956class HClinitCheck FINAL : public HExpression<1> {
5957 public:
5958  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5959      : HExpression(
5960            Primitive::kPrimNot,
5961            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5962            dex_pc) {
5963    SetRawInputAt(0, constant);
5964  }
5965
5966  bool CanBeMoved() const OVERRIDE { return true; }
5967  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5968    return true;
5969  }
5970
5971  bool NeedsEnvironment() const OVERRIDE {
5972    // May call runtime to initialize the class.
5973    return true;
5974  }
5975
5976  bool CanThrow() const OVERRIDE { return true; }
5977
5978  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5979
5980  DECLARE_INSTRUCTION(ClinitCheck);
5981
5982 private:
5983  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5984};
5985
5986class HStaticFieldGet FINAL : public HExpression<1> {
5987 public:
5988  HStaticFieldGet(HInstruction* cls,
5989                  ArtField* field,
5990                  Primitive::Type field_type,
5991                  MemberOffset field_offset,
5992                  bool is_volatile,
5993                  uint32_t field_idx,
5994                  uint16_t declaring_class_def_index,
5995                  const DexFile& dex_file,
5996                  uint32_t dex_pc)
5997      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
5998        field_info_(field,
5999                    field_offset,
6000                    field_type,
6001                    is_volatile,
6002                    field_idx,
6003                    declaring_class_def_index,
6004                    dex_file) {
6005    SetRawInputAt(0, cls);
6006  }
6007
6008
6009  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
6010
6011  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
6012    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6013    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6014  }
6015
6016  size_t ComputeHashCode() const OVERRIDE {
6017    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6018  }
6019
6020  const FieldInfo& GetFieldInfo() const { return field_info_; }
6021  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6022  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
6023  bool IsVolatile() const { return field_info_.IsVolatile(); }
6024
6025  DECLARE_INSTRUCTION(StaticFieldGet);
6026
6027 private:
6028  const FieldInfo field_info_;
6029
6030  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
6031};
6032
6033class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
6034 public:
6035  HStaticFieldSet(HInstruction* cls,
6036                  HInstruction* value,
6037                  ArtField* field,
6038                  Primitive::Type field_type,
6039                  MemberOffset field_offset,
6040                  bool is_volatile,
6041                  uint32_t field_idx,
6042                  uint16_t declaring_class_def_index,
6043                  const DexFile& dex_file,
6044                  uint32_t dex_pc)
6045      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
6046        field_info_(field,
6047                    field_offset,
6048                    field_type,
6049                    is_volatile,
6050                    field_idx,
6051                    declaring_class_def_index,
6052                    dex_file) {
6053    SetPackedFlag<kFlagValueCanBeNull>(true);
6054    SetRawInputAt(0, cls);
6055    SetRawInputAt(1, value);
6056  }
6057
6058  const FieldInfo& GetFieldInfo() const { return field_info_; }
6059  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6060  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
6061  bool IsVolatile() const { return field_info_.IsVolatile(); }
6062
6063  HInstruction* GetValue() const { return InputAt(1); }
6064  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6065  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
6066
6067  DECLARE_INSTRUCTION(StaticFieldSet);
6068
6069 private:
6070  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6071  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
6072  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
6073                "Too many packed fields.");
6074
6075  const FieldInfo field_info_;
6076
6077  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
6078};
6079
6080class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
6081 public:
6082  HUnresolvedInstanceFieldGet(HInstruction* obj,
6083                              Primitive::Type field_type,
6084                              uint32_t field_index,
6085                              uint32_t dex_pc)
6086      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6087        field_index_(field_index) {
6088    SetRawInputAt(0, obj);
6089  }
6090
6091  bool NeedsEnvironment() const OVERRIDE { return true; }
6092  bool CanThrow() const OVERRIDE { return true; }
6093
6094  Primitive::Type GetFieldType() const { return GetType(); }
6095  uint32_t GetFieldIndex() const { return field_index_; }
6096
6097  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6098
6099 private:
6100  const uint32_t field_index_;
6101
6102  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6103};
6104
6105class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6106 public:
6107  HUnresolvedInstanceFieldSet(HInstruction* obj,
6108                              HInstruction* value,
6109                              Primitive::Type field_type,
6110                              uint32_t field_index,
6111                              uint32_t dex_pc)
6112      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6113        field_index_(field_index) {
6114    SetPackedField<FieldTypeField>(field_type);
6115    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6116    SetRawInputAt(0, obj);
6117    SetRawInputAt(1, value);
6118  }
6119
6120  bool NeedsEnvironment() const OVERRIDE { return true; }
6121  bool CanThrow() const OVERRIDE { return true; }
6122
6123  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6124  uint32_t GetFieldIndex() const { return field_index_; }
6125
6126  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6127
6128 private:
6129  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6130  static constexpr size_t kFieldFieldTypeSize =
6131      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6132  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6133      kFieldFieldType + kFieldFieldTypeSize;
6134  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6135                "Too many packed fields.");
6136  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6137
6138  const uint32_t field_index_;
6139
6140  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6141};
6142
6143class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
6144 public:
6145  HUnresolvedStaticFieldGet(Primitive::Type field_type,
6146                            uint32_t field_index,
6147                            uint32_t dex_pc)
6148      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6149        field_index_(field_index) {
6150  }
6151
6152  bool NeedsEnvironment() const OVERRIDE { return true; }
6153  bool CanThrow() const OVERRIDE { return true; }
6154
6155  Primitive::Type GetFieldType() const { return GetType(); }
6156  uint32_t GetFieldIndex() const { return field_index_; }
6157
6158  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
6159
6160 private:
6161  const uint32_t field_index_;
6162
6163  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
6164};
6165
6166class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
6167 public:
6168  HUnresolvedStaticFieldSet(HInstruction* value,
6169                            Primitive::Type field_type,
6170                            uint32_t field_index,
6171                            uint32_t dex_pc)
6172      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6173        field_index_(field_index) {
6174    SetPackedField<FieldTypeField>(field_type);
6175    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6176    SetRawInputAt(0, value);
6177  }
6178
6179  bool NeedsEnvironment() const OVERRIDE { return true; }
6180  bool CanThrow() const OVERRIDE { return true; }
6181
6182  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6183  uint32_t GetFieldIndex() const { return field_index_; }
6184
6185  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
6186
6187 private:
6188  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6189  static constexpr size_t kFieldFieldTypeSize =
6190      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6191  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6192      kFieldFieldType + kFieldFieldTypeSize;
6193  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6194                "Too many packed fields.");
6195  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6196
6197  const uint32_t field_index_;
6198
6199  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
6200};
6201
6202// Implement the move-exception DEX instruction.
6203class HLoadException FINAL : public HExpression<0> {
6204 public:
6205  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
6206      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}
6207
6208  bool CanBeNull() const OVERRIDE { return false; }
6209
6210  DECLARE_INSTRUCTION(LoadException);
6211
6212 private:
6213  DISALLOW_COPY_AND_ASSIGN(HLoadException);
6214};
6215
6216// Implicit part of move-exception which clears thread-local exception storage.
6217// Must not be removed because the runtime expects the TLS to get cleared.
6218class HClearException FINAL : public HTemplateInstruction<0> {
6219 public:
6220  explicit HClearException(uint32_t dex_pc = kNoDexPc)
6221      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}
6222
6223  DECLARE_INSTRUCTION(ClearException);
6224
6225 private:
6226  DISALLOW_COPY_AND_ASSIGN(HClearException);
6227};
6228
6229class HThrow FINAL : public HTemplateInstruction<1> {
6230 public:
6231  HThrow(HInstruction* exception, uint32_t dex_pc)
6232      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6233    SetRawInputAt(0, exception);
6234  }
6235
6236  bool IsControlFlow() const OVERRIDE { return true; }
6237
6238  bool NeedsEnvironment() const OVERRIDE { return true; }
6239
6240  bool CanThrow() const OVERRIDE { return true; }
6241
6242
6243  DECLARE_INSTRUCTION(Throw);
6244
6245 private:
6246  DISALLOW_COPY_AND_ASSIGN(HThrow);
6247};
6248
/**
 * Implementation strategies for the code generator of an `HInstanceOf`
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck,            // No optimization yet when checking against a generic array.
  kLast = kArrayCheck
};
6263
6264std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6265
// Computes whether `object` is an instance of the class loaded by `constant`,
// producing a boolean. The codegen strategy is encoded as a TypeCheckKind.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // An environment is only needed when the check may call into the runtime.
  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed-field layout: TypeCheckKind bits followed by the null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6319
// Narrows the reference type information of its input; used by reference
// type propagation (see the upper-bound accessors below).
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Nullability may only be set if the upper bound allows null.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6363
// Checks that `object` can be cast to the type loaded by `constant`,
// throwing at runtime when it cannot.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation (mirrors the same accessors on HInstanceOf).
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed-field layout: TypeCheckKind bits followed by the null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6408
6409/**
6410 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6411 * @details We define the combined barrier types that are actually required
6412 * by the Java Memory Model, rather than using exactly the terminology from
6413 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6414 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6415 * store atomicity issues, and the recipes there are not always entirely sufficient.
6416 * The current recipe is as follows:
6417 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6418 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6419 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6420 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6421 *    class has final fields.
6422 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6423 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6424 */
// Combined barrier kinds; see the recipe in the comment block above.
enum MemBarrierKind {
  kAnyStore,              // ~ (LoadStore | StoreStore): release barrier before a volatile store.
  kLoadAny,               // ~ (LoadLoad | LoadStore): acquire barrier after a volatile load.
  kStoreStore,            // Orders stores; before return from ctors of classes with final fields.
  kAnyAny,                // Full barrier, emitted after a volatile store.
  kNTStoreStore,          // Orders non-temporal stores against later store-to-memory instructions.
  kLastBarrierKind = kNTStoreStore
};
std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6434
6435class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6436 public:
6437  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6438      : HTemplateInstruction(
6439            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6440    SetPackedField<BarrierKindField>(barrier_kind);
6441  }
6442
6443  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6444
6445  DECLARE_INSTRUCTION(MemoryBarrier);
6446
6447 private:
6448  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6449  static constexpr size_t kFieldBarrierKindSize =
6450      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6451  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6452      kFieldBarrierKind + kFieldBarrierKindSize;
6453  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6454                "Too many packed fields.");
6455  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6456
6457  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6458};
6459
6460class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6461 public:
6462  enum class OperationKind {
6463    kEnter,
6464    kExit,
6465    kLast = kExit
6466  };
6467
6468  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6469    : HTemplateInstruction(
6470          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6471          dex_pc) {
6472    SetPackedField<OperationKindField>(kind);
6473    SetRawInputAt(0, object);
6474  }
6475
6476  // Instruction may go into runtime, so we need an environment.
6477  bool NeedsEnvironment() const OVERRIDE { return true; }
6478
6479  bool CanThrow() const OVERRIDE {
6480    // Verifier guarantees that monitor-exit cannot throw.
6481    // This is important because it allows the HGraphBuilder to remove
6482    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6483    return IsEnter();
6484  }
6485
6486  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6487  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6488
6489  DECLARE_INSTRUCTION(MonitorOperation);
6490
6491 private:
6492  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6493  static constexpr size_t kFieldOperationKindSize =
6494      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6495  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6496      kFieldOperationKind + kFieldOperationKindSize;
6497  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6498                "Too many packed fields.");
6499  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6500
6501 private:
6502  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6503};
6504
// Selects between `true_value` and `false_value` based on `condition`,
// i.e. the value form of `condition ? true_value : false_value`.
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // The result may be null if either selectable value may be null.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6541
// A single source -> destination transfer, the unit of work inside an
// HParallelMove. Also tracks the "pending"/"eliminated" states used by the
// parallel move resolver.
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  // Pending == destination cleared but source still valid.
  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};
6615
6616std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6617
6618static constexpr size_t kDefaultNumberOfMoves = 4;
6619
// A set of MoveOperands resolved together. The debug checks in AddMove
// guarantee that no two moves in the set write overlapping destinations.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    // Pre-reserve to avoid reallocation for the common small cases.
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, checks that the same instruction does
  // not get two moves (except for the spill-slot special case) and that the
  // new destination does not overlap any existing destination.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6675
6676}  // namespace art
6677
6678#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6679#include "nodes_shared.h"
6680#endif
6681#ifdef ART_ENABLE_CODEGEN_arm
6682#include "nodes_arm.h"
6683#endif
6684#ifdef ART_ENABLE_CODEGEN_arm64
6685#include "nodes_arm64.h"
6686#endif
6687#ifdef ART_ENABLE_CODEGEN_mips
6688#include "nodes_mips.h"
6689#endif
6690#ifdef ART_ENABLE_CODEGEN_x86
6691#include "nodes_x86.h"
6692#endif
6693
6694namespace art {
6695
// Base visitor over the instructions of a graph. Subclasses override the
// per-instruction Visit##name() hooks; hooks not overridden fall back to
// VisitInstruction().
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Fallback invoked by the default Visit##name() implementations below.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6725
// Visitor whose per-instruction hooks delegate to the hook of the
// instruction's super class, so overriding a base-class hook also handles
// all of its subclasses.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6742
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip ahead if block id 0 is not part of the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Move to the next block id whose bit is set in the loop's block set.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};
6775
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead if the first RPO block is not part of the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    // Move to the next RPO position whose block id is in the loop's block set.
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
6808
6809inline int64_t Int64FromConstant(HConstant* constant) {
6810  if (constant->IsIntConstant()) {
6811    return constant->AsIntConstant()->GetValue();
6812  } else if (constant->IsLongConstant()) {
6813    return constant->AsLongConstant()->GetValue();
6814  } else {
6815    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6816    return 0;
6817  }
6818}
6819
6820#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6821  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6822  inline const H##type* HInstruction::As##type() const {                       \
6823    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6824  }                                                                            \
6825  inline H##type* HInstruction::As##type() {                                   \
6826    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6827  }
6828
6829  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6830#undef INSTRUCTION_TYPE_CHECK
6831
// Create space in `blocks` for adding `number_of_new_blocks` entries
// starting after location `after`. Blocks following `after` are moved
// accordingly; the vacated slots keep stale copies that the caller is
// expected to overwrite.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6843
6844}  // namespace art
6845
6846#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6847