nodes.h revision d9c90373d640a5e08072cf469c372e24a8c0fc35
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18#define ART_COMPILER_OPTIMIZING_NODES_H_
19
20#include <algorithm>
21#include <array>
22#include <type_traits>
23
24#include "base/arena_bit_vector.h"
25#include "base/arena_containers.h"
26#include "base/arena_object.h"
27#include "base/array_ref.h"
28#include "base/stl_util.h"
29#include "base/transform_array_ref.h"
30#include "dex_file.h"
31#include "entrypoints/quick/quick_entrypoints_enum.h"
32#include "handle.h"
33#include "handle_scope.h"
34#include "invoke_type.h"
35#include "locations.h"
36#include "method_reference.h"
37#include "mirror/class.h"
38#include "offsets.h"
39#include "primitive.h"
40#include "utils/intrusive_forward_list.h"
41
42namespace art {
43
44class GraphChecker;
45class HBasicBlock;
46class HCurrentMethod;
47class HDoubleConstant;
48class HEnvironment;
49class HFloatConstant;
50class HGraphBuilder;
51class HGraphVisitor;
52class HInstruction;
53class HIntConstant;
54class HInvoke;
55class HLongConstant;
56class HNullConstant;
57class HPhi;
58class HSuspendCheck;
59class HTryBoundary;
60class LiveInterval;
61class LocationSummary;
62class SlowPathCode;
63class SsaBuilder;
64
65namespace mirror {
66class DexCache;
67}  // namespace mirror
68
// Initial reserve() capacities for the block/predecessor/successor containers
// used by HGraph and HBasicBlock below. These are tuning defaults, not limits.
// Declared constexpr for consistency with the other constants in this file
// (they were previously `static const int`).
static constexpr int kDefaultNumberOfBlocks = 8;
static constexpr int kDefaultNumberOfSuccessors = 2;
static constexpr int kDefaultNumberOfPredecessors = 2;
static constexpr int kDefaultNumberOfExceptionalPredecessors = 0;
static constexpr int kDefaultNumberOfDominatedBlocks = 1;
static constexpr int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinel values denoting an unknown dex field index / class-def index.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
83
84static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
85
86static constexpr uint32_t kNoDexPc = -1;
87
88inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
89  // For the purposes of the compiler, the dex files must actually be the same object
90  // if we want to safely treat them as the same. This is especially important for JIT
91  // as custom class loaders can open the same underlying file (or memory) multiple
92  // times and provide different class resolution but no two class loaders should ever
93  // use the same DexFile object - doing so is an unsupported hack that can lead to
94  // all sorts of weird failures.
95  return &lhs == &rhs;
96}
97
// Comparison kinds used by conditional instructions in the graph. The
// enumerator groups below record which operand types each condition is
// meaningful for.
enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers (below/above naming, as in x86 condition codes).
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases, useful for iterating over all conditions.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
116
// Outcome of graph-level analyses such as HGraph::BuildDominatorTree() and
// HGraph::AnalyzeLoops().
enum GraphAnalysisResult {
  kAnalysisSkipped,              // The analysis was not run.
  kAnalysisInvalidBytecode,      // The input bytecode could not be handled.
  kAnalysisFailThrowCatchLoop,   // A loop header is a catch block (see AnalyzeLoops()).
  kAnalysisFailAmbiguousArrayOp, // An array operation's type could not be disambiguated.
  kAnalysisSuccess,              // The analysis completed successfully.
};
124
// Ordered list of the instructions of one basic block. Only the first/last
// pointers live here; the links are stored on the HInstruction objects
// themselves (HInstruction is a friend below), so the list owns neither the
// instructions nor any link nodes.
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  // Add `instruction` to this list / remove it from this list.
  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise.  Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  // Drops the list's pointers; does not touch the instructions themselves.
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  // Add the instructions of `instruction_list` to this list, after/before
  // `cursor` or (for Add) without a cursor.
  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
170
// Compile-time type information for a reference value: a handle to the
// mirror::Class plus a flag recording whether that class is the exact runtime
// type or only an upper bound (a superclass of the actual type). A
// default-constructed (invalid) instance carries no type information at all.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  // Creates info for `type_handle`; `is_exact` states whether the value is
  // known to be exactly of that class rather than a subclass.
  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  // Convenience overload: the type is exact iff the class cannot have
  // assignable subtypes at all.
  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  // Like Create() but skips any validation of the handle.
  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  // Returns an instance carrying no type information (IsValid() == false).
  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  // The Is*Class()/IsInterface() queries below all require a valid handle.
  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Returns true only when this is an exact array type whose component type
  // can store a value described by `rti`. A non-exact or non-array receiver
  // conservatively answers false.
  bool CanArrayHold(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Array-to-array variant of CanArrayHold(): true only when both sides are
  // arrays (this one exact) and our component type can store theirs.
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti)  const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  // Reflexive supertype check: a type is a supertype of itself.
  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Like IsSupertypeOf() but excludes the case where both classes are the
  // same object.
  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};
290
291std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
292
// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);

  ArenaAllocator* GetArena() const { return arena_; }
  // Blocks in insertion order; see GetReversePostOrder()/GetLinearOrder()
  // for traversal orders.
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  // Dominator-tree and loop bookkeeping; implementations in nodes.cc.
  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Hands out monotonically increasing instruction ids; aborts (DCHECK) on
  // overflow of the id space.
  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  // Ids may only move forward, never be reset backwards.
  void SetCurrentInstructionId(int32_t id) {
    DCHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  // Raises the maximum to `other_value` if it is larger than the current one.
  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  // Raises the temporaries slot count to `slots` if larger than the current one.
  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  // Only meaningful before SSA conversion (see in_ssa_form_).
  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  // Vregs that are not parameters; only meaningful before SSA conversion.
  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  // Typed constant accessors; float/double values are cached under their raw
  // bit patterns so that e.g. +0.0f and -0.0f are distinct cache entries.
  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Looks up `value` in `cache` and returns the cached constant, creating,
  // caching and inserting a fresh one if absent or previously deleted.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Flag whether there are any irreducible loops in the graph.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on stack replacement: this will
  // make all loops seen as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};
664
// Describes one natural loop: its header block, back edges, suspend check,
// and the set of member blocks. Attached to the loop-header HBasicBlock.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        // NOTE(review): the allocation kind reuses the back-edges tag
        // (kArenaAllocLoopInfoBackEdges) — confirm this is intentional.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  // The suspend check inserted for this loop, if any.
  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  // Bit vector of block ids belonging to this loop.
  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // Populated means at least one block bit is set.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
770
// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks. An instance is therefore either try-block info (try_entry_
// set) or catch-block info (catch_dex_file_ set), never both.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor. `catch_type_index` of
  // DexFile::kDexNoIndex16 denotes a catch-all handler.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // True for a catch-all handler (no specific exception type).
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};
823
// Sentinel lifetime position meaning "no lifetime assigned". The cast is now
// explicit, matching kInvalidBlockId below instead of relying on an implicit
// signed-to-unsigned conversion.
static constexpr size_t kNoLifetime = static_cast<size_t>(-1);
// Sentinel block id used before a real id is assigned (see the HBasicBlock
// constructor).
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
826
827// A block in a method. Contains the list of instructions represented
828// as a double linked list. Each block knows its predecessors and
829// successors.
830
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  // Creates a block with an invalid id and no CFG/dominance information;
  // those are filled in while building the graph. `dex_pc` is the dex
  // program counter of the first instruction of the block, or kNoDexPc.
  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    // Reserve the common sizes up front to avoid early arena reallocations.
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns whether `block` appears in the successor list, searching from
  // index `start_from` onwards.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Registers `back_edge` as a back edge of the loop headed by this block,
  // lazily creating the loop information the first time around.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  // Number of back edges if this block is a loop header, 0 otherwise.
  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Adds `block` as a successor, keeping `block`'s predecessor list in sync.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Replaces the successor `existing` with `new_block` at the same successor
  // index, updating the predecessor lists of both blocks.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Replaces the predecessor `existing` with `new_block` at the same
  // predecessor index, updating the successor lists of both blocks.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  // Removes the first occurrence of `block` from the predecessor list.
  // Note: does not update `block`'s successor list.
  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  // Removes the first occurrence of `block` from the successor list.
  // Note: does not update `block`'s predecessor list.
  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  // Adds `block` as a predecessor, keeping `block`'s successor list in sync.
  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  // Swaps the two predecessors. Only valid with exactly two predecessors.
  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  // Swaps the two successors. Only valid with exactly two successors.
  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  // Returns the index of the first occurrence of `predecessor`.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Returns the index of the first occurrence of `successor`.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  // Returns whether this block is the header of the loop it belongs to.
  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  // Returns whether the first predecessor is the loop's pre-header.
  // Only valid for loop headers.
  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  // Returns whether the first predecessor is a back edge of the loop.
  // Only valid for loop headers.
  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  // Lifetime positions of this block; kNoLifetime until set through the
  // setters below (NOTE(review): presumably by liveness analysis -- confirm
  // against the SSA liveness code, which is not visible here).
  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  // Innermost loop this block belongs to, or null if not in a loop.
  HLoopInformation* loop_information_;
  // Immediate dominator, or null until dominance analysis sets it.
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  // Unique id within the graph; kInvalidBlockId until assigned.
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  // Try/catch membership information, or null if neither.
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
1179
1180// Iterates over the LoopInformation of all loops which contain 'block'
1181// from the innermost to the outermost.
1182class HLoopInformationOutwardIterator : public ValueObject {
1183 public:
1184  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1185      : current_(block.GetLoopInformation()) {}
1186
1187  bool Done() const { return current_ == nullptr; }
1188
1189  void Advance() {
1190    DCHECK(!Done());
1191    current_ = current_->GetPreHeader()->GetLoopInformation();
1192  }
1193
1194  HLoopInformation* Current() const {
1195    DCHECK(!Done());
1196    return current_;
1197  }
1198
1199 private:
1200  HLoopInformation* current_;
1201
1202  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1203};
1204
1205#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
1206  M(Above, Condition)                                                   \
1207  M(AboveOrEqual, Condition)                                            \
1208  M(Add, BinaryOperation)                                               \
1209  M(And, BinaryOperation)                                               \
1210  M(ArrayGet, Instruction)                                              \
1211  M(ArrayLength, Instruction)                                           \
1212  M(ArraySet, Instruction)                                              \
1213  M(Below, Condition)                                                   \
1214  M(BelowOrEqual, Condition)                                            \
1215  M(BooleanNot, UnaryOperation)                                         \
1216  M(BoundsCheck, Instruction)                                           \
1217  M(BoundType, Instruction)                                             \
1218  M(CheckCast, Instruction)                                             \
1219  M(ClassTableGet, Instruction)                                         \
1220  M(ClearException, Instruction)                                        \
1221  M(ClinitCheck, Instruction)                                           \
1222  M(Compare, BinaryOperation)                                           \
1223  M(CurrentMethod, Instruction)                                         \
1224  M(Deoptimize, Instruction)                                            \
1225  M(Div, BinaryOperation)                                               \
1226  M(DivZeroCheck, Instruction)                                          \
1227  M(DoubleConstant, Constant)                                           \
1228  M(Equal, Condition)                                                   \
1229  M(Exit, Instruction)                                                  \
1230  M(FloatConstant, Constant)                                            \
1231  M(Goto, Instruction)                                                  \
1232  M(GreaterThan, Condition)                                             \
1233  M(GreaterThanOrEqual, Condition)                                      \
1234  M(If, Instruction)                                                    \
1235  M(InstanceFieldGet, Instruction)                                      \
1236  M(InstanceFieldSet, Instruction)                                      \
1237  M(InstanceOf, Instruction)                                            \
1238  M(IntConstant, Constant)                                              \
1239  M(InvokeUnresolved, Invoke)                                           \
1240  M(InvokeInterface, Invoke)                                            \
1241  M(InvokeStaticOrDirect, Invoke)                                       \
1242  M(InvokeVirtual, Invoke)                                              \
1243  M(LessThan, Condition)                                                \
1244  M(LessThanOrEqual, Condition)                                         \
1245  M(LoadClass, Instruction)                                             \
1246  M(LoadException, Instruction)                                         \
1247  M(LoadString, Instruction)                                            \
1248  M(LongConstant, Constant)                                             \
1249  M(MemoryBarrier, Instruction)                                         \
1250  M(MonitorOperation, Instruction)                                      \
1251  M(Mul, BinaryOperation)                                               \
1252  M(NativeDebugInfo, Instruction)                                       \
1253  M(Neg, UnaryOperation)                                                \
1254  M(NewArray, Instruction)                                              \
1255  M(NewInstance, Instruction)                                           \
1256  M(Not, UnaryOperation)                                                \
1257  M(NotEqual, Condition)                                                \
1258  M(NullConstant, Instruction)                                          \
1259  M(NullCheck, Instruction)                                             \
1260  M(Or, BinaryOperation)                                                \
1261  M(PackedSwitch, Instruction)                                          \
1262  M(ParallelMove, Instruction)                                          \
1263  M(ParameterValue, Instruction)                                        \
1264  M(Phi, Instruction)                                                   \
1265  M(Rem, BinaryOperation)                                               \
1266  M(Return, Instruction)                                                \
1267  M(ReturnVoid, Instruction)                                            \
1268  M(Ror, BinaryOperation)                                               \
1269  M(Shl, BinaryOperation)                                               \
1270  M(Shr, BinaryOperation)                                               \
1271  M(StaticFieldGet, Instruction)                                        \
1272  M(StaticFieldSet, Instruction)                                        \
1273  M(UnresolvedInstanceFieldGet, Instruction)                            \
1274  M(UnresolvedInstanceFieldSet, Instruction)                            \
1275  M(UnresolvedStaticFieldGet, Instruction)                              \
1276  M(UnresolvedStaticFieldSet, Instruction)                              \
1277  M(Select, Instruction)                                                \
1278  M(Sub, BinaryOperation)                                               \
1279  M(SuspendCheck, Instruction)                                          \
1280  M(Throw, Instruction)                                                 \
1281  M(TryBoundary, Instruction)                                           \
1282  M(TypeConversion, Instruction)                                        \
1283  M(UShr, BinaryOperation)                                              \
1284  M(Xor, BinaryOperation)                                               \
1285
1286/*
1287 * Instructions, shared across several (not all) architectures.
1288 */
1289#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1290#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1291#else
1292#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1293  M(BitwiseNegatedRight, Instruction)                                   \
1294  M(MultiplyAccumulate, Instruction)                                    \
1295  M(IntermediateAddress, Instruction)
1296#endif
1297
1298#ifndef ART_ENABLE_CODEGEN_arm
1299#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1300#else
1301#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
1302  M(ArmDexCacheArraysBase, Instruction)
1303#endif
1304
1305#ifndef ART_ENABLE_CODEGEN_arm64
1306#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1307#else
1308#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
1309  M(Arm64DataProcWithShifterOp, Instruction)
1310#endif
1311
1312#ifndef ART_ENABLE_CODEGEN_mips
1313#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1314#else
1315#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
1316  M(MipsComputeBaseMethodAddress, Instruction)                          \
1317  M(MipsDexCacheArraysBase, Instruction)
1318#endif
1319
1320#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1321
1322#ifndef ART_ENABLE_CODEGEN_x86
1323#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1324#else
1325#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1326  M(X86ComputeBaseMethodAddress, Instruction)                           \
1327  M(X86LoadFromConstantTable, Instruction)                              \
1328  M(X86FPNeg, Instruction)                                              \
1329  M(X86PackedSwitch, Instruction)
1330#endif
1331
1332#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1333
1334#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1335  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1336  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1337  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1338  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1339  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1340  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1341  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1342  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1343
1344#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
1345  M(Condition, BinaryOperation)                                         \
1346  M(Constant, Instruction)                                              \
1347  M(UnaryOperation, Instruction)                                        \
1348  M(BinaryOperation, Instruction)                                       \
1349  M(Invoke, Instruction)
1350
1351#define FOR_EACH_INSTRUCTION(M)                                         \
1352  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
1353  FOR_EACH_ABSTRACT_INSTRUCTION(M)
1354
1355#define FORWARD_DECLARATION(type, super) class H##type;
1356FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1357#undef FORWARD_DECLARATION
1358
1359#define DECLARE_INSTRUCTION(type)                                         \
1360  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
1361  const char* DebugName() const OVERRIDE { return #type; }                \
1362  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
1363    return other->Is##type();                                             \
1364  }                                                                       \
1365  void Accept(HGraphVisitor* visitor) OVERRIDE
1366
1367#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
1368  bool Is##type() const { return As##type() != nullptr; }               \
1369  const H##type* As##type() const { return this; }                      \
1370  H##type* As##type() { return this; }
1371
// A node in the intrusive use list of an instruction. Records one user of
// type T (e.g. HInstruction* or HEnvironment*) and the input index at which
// the instruction is used by that user. Nodes are created by HInstruction
// (the only class with access to the private constructor).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
 public:
  // The user holding the reference to the instruction.
  T GetUser() const { return user_; }
  // Input index of this use within the user.
  size_t GetIndex() const { return index_; }
  void SetIndex(size_t index) { index_ = index; }

  // Hook for the IntrusiveForwardList<>.
  // TODO: Hide this better.
  IntrusiveForwardListHook hook;

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};
1394
1395template <typename T>
1396using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1397
1398// This class is used by HEnvironment and HInstruction classes to record the
1399// instructions they use and pointers to the corresponding HUseListNodes kept
1400// by the used instructions.
1401template <typename T>
1402class HUserRecord : public ValueObject {
1403 public:
1404  HUserRecord() : instruction_(nullptr), before_use_node_() {}
1405  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1406
1407  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1408      : HUserRecord(old_record.instruction_, before_use_node) {}
1409  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1410      : instruction_(instruction), before_use_node_(before_use_node) {
1411    DCHECK(instruction_ != nullptr);
1412  }
1413
1414  HInstruction* GetInstruction() const { return instruction_; }
1415  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1416  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1417
1418 private:
1419  // Instruction used by the user.
1420  HInstruction* instruction_;
1421
1422  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1423  typename HUseList<T>::iterator before_use_node_;
1424};
1425
1426// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1427// This is used for HInstruction::GetInputs() to return a container wrapper providing
1428// HInstruction* values even though the underlying container has HUserRecord<>s.
1429struct HInputExtractor {
1430  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1431    return record.GetInstruction();
1432  }
1433  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1434    return record.GetInstruction();
1435  }
1436};
1437
1438using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1439using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1440
1441/**
1442 * Side-effects representation.
1443 *
1444 * For write/read dependences on fields/arrays, the dependence analysis uses
1445 * type disambiguation (e.g. a float field write cannot modify the value of an
1446 * integer field read) and the access type (e.g.  a reference array write cannot
1447 * modify the value of a reference field read [although it may modify the
1448 * reference fetch prior to reading the field, which is represented by its own
1449 * write/read dependence]). The analysis makes conservative points-to
1450 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1451 * the same, and any reference read depends on any reference read without
1452 * further regard of its type).
1453 *
 * The internal representation uses 38 bits and is described in the table below.
1455 * The first line indicates the side effect, and for field/array accesses the
1456 * second line indicates the type of the access (in the order of the
1457 * Primitive::Type enum).
1458 * The two numbered lines below indicate the bit position in the bitfield (read
1459 * vertically).
1460 *
1461 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1462 *   +-------------+---------+---------+--------------+---------+---------+
1463 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1464 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1465 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1466 *
1467 * Note that, to ease the implementation, 'changes' bits are least significant
1468 * bits, while 'dependency' bits are most significant bits.
1469 */
1470class SideEffects : public ValueObject {
1471 public:
1472  SideEffects() : flags_(0) {}
1473
1474  static SideEffects None() {
1475    return SideEffects(0);
1476  }
1477
1478  static SideEffects All() {
1479    return SideEffects(kAllChangeBits | kAllDependOnBits);
1480  }
1481
1482  static SideEffects AllChanges() {
1483    return SideEffects(kAllChangeBits);
1484  }
1485
1486  static SideEffects AllDependencies() {
1487    return SideEffects(kAllDependOnBits);
1488  }
1489
1490  static SideEffects AllExceptGCDependency() {
1491    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1492  }
1493
  // Returns side effects covering every kind of write and every kind of read
  // (all field/array locations of all types, plus nothing GC-related).
  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  // Returns side effects covering every kind of write.
  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  // Returns side effects covering every kind of read.
  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Write side effect of a field access of the given type. A volatile access
  // acts as a memory barrier, so it is modeled conservatively as writing and
  // reading everything.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  // Write side effect of an array access of the given type.
  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  // Read side effect of a field access of the given type. Volatile accesses
  // are modeled conservatively, same as in FieldWriteOfType().
  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  // Read side effect of an array access of the given type.
  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  // Side effect of an instruction that may trigger a GC (a "change" bit).
  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  // Dependency on the GC possibly running (a "depend on" bit).
  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Returns this set of side effects with the bits of `other` removed.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  // Adds the side effects of `other` to this set, in place.
  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // Returns true if every bit set in `other` is also set in this.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  // Returns true if any "change" bit (write or can-trigger-GC) is set.
  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  // Returns true if any "depend on" bit (read or depends-on-GC) is set.
  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  // Returns true if all "change" and all "depend on" bits are set.
  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  // The "depend on" bits mirror the "change" bits shifted by kChangeBits,
  // so un-shifting them allows a direct intersection with `other`'s changes.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Bit layout of flags_ (low to high):
  //   [field writes per type][array writes per type][can trigger GC]
  //   [field reads per type][array reads per type][depends on GC]
  // One bit per primitive type in each of the four per-type groups.
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // MayDependOn() relies on the "depend on" half mirroring the "change" half
  // bit for bit; assert that the two halves have the same width.
  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  // Aggregate masks over the individual bits above.
  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    // The per-type bit must fit inside one 9-bit group; the write-offset
    // bounds are used regardless of `offset` since all groups have the
    // same width.
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
1665
// A HEnvironment object contains the values of virtual registers at a given location.
// Environments form a chain through `parent_` when the holder instruction comes
// from an inlined method: the parent describes the caller's vregs.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // `number_of_vregs`: number of dex virtual register slots to allocate.
  // `dex_file`/`method_idx`/`dex_pc`/`invoke_type`: identify the code location
  // this environment describes.
  // `holder`: the instruction that owns this environment.
  HEnvironment(ArenaAllocator* arena,
               size_t number_of_vregs,
               const DexFile& dex_file,
               uint32_t method_idx,
               uint32_t dex_pc,
               InvokeType invoke_type,
               HInstruction* holder)
     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
       parent_(nullptr),
       dex_file_(dex_file),
       method_idx_(method_idx),
       dex_pc_(dex_pc),
       invoke_type_(invoke_type),
       holder_(holder) {
  }

  // Creates an environment with the same shape and location info as `to_copy`,
  // but with empty vreg slots and no parent; use CopyFrom() to fill it.
  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetDexFile(),
                     to_copy.GetMethodIdx(),
                     to_copy.GetDexPc(),
                     to_copy.GetInvokeType(),
                     holder) {}

  // Appends a copy of `parent` (and, recursively, its whole parent chain) at
  // the end of this environment's own parent chain. All copies are owned by
  // this environment's holder.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  // Fill the vreg slots from `locals` / from another environment, updating the
  // use lists of the copied instructions (defined in the .cc file).
  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Sets the vreg slot without updating `instruction`'s env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }


  // A non-null parent means the holder was inlined from another method.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  // Per-vreg instruction records; mutated directly by HInstruction (friend)
  // when fixing up env-use lists.
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  const DexFile& dex_file_;
  const uint32_t method_idx_;
  const uint32_t dex_pc_;
  const InvokeType invoke_type_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
1778
// Abstract base class of all nodes in the HGraph intermediate representation.
// An instruction knows its position in its block (prev/next), its inputs
// (use records), its users (intrusive use lists), its side effects, and
// optionally an environment with the dex register values at its location.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

// One enumerator per concrete instruction kind, e.g. kAdd for HAdd.
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  // Subclasses expose their input storage through this single virtual method.
  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  // Sets the input without updating the use list of `input`.
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` uses this instruction as input number `index`.
  // The new use node is prepended to the use list, and the records of the
  // previous head are fixed up to keep their before-node iterators valid.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Same as AddUseAt() but for a use from an environment.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Removes this instruction from the use list of its input number `input`.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  // Removes this instruction from the use lists of all its inputs.
  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing environment chain; raw because
  // the uses lists are not updated.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Same as CopyEnvironmentFrom() but adjusts loop phis of `block`; see
  // HEnvironment::CopyFromWithLoopPhiAdjustment().
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

// Declares Is##type()/As##type() for every concrete instruction kind;
// defined out of line (after all instruction classes are complete).
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

// For abstract kinds the As##type() methods are virtual and default to null;
// concrete subclasses override them to return `this`.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash over the kind and the ids of the inputs; used by value numbering.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): Object literals like classes and strings, that are loaded from the dex cache
  //      fields of the current method.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses allocate their own packed bits starting at this index.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  // Overwrites the input record without touching use lists; callers are
  // responsible for keeping the use lists consistent.
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After a push_front() on uses_, every user record from the new head up to
  // (but excluding) `fixup_end` must be rewritten to store the correct
  // before-use-node iterator (singly-linked list erasure needs the
  // predecessor node).
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After an erase_after(before_use_node), the node now following
  // `before_use_node` has a new predecessor; refresh its user's record.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseInsertion(); writes
  // the environment's vregs_ directly (HInstruction is a friend).
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseRemoval().
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
2235std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2236
2237class HInstructionIterator : public ValueObject {
2238 public:
2239  explicit HInstructionIterator(const HInstructionList& instructions)
2240      : instruction_(instructions.first_instruction_) {
2241    next_ = Done() ? nullptr : instruction_->GetNext();
2242  }
2243
2244  bool Done() const { return instruction_ == nullptr; }
2245  HInstruction* Current() const { return instruction_; }
2246  void Advance() {
2247    instruction_ = next_;
2248    next_ = Done() ? nullptr : instruction_->GetNext();
2249  }
2250
2251 private:
2252  HInstruction* instruction_;
2253  HInstruction* next_;
2254
2255  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2256};
2257
2258class HBackwardInstructionIterator : public ValueObject {
2259 public:
2260  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2261      : instruction_(instructions.last_instruction_) {
2262    next_ = Done() ? nullptr : instruction_->GetPrevious();
2263  }
2264
2265  bool Done() const { return instruction_ == nullptr; }
2266  HInstruction* Current() const { return instruction_; }
2267  void Advance() {
2268    instruction_ = next_;
2269    next_ = Done() ? nullptr : instruction_->GetPrevious();
2270  }
2271
2272 private:
2273  HInstruction* instruction_;
2274  HInstruction* next_;
2275
2276  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2277};
2278
2279template<size_t N>
2280class HTemplateInstruction: public HInstruction {
2281 public:
2282  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
2283      : HInstruction(side_effects, dex_pc), inputs_() {}
2284  virtual ~HTemplateInstruction() {}
2285
2286  using HInstruction::GetInputRecords;  // Keep the const version visible.
2287  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2288    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2289  }
2290
2291 private:
2292  std::array<HUserRecord<HInstruction*>, N> inputs_;
2293
2294  friend class SsaBuilder;
2295};
2296
2297// HTemplateInstruction specialization for N=0.
2298template<>
2299class HTemplateInstruction<0>: public HInstruction {
2300 public:
2301  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
2302      : HInstruction(side_effects, dex_pc) {}
2303
2304  virtual ~HTemplateInstruction() {}
2305
2306  using HInstruction::GetInputRecords;  // Keep the const version visible.
2307  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2308    return ArrayRef<HUserRecord<HInstruction*>>();
2309  }
2310
2311 private:
2312  friend class SsaBuilder;
2313};
2314
2315template<intptr_t N>
2316class HExpression : public HTemplateInstruction<N> {
2317 public:
2318  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
2319      : HTemplateInstruction<N>(side_effects, dex_pc) {
2320    this->template SetPackedField<TypeField>(type);
2321  }
2322  virtual ~HExpression() {}
2323
2324  Primitive::Type GetType() const OVERRIDE {
2325    return TypeField::Decode(this->GetPackedFields());
2326  }
2327
2328 protected:
2329  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2330  static constexpr size_t kFieldTypeSize =
2331      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
2332  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
2333  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
2334                "Too many packed fields.");
2335  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
2336};
2337
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block. It has no inputs and no
// side effects.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  // Ends its block: control transfers to the exit block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};
2352
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
// Its single input is the value being returned.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  // Must terminate its block.
  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};
2369
// Phi instruction: merges the values flowing into a block from its
// predecessors. Inputs are kept in an arena vector (not a fixed array)
// because the number of predecessors is only known at build time and
// may change during graph transformations.
class HPhi FINAL : public HInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  // A catch phi merges values flowing in over exceptional edges.
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    //  (1) int  -> float/ref (primitive type propagation),
    //  (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  // The dex register this phi merges; used to find vreg equivalents.
  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi in the same block for the same dex register
  // (its type may differ).
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      }
      next = next->GetNext();
    }
    return nullptr;
  }

  DECLARE_INSTRUCTION(Phi);

 private:
  // Packed-field layout: type bits first, then the liveness and
  // nullability flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;

  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t reg_number_;

  DISALLOW_COPY_AND_ASSIGN(HPhi);
};
2464
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
 public:
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Exit);

 private:
  DISALLOW_COPY_AND_ASSIGN(HExit);
};
2479
// Jumps from one block to another. The block ending with a HGoto must
// have exactly one successor.
class HGoto FINAL : public HTemplateInstruction<0> {
 public:
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  // The jump target, i.e. the block's single successor.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  }

  DECLARE_INSTRUCTION(Goto);

 private:
  DISALLOW_COPY_AND_ASSIGN(HGoto);
};
2496
// Abstract base class for compile-time constants. Constants have no side
// effects and can always be moved by GVN/code motion.
class HConstant : public HExpression<0> {
 public:
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }

  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  // Note: for floating point this is true for both +0.0 and -0.0.
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }

  // The raw bit representation of the value, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(Constant);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstant);
};
2520
2521class HNullConstant FINAL : public HConstant {
2522 public:
2523  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2524    return true;
2525  }
2526
2527  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2528
2529  size_t ComputeHashCode() const OVERRIDE { return 0; }
2530
2531  // The null constant representation is a 0-bit pattern.
2532  virtual bool IsZeroBitPattern() const { return true; }
2533
2534  DECLARE_INSTRUCTION(NullConstant);
2535
2536 private:
2537  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2538
2539  friend class HGraph;
2540  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2541};
2542
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
 public:
  int32_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    // Zero-extend: cast through uint32_t to avoid sign extension.
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }

  DECLARE_INSTRUCTION(IntConstant);

 private:
  // Constants are created and cached by HGraph only.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}

  const int32_t value_;

  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
};
2585
// Constants of the type long.
class HLongConstant FINAL : public HConstant {
 public:
  int64_t GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }

  DECLARE_INSTRUCTION(LongConstant);

 private:
  // Constants are created and cached by HGraph only.
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}

  const int64_t value_;

  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
};
2615
// Constants of the type float. Equality and most predicates compare the
// raw bit pattern (via bit_cast) so that NaNs and signed zeroes are
// distinguished where required.
class HFloatConstant FINAL : public HConstant {
 public:
  float GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    // Bit-pattern comparison: distinguishes +0.0/-0.0 and compares NaNs equal
    // only when they have the same representation.
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  }
  // Arithmetic zero matches both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Bit-pattern zero is +0.0f only (-0.0f has the sign bit set).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(FloatConstant);

 private:
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}

  const float value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
};
2668
// Constants of the type double. Mirrors HFloatConstant: equality and most
// predicates compare the raw 64-bit pattern via bit_cast.
class HDoubleConstant FINAL : public HConstant {
 public:
  double GetValue() const { return value_; }

  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    // Bit-pattern comparison: distinguishes +0.0/-0.0 and compares NaNs equal
    // only when they have the same representation.
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  }

  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }

  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  }
  // Arithmetic zero matches both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  }
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  }
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  }
  // Bit-pattern zero is +0.0 only (-0.0 has the sign bit set).
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  }
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  }
  bool IsNaN() const {
    return std::isnan(value_);
  }

  DECLARE_INSTRUCTION(DoubleConstant);

 private:
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}

  const double value_;

  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
};
2719
// Conditional branch. A block ending with an HIf instruction must have
// two successors.
// Successor 0 is taken when the input is true, successor 1 otherwise.
class HIf FINAL : public HTemplateInstruction<1> {
 public:
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  }

  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  }

  DECLARE_INSTRUCTION(If);

 private:
  DISALLOW_COPY_AND_ASSIGN(HIf);
};
2744

// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
 public:
  enum class BoundaryKind {
    kEntry,
    kExit,
    kLast = kExit
  };

  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }

  // View over the exception-handler successors (indices 1 and up).
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  }

  // Returns whether `handler` is among its exception handlers (non-zero index
  // successors).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  }

  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
    }
  }

  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

  DECLARE_INSTRUCTION(TryBoundary);

 private:
  // Packed-field layout: the boundary kind, after the generic bits.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
2807
// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize FINAL : public HTemplateInstruction<1> {
 public:
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  // Needs an environment to reconstruct interpreter state, and transfers
  // control out of compiled code when the condition holds.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Deoptimize);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
2830
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
 public:
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}

  DECLARE_INSTRUCTION(CurrentMethod);

 private:
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
};
2844
2845// Fetches an ArtMethod from the virtual table or the interface method table
2846// of a class.
2847class HClassTableGet FINAL : public HExpression<1> {
2848 public:
2849  enum class TableKind {
2850    kVTable,
2851    kIMTable,
2852    kLast = kIMTable
2853  };
2854  HClassTableGet(HInstruction* cls,
2855                 Primitive::Type type,
2856                 TableKind kind,
2857                 size_t index,
2858                 uint32_t dex_pc)
2859      : HExpression(type, SideEffects::None(), dex_pc),
2860        index_(index) {
2861    SetPackedField<TableKindField>(kind);
2862    SetRawInputAt(0, cls);
2863  }
2864
2865  bool CanBeMoved() const OVERRIDE { return true; }
2866  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2867    return other->AsClassTableGet()->GetIndex() == index_ &&
2868        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2869  }
2870
2871  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2872  size_t GetIndex() const { return index_; }
2873
2874  DECLARE_INSTRUCTION(ClassTableGet);
2875
2876 private:
2877  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2878  static constexpr size_t kFieldTableKindSize =
2879      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2880  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2881  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2882                "Too many packed fields.");
2883  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
2884
2885  // The index of the ArtMethod in the table.
2886  const size_t index_;
2887
2888  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2889};
2890
// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
// have one successor for each entry in the switch table, and the final successor
// will be the block containing the next Dex opcode.
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
    : HTemplateInstruction(SideEffects::None(), dex_pc),
      start_value_(start_value),
      num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // First case value; entry i matches input value start_value_ + i.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};
2924
// Abstract base class for operations with a single input producing a value
// (e.g. neg, not). Subclasses define the constant-folding Evaluate methods.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};
2956
// Abstract base class for operations with two inputs producing a value.
// Subclasses define commutativity and the constant-folding Evaluate methods.
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    // Already canonical: same input twice, or constant already on the right.
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation.  If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  // (long, int) is only meaningful for shifts/rotates; others must not hit it.
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};
3046
// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

// Pretty-printer, used by graph dumping and DCHECK messages.
std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
3057
// Abstract base class for boolean-producing comparison instructions
// (HEqual, HNotEqual, ...). Stores a ComparisonBias in the packed fields
// to define NaN behavior when a HCompare has been merged in.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether this floating-point condition evaluates true when an input is NaN.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  // Whether this floating-point condition evaluates false when an input is NaN.
  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way compare: 1, -1 or 0, like HCompare.
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way compare for floating point, applying the NaN bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};
3137
// Instruction to check if two inputs are equal to each other.
class HEqual FINAL : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null == null is always true.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(true, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HEqual instruction; evaluate it as
  // `Compare(x, y) == 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
                                 GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};
3182
3183class HNotEqual FINAL : public HCondition {
3184 public:
3185  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3186      : HCondition(first, second, dex_pc) {}
3187
3188  bool IsCommutative() const OVERRIDE { return true; }
3189
3190  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3191                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3192    return MakeConstantCondition(false, GetDexPc());
3193  }
3194  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3195    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3196  }
3197  // In the following Evaluate methods, a HCompare instruction has
3198  // been merged into this HNotEqual instruction; evaluate it as
3199  // `Compare(x, y) != 0`.
3200  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3201    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3202  }
3203  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3204    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3205  }
3206  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3207    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3208  }
3209
3210  DECLARE_INSTRUCTION(NotEqual);
3211
3212  IfCondition GetCondition() const OVERRIDE {
3213    return kCondNE;
3214  }
3215
3216  IfCondition GetOppositeCondition() const OVERRIDE {
3217    return kCondEQ;
3218  }
3219
3220 private:
3221  template <typename T> static bool Compute(T x, T y) { return x != y; }
3222
3223  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3224};
3225
3226class HLessThan FINAL : public HCondition {
3227 public:
3228  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3229      : HCondition(first, second, dex_pc) {}
3230
3231  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3232    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3233  }
3234  // In the following Evaluate methods, a HCompare instruction has
3235  // been merged into this HLessThan instruction; evaluate it as
3236  // `Compare(x, y) < 0`.
3237  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3238    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3239  }
3240  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3241    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3242  }
3243  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3244    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3245  }
3246
3247  DECLARE_INSTRUCTION(LessThan);
3248
3249  IfCondition GetCondition() const OVERRIDE {
3250    return kCondLT;
3251  }
3252
3253  IfCondition GetOppositeCondition() const OVERRIDE {
3254    return kCondGE;
3255  }
3256
3257 private:
3258  template <typename T> static bool Compute(T x, T y) { return x < y; }
3259
3260  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3261};
3262
3263class HLessThanOrEqual FINAL : public HCondition {
3264 public:
3265  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3266      : HCondition(first, second, dex_pc) {}
3267
3268  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3269    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3270  }
3271  // In the following Evaluate methods, a HCompare instruction has
3272  // been merged into this HLessThanOrEqual instruction; evaluate it as
3273  // `Compare(x, y) <= 0`.
3274  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3275    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3276  }
3277  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3278    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3279  }
3280  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3281    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3282  }
3283
3284  DECLARE_INSTRUCTION(LessThanOrEqual);
3285
3286  IfCondition GetCondition() const OVERRIDE {
3287    return kCondLE;
3288  }
3289
3290  IfCondition GetOppositeCondition() const OVERRIDE {
3291    return kCondGT;
3292  }
3293
3294 private:
3295  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3296
3297  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3298};
3299
3300class HGreaterThan FINAL : public HCondition {
3301 public:
3302  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3303      : HCondition(first, second, dex_pc) {}
3304
3305  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3306    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3307  }
3308  // In the following Evaluate methods, a HCompare instruction has
3309  // been merged into this HGreaterThan instruction; evaluate it as
3310  // `Compare(x, y) > 0`.
3311  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3312    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3313  }
3314  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3315    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3316  }
3317  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3318    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3319  }
3320
3321  DECLARE_INSTRUCTION(GreaterThan);
3322
3323  IfCondition GetCondition() const OVERRIDE {
3324    return kCondGT;
3325  }
3326
3327  IfCondition GetOppositeCondition() const OVERRIDE {
3328    return kCondLE;
3329  }
3330
3331 private:
3332  template <typename T> static bool Compute(T x, T y) { return x > y; }
3333
3334  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3335};
3336
3337class HGreaterThanOrEqual FINAL : public HCondition {
3338 public:
3339  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3340      : HCondition(first, second, dex_pc) {}
3341
3342  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3343    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3344  }
3345  // In the following Evaluate methods, a HCompare instruction has
3346  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3347  // `Compare(x, y) >= 0`.
3348  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3349    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3350  }
3351  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3352    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3353  }
3354  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3355    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3356  }
3357
3358  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3359
3360  IfCondition GetCondition() const OVERRIDE {
3361    return kCondGE;
3362  }
3363
3364  IfCondition GetOppositeCondition() const OVERRIDE {
3365    return kCondLT;
3366  }
3367
3368 private:
3369  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3370
3371  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3372};
3373
3374class HBelow FINAL : public HCondition {
3375 public:
3376  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3377      : HCondition(first, second, dex_pc) {}
3378
3379  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3380    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3381  }
3382  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3383    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3384  }
3385  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3386                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3387    LOG(FATAL) << DebugName() << " is not defined for float values";
3388    UNREACHABLE();
3389  }
3390  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3391                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3392    LOG(FATAL) << DebugName() << " is not defined for double values";
3393    UNREACHABLE();
3394  }
3395
3396  DECLARE_INSTRUCTION(Below);
3397
3398  IfCondition GetCondition() const OVERRIDE {
3399    return kCondB;
3400  }
3401
3402  IfCondition GetOppositeCondition() const OVERRIDE {
3403    return kCondAE;
3404  }
3405
3406 private:
3407  template <typename T> static bool Compute(T x, T y) {
3408    return MakeUnsigned(x) < MakeUnsigned(y);
3409  }
3410
3411  DISALLOW_COPY_AND_ASSIGN(HBelow);
3412};
3413
3414class HBelowOrEqual FINAL : public HCondition {
3415 public:
3416  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3417      : HCondition(first, second, dex_pc) {}
3418
3419  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3420    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3421  }
3422  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3423    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3424  }
3425  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3426                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3427    LOG(FATAL) << DebugName() << " is not defined for float values";
3428    UNREACHABLE();
3429  }
3430  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3431                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3432    LOG(FATAL) << DebugName() << " is not defined for double values";
3433    UNREACHABLE();
3434  }
3435
3436  DECLARE_INSTRUCTION(BelowOrEqual);
3437
3438  IfCondition GetCondition() const OVERRIDE {
3439    return kCondBE;
3440  }
3441
3442  IfCondition GetOppositeCondition() const OVERRIDE {
3443    return kCondA;
3444  }
3445
3446 private:
3447  template <typename T> static bool Compute(T x, T y) {
3448    return MakeUnsigned(x) <= MakeUnsigned(y);
3449  }
3450
3451  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3452};
3453
3454class HAbove FINAL : public HCondition {
3455 public:
3456  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3457      : HCondition(first, second, dex_pc) {}
3458
3459  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3460    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3461  }
3462  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3463    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3464  }
3465  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3466                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3467    LOG(FATAL) << DebugName() << " is not defined for float values";
3468    UNREACHABLE();
3469  }
3470  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3471                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3472    LOG(FATAL) << DebugName() << " is not defined for double values";
3473    UNREACHABLE();
3474  }
3475
3476  DECLARE_INSTRUCTION(Above);
3477
3478  IfCondition GetCondition() const OVERRIDE {
3479    return kCondA;
3480  }
3481
3482  IfCondition GetOppositeCondition() const OVERRIDE {
3483    return kCondBE;
3484  }
3485
3486 private:
3487  template <typename T> static bool Compute(T x, T y) {
3488    return MakeUnsigned(x) > MakeUnsigned(y);
3489  }
3490
3491  DISALLOW_COPY_AND_ASSIGN(HAbove);
3492};
3493
3494class HAboveOrEqual FINAL : public HCondition {
3495 public:
3496  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3497      : HCondition(first, second, dex_pc) {}
3498
3499  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3500    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3501  }
3502  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3503    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3504  }
3505  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3506                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3507    LOG(FATAL) << DebugName() << " is not defined for float values";
3508    UNREACHABLE();
3509  }
3510  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3511                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3512    LOG(FATAL) << DebugName() << " is not defined for double values";
3513    UNREACHABLE();
3514  }
3515
3516  DECLARE_INSTRUCTION(AboveOrEqual);
3517
3518  IfCondition GetCondition() const OVERRIDE {
3519    return kCondAE;
3520  }
3521
3522  IfCondition GetOppositeCondition() const OVERRIDE {
3523    return kCondB;
3524  }
3525
3526 private:
3527  template <typename T> static bool Compute(T x, T y) {
3528    return MakeUnsigned(x) >= MakeUnsigned(y);
3529  }
3530
3531  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3532};
3533
// Instruction to check how two inputs compare to each other.
// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
class HCompare FINAL : public HBinaryOperation {
 public:
  // Note that `comparison_type` is the type of comparison performed
  // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always Primitive::kPrimInt).
  HCompare(Primitive::Type comparison_type,
           HInstruction* first,
           HInstruction* second,
           ComparisonBias bias,
           uint32_t dex_pc)
      : HBinaryOperation(Primitive::kPrimInt,
                         first,
                         second,
                         SideEffectsForArchRuntimeCalls(comparison_type),
                         dex_pc) {
    SetPackedField<ComparisonBiasField>(bias);
    // Both inputs must agree with the declared comparison type.
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
  }

  // Three-way comparison: returns 1 if x > y, -1 if x < y, 0 otherwise.
  template <typename T>
  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  // Three-way comparison for floating-point inputs; an unordered result
  // (either operand is NaN) is resolved according to the comparison bias.
  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    // Note that there is no "cmp-int" Dex instruction so we shouldn't
    // reach this code path when processing a freshly built HIR
    // graph. However HCompare integer instructions can be synthesized
    // by the instruction simplifier to implement IntegerCompare and
    // IntegerSignum intrinsics, so we have to handle this case.
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  // Two compares are data-equal when all their packed fields (including
  // the comparison bias) match.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCompare()->GetPackedFields();
  }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
  // Only meaningful for floating-point comparisons.
  bool IsGtBias() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    return GetBias() == ComparisonBias::kGtBias;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
    // Comparisons do not require a runtime call in any back end.
    return SideEffects::None();
  }

  DECLARE_INSTRUCTION(Compare);

 protected:
  // Packed-field layout: the comparison bias is stored right after the
  // bits used by the HExpression base.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfComparePackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Return an integer constant containing the result of a comparison evaluated at compile time.
  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
    DCHECK(value == -1 || value == 0 || value == 1) << value;
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCompare);
};
3624
// Instruction allocating a new object instance.
// Inputs: the class to instantiate (input 0) and the current method (input 1).
// Produces a reference (kPrimNot) and may trigger GC.
class HNewInstance FINAL : public HExpression<2> {
 public:
  HNewInstance(HInstruction* cls,
               HCurrentMethod* current_method,
               uint32_t dex_pc,
               uint16_t type_index,
               const DexFile& dex_file,
               bool needs_access_check,
               bool finalizable,
               QuickEntrypointEnum entrypoint)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
        type_index_(type_index),
        dex_file_(dex_file),
        entrypoint_(entrypoint) {
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagFinalizable>(finalizable);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, current_method);
  }

  uint16_t GetTypeIndex() const { return type_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // Can throw errors when out-of-memory or if it's not instantiable/accessible.
  bool CanThrow() const OVERRIDE { return true; }

  // Needs to call into runtime to make sure it's instantiable/accessible.
  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }

  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

  // A freshly allocated object is never null.
  bool CanBeNull() const OVERRIDE { return false; }

  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }

  // Replace the allocation entrypoint chosen at construction time.
  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
    entrypoint_ = entrypoint;
  }

  bool IsStringAlloc() const;

  DECLARE_INSTRUCTION(NewInstance);

 private:
  // Packed-field layout: two flags stored after the bits used by HExpression.
  static constexpr size_t kFlagNeedsAccessCheck = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagFinalizable = kFlagNeedsAccessCheck + 1;
  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  // Type index of the class to instantiate, paired with dex_file_.
  const uint16_t type_index_;
  const DexFile& dex_file_;
  // Runtime entrypoint used for the allocation; mutable via SetEntrypoint().
  QuickEntrypointEnum entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
3684
// Enumeration of all method intrinsics recognized by the optimizing compiler.
// One kName enumerator is generated per entry of intrinsics_list.h; kNone
// denotes "not an intrinsic".
enum class Intrinsics {
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
  k ## Name,
#include "intrinsics_list.h"
  kNone,
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
};
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
3695
// Property flags passed to HInvoke::SetIntrinsic() when an invoke is
// recognized as an intrinsic; they refine the invoke's environment needs,
// heap side effects and exception behavior.
enum IntrinsicNeedsEnvironmentOrCache {
  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
};

enum IntrinsicSideEffects {
  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
  kReadSideEffects,   // Intrinsic may read heap memory.
  kWriteSideEffects,  // Intrinsic may write heap memory.
  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
};

enum IntrinsicExceptions {
  kNoThrow,  // Intrinsic does not throw any exceptions.
  kCanThrow  // Intrinsic may throw exceptions.
};
3712
// Abstract base class of all invoke instructions. Holds the argument inputs
// (plus possible trailing non-argument inputs), the dex method index, the
// return type and the intrinsic information once one is recognized.
class HInvoke : public HInstruction {
 public:
  bool NeedsEnvironment() const OVERRIDE;

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void SetArgumentAt(size_t index, HInstruction* argument) {
    SetRawInputAt(index, argument);
  }

  // Return the number of arguments.  This number can be lower than
  // the number of inputs returned by InputCount(), as some invoke
  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
  // inputs at the end of their list of inputs.
  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

  // The return type is stored as a packed field.
  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
  // The dex file is retrieved via the environment, so one must be attached.
  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

  InvokeType GetOriginalInvokeType() const {
    return GetPackedField<OriginalInvokeTypeField>();
  }

  Intrinsics GetIntrinsic() const {
    return intrinsic_;
  }

  // Mark this invoke as a recognized intrinsic and refine its environment
  // needs, side effects and exception behavior accordingly.
  void SetIntrinsic(Intrinsics intrinsic,
                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                    IntrinsicSideEffects side_effects,
                    IntrinsicExceptions exceptions);

  bool IsFromInlinedInvoke() const {
    return GetEnvironment()->IsFromInlinedInvoke();
  }

  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

  // Only recognized intrinsics are safe to move.
  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }

  // Two invokes are data-equal only when both are the same (non-kNone) intrinsic.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
  }

  uint32_t* GetIntrinsicOptimizations() {
    return &intrinsic_optimizations_;
  }

  const uint32_t* GetIntrinsicOptimizations() const {
    return &intrinsic_optimizations_;
  }

  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }

  DECLARE_ABSTRACT_INSTRUCTION(Invoke);

 protected:
  // Packed-field layout: original invoke type, then return type, then a
  // can-throw flag, stored after the generic HInstruction bits.
  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOriginalInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldReturnType =
      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
  static constexpr size_t kFieldReturnTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using OriginalInvokeTypeField =
      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;

  // `number_of_other_inputs` counts trailing non-argument inputs, e.g. the
  // current method or an explicit clinit check (see HInvokeStaticOrDirect).
  HInvoke(ArenaAllocator* arena,
          uint32_t number_of_arguments,
          uint32_t number_of_other_inputs,
          Primitive::Type return_type,
          uint32_t dex_pc,
          uint32_t dex_method_index,
          InvokeType original_invoke_type)
    : HInstruction(
          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
      number_of_arguments_(number_of_arguments),
      inputs_(number_of_arguments + number_of_other_inputs,
              arena->Adapter(kArenaAllocInvokeInputs)),
      dex_method_index_(dex_method_index),
      intrinsic_(Intrinsics::kNone),
      intrinsic_optimizations_(0) {
    SetPackedField<ReturnTypeField>(return_type);
    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
    // Invokes are conservatively assumed to throw until proven otherwise.
    SetPackedFlag<kFlagCanThrow>(true);
  }

  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

  uint32_t number_of_arguments_;
  ArenaVector<HUserRecord<HInstruction*>> inputs_;
  const uint32_t dex_method_index_;
  Intrinsics intrinsic_;

  // A magic word holding optimizations for intrinsics. See intrinsics.h.
  uint32_t intrinsic_optimizations_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvoke);
};
3822
// Invoke of a method whose target could not be resolved at compile time.
// Carries only its arguments (number_of_other_inputs is 0).
class HInvokeUnresolved FINAL : public HInvoke {
 public:
  HInvokeUnresolved(ArenaAllocator* arena,
                    uint32_t number_of_arguments,
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
                    InvokeType invoke_type)
      : HInvoke(arena,
                number_of_arguments,
                0u /* number_of_other_inputs */,
                return_type,
                dex_pc,
                dex_method_index,
                invoke_type) {
  }

  DECLARE_INSTRUCTION(InvokeUnresolved);

 private:
  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
3845
3846class HInvokeStaticOrDirect FINAL : public HInvoke {
3847 public:
3848  // Requirements of this method call regarding the class
3849  // initialization (clinit) check of its declaring class.
3850  enum class ClinitCheckRequirement {
3851    kNone,      // Class already initialized.
3852    kExplicit,  // Static call having explicit clinit check as last input.
3853    kImplicit,  // Static call implicitly requiring a clinit check.
3854    kLast = kImplicit
3855  };
3856
3857  // Determines how to load the target ArtMethod*.
3858  enum class MethodLoadKind {
3859    // Use a String init ArtMethod* loaded from Thread entrypoints.
3860    kStringInit,
3861
3862    // Use the method's own ArtMethod* loaded by the register allocator.
3863    kRecursive,
3864
3865    // Use ArtMethod* at a known address, embed the direct address in the code.
3866    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3867    kDirectAddress,
3868
3869    // Use ArtMethod* at an address that will be known at link time, embed the direct
3870    // address in the code. If the image is relocatable, emit .patch_oat entry.
3871    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3872    // the image relocatable or not.
3873    kDirectAddressWithFixup,
3874
3875    // Load from resolved methods array in the dex cache using a PC-relative load.
3876    // Used when we need to use the dex cache, for example for invoke-static that
3877    // may cause class initialization (the entry may point to a resolution method),
3878    // and we know that we can access the dex cache arrays using a PC-relative load.
3879    kDexCachePcRelative,
3880
3881    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3882    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3883    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3884    // or unimplemented or impractical (i.e. slow) on a particular architecture.
3885    kDexCacheViaMethod,
3886  };
3887
3888  // Determines the location of the code pointer.
3889  enum class CodePtrLocation {
3890    // Recursive call, use local PC-relative call instruction.
3891    kCallSelf,
3892
3893    // Use PC-relative call instruction patched at link time.
3894    // Used for calls within an oat file, boot->boot or app->app.
3895    kCallPCRelative,
3896
3897    // Call to a known target address, embed the direct address in code.
3898    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3899    kCallDirect,
3900
3901    // Call to a target address that will be known at link time, embed the direct
3902    // address in code. If the image is relocatable, emit .patch_oat entry.
3903    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3904    // the image relocatable or not.
3905    kCallDirectWithFixup,
3906
3907    // Use code pointer from the ArtMethod*.
3908    // Used when we don't know the target code. This is also the last-resort-kind used when
3909    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3910    kCallArtMethod,
3911  };
3912
3913  struct DispatchInfo {
3914    MethodLoadKind method_load_kind;
3915    CodePtrLocation code_ptr_location;
3916    // The method load data holds
3917    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
3918    //     Note that there are multiple string init methods, each having its own offset.
3919    //   - the method address for kDirectAddress
3920    //   - the dex cache arrays offset for kDexCachePcRel.
3921    uint64_t method_load_data;
3922    uint64_t direct_code_ptr;
3923  };
3924
  // Constructs a static or direct invoke. Beyond the actual arguments, up to
  // two extra input slots are reserved in HInvoke: one for the HCurrentMethod
  // node (when the method load kind needs it) and one for an explicit clinit
  // check.
  HInvokeStaticOrDirect(ArenaAllocator* arena,
                        uint32_t number_of_arguments,
                        Primitive::Type return_type,
                        uint32_t dex_pc,
                        uint32_t method_index,
                        MethodReference target_method,
                        DispatchInfo dispatch_info,
                        InvokeType original_invoke_type,
                        InvokeType optimized_invoke_type,
                        ClinitCheckRequirement clinit_check_requirement)
      : HInvoke(arena,
                number_of_arguments,
                // There is potentially one extra argument for the HCurrentMethod node, and
                // potentially one other if the clinit check is explicit.
                // NOTE(review): a string-factory extra argument was also mentioned here
                // historically, but no slot is reserved for it below — confirm intent.
                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
                return_type,
                dex_pc,
                method_index,
                original_invoke_type),
        target_method_(target_method),
        dispatch_info_(dispatch_info) {
    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
  }
3951
  // Replaces the dispatch info. If the new method load kind no longer needs
  // the HCurrentMethod special input, that input is removed here.
  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
    bool had_current_method_input = HasCurrentMethodInput();
    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);

    // Using the current method is the default and once we find a better
    // method load kind, we should not go back to using the current method.
    DCHECK(had_current_method_input || !needs_current_method_input);

    if (had_current_method_input && !needs_current_method_input) {
      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
      RemoveInputAt(GetSpecialInputIndex());
    }
    dispatch_info_ = dispatch_info;
  }
3966
  // Inserts `input` at the special input position, i.e. right after the last
  // actual argument (see GetSpecialInputIndex()).
  void AddSpecialInput(HInstruction* input) {
    // We allow only one special input.
    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
    // The only other input allowed past the arguments is the trailing
    // clinit check of a static invoke with an explicit check.
    DCHECK(InputCount() == GetSpecialInputIndex() ||
           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
    InsertInputAt(GetSpecialInputIndex(), input);
  }
3974
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  // Returns the input records, additionally verifying in debug builds that a
  // static invoke with an explicit clinit check carries the expected
  // HClinitCheck/HLoadClass as its last input.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
      DCHECK(!input_records.empty());
      DCHECK_GT(input_records.size(), GetNumberOfArguments());
      HInstruction* last_input = input_records.back().GetInstruction();
      // Note: `last_input` may be null during arguments setup.
      if (last_input != nullptr) {
        // `last_input` is the last input of a static invoke marked as having
        // an explicit clinit check. It must either be:
        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
      }
    }
    return input_records;
  }
3993
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // We access the method via the dex cache so we can't do an implicit null check.
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }
3999
4000  bool CanBeNull() const OVERRIDE {
4001    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
4002  }
4003
  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  // True when an extra input beyond the actual arguments is present.
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  // The invoke type after sharpening, possibly different from the invoke
  // type recorded in the dex code.
  InvokeType GetOptimizedInvokeType() const {
    return GetPackedField<OptimizedInvokeTypeField>();
  }

  void SetOptimizedInvokeType(InvokeType invoke_type) {
    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
  }

  // Dispatch info accessors and convenience predicates on its kinds.
  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeDexCache() const {
    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
  }
  // Whether the HCurrentMethod node occupies the special input slot.
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      // Either there is no special input, or it is something other than
      // the current method (e.g. a PC-relative base).
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }
  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
  MethodReference GetTargetMethod() const { return target_method_; }
  void SetTargetMethod(MethodReference method) { target_method_ = method; }
4043
4044  int32_t GetStringInitOffset() const {
4045    DCHECK(IsStringInit());
4046    return dispatch_info_.method_load_data;
4047  }
4048
  // Direct method address, valid only for the kDirectAddress load kind.
  uint64_t GetMethodAddress() const {
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }
4053
4054  uint32_t GetDexCacheArrayOffset() const {
4055    DCHECK(HasPcRelativeDexCache());
4056    return dispatch_info_.method_load_data;
4057  }
4058
  // Target code address, valid only for the kCallDirect code pointer location.
  uint64_t GetDirectCodePtr() const {
    DCHECK(HasDirectCodePtr());
    return dispatch_info_.direct_code_ptr;
  }

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }
4067
  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetOriginalInvokeType() == kStatic;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    // Downgrade the requirement so the invoke no longer claims an explicit check.
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }
4087
  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
  }
4104
  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 protected:
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

 private:
  // Packed field layout: the optimized invoke type and the clinit check
  // requirement are stored in the instruction's packed bits, right after
  // the bits already used by HInvoke.
  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldOptimizedInvokeTypeSize =
      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
  static constexpr size_t kFieldClinitCheckRequirement =
      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using OptimizedInvokeTypeField =
      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // The target method may refer to different dex file or method index than the original
  // invoke. This happens for sharpened calls and for calls where a method was redeclared
  // in derived class to increase visibility.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4139
4140class HInvokeVirtual FINAL : public HInvoke {
4141 public:
4142  HInvokeVirtual(ArenaAllocator* arena,
4143                 uint32_t number_of_arguments,
4144                 Primitive::Type return_type,
4145                 uint32_t dex_pc,
4146                 uint32_t dex_method_index,
4147                 uint32_t vtable_index)
4148      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
4149        vtable_index_(vtable_index) {}
4150
4151  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4152    // TODO: Add implicit null checks in intrinsics.
4153    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4154  }
4155
4156  uint32_t GetVTableIndex() const { return vtable_index_; }
4157
4158  DECLARE_INSTRUCTION(InvokeVirtual);
4159
4160 private:
4161  const uint32_t vtable_index_;
4162
4163  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
4164};
4165
4166class HInvokeInterface FINAL : public HInvoke {
4167 public:
4168  HInvokeInterface(ArenaAllocator* arena,
4169                   uint32_t number_of_arguments,
4170                   Primitive::Type return_type,
4171                   uint32_t dex_pc,
4172                   uint32_t dex_method_index,
4173                   uint32_t imt_index)
4174      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
4175        imt_index_(imt_index) {}
4176
4177  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4178    // TODO: Add implicit null checks in intrinsics.
4179    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4180  }
4181
4182  uint32_t GetImtIndex() const { return imt_index_; }
4183  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4184
4185  DECLARE_INSTRUCTION(InvokeInterface);
4186
4187 private:
4188  const uint32_t imt_index_;
4189
4190  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4191};
4192
4193class HNeg FINAL : public HUnaryOperation {
4194 public:
4195  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4196      : HUnaryOperation(result_type, input, dex_pc) {
4197    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4198  }
4199
4200  template <typename T> static T Compute(T x) { return -x; }
4201
4202  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4203    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4204  }
4205  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4206    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4207  }
4208  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4209    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4210  }
4211  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4212    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4213  }
4214
4215  DECLARE_INSTRUCTION(Neg);
4216
4217 private:
4218  DISALLOW_COPY_AND_ASSIGN(HNeg);
4219};
4220
4221class HNewArray FINAL : public HExpression<2> {
4222 public:
4223  HNewArray(HInstruction* length,
4224            HCurrentMethod* current_method,
4225            uint32_t dex_pc,
4226            uint16_t type_index,
4227            const DexFile& dex_file,
4228            QuickEntrypointEnum entrypoint)
4229      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
4230        type_index_(type_index),
4231        dex_file_(dex_file),
4232        entrypoint_(entrypoint) {
4233    SetRawInputAt(0, length);
4234    SetRawInputAt(1, current_method);
4235  }
4236
4237  uint16_t GetTypeIndex() const { return type_index_; }
4238  const DexFile& GetDexFile() const { return dex_file_; }
4239
4240  // Calls runtime so needs an environment.
4241  bool NeedsEnvironment() const OVERRIDE { return true; }
4242
4243  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4244  bool CanThrow() const OVERRIDE { return true; }
4245
4246  bool CanBeNull() const OVERRIDE { return false; }
4247
4248  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4249
4250  DECLARE_INSTRUCTION(NewArray);
4251
4252 private:
4253  const uint16_t type_index_;
4254  const DexFile& dex_file_;
4255  const QuickEntrypointEnum entrypoint_;
4256
4257  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4258};
4259
4260class HAdd FINAL : public HBinaryOperation {
4261 public:
4262  HAdd(Primitive::Type result_type,
4263       HInstruction* left,
4264       HInstruction* right,
4265       uint32_t dex_pc = kNoDexPc)
4266      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4267
4268  bool IsCommutative() const OVERRIDE { return true; }
4269
4270  template <typename T> static T Compute(T x, T y) { return x + y; }
4271
4272  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4273    return GetBlock()->GetGraph()->GetIntConstant(
4274        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4275  }
4276  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4277    return GetBlock()->GetGraph()->GetLongConstant(
4278        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4279  }
4280  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4281    return GetBlock()->GetGraph()->GetFloatConstant(
4282        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4283  }
4284  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4285    return GetBlock()->GetGraph()->GetDoubleConstant(
4286        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4287  }
4288
4289  DECLARE_INSTRUCTION(Add);
4290
4291 private:
4292  DISALLOW_COPY_AND_ASSIGN(HAdd);
4293};
4294
4295class HSub FINAL : public HBinaryOperation {
4296 public:
4297  HSub(Primitive::Type result_type,
4298       HInstruction* left,
4299       HInstruction* right,
4300       uint32_t dex_pc = kNoDexPc)
4301      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4302
4303  template <typename T> static T Compute(T x, T y) { return x - y; }
4304
4305  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4306    return GetBlock()->GetGraph()->GetIntConstant(
4307        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4308  }
4309  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4310    return GetBlock()->GetGraph()->GetLongConstant(
4311        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4312  }
4313  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4314    return GetBlock()->GetGraph()->GetFloatConstant(
4315        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4316  }
4317  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4318    return GetBlock()->GetGraph()->GetDoubleConstant(
4319        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4320  }
4321
4322  DECLARE_INSTRUCTION(Sub);
4323
4324 private:
4325  DISALLOW_COPY_AND_ASSIGN(HSub);
4326};
4327
4328class HMul FINAL : public HBinaryOperation {
4329 public:
4330  HMul(Primitive::Type result_type,
4331       HInstruction* left,
4332       HInstruction* right,
4333       uint32_t dex_pc = kNoDexPc)
4334      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4335
4336  bool IsCommutative() const OVERRIDE { return true; }
4337
4338  template <typename T> static T Compute(T x, T y) { return x * y; }
4339
4340  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4341    return GetBlock()->GetGraph()->GetIntConstant(
4342        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4343  }
4344  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4345    return GetBlock()->GetGraph()->GetLongConstant(
4346        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4347  }
4348  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4349    return GetBlock()->GetGraph()->GetFloatConstant(
4350        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4351  }
4352  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4353    return GetBlock()->GetGraph()->GetDoubleConstant(
4354        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4355  }
4356
4357  DECLARE_INSTRUCTION(Mul);
4358
4359 private:
4360  DISALLOW_COPY_AND_ASSIGN(HMul);
4361};
4362
4363class HDiv FINAL : public HBinaryOperation {
4364 public:
4365  HDiv(Primitive::Type result_type,
4366       HInstruction* left,
4367       HInstruction* right,
4368       uint32_t dex_pc)
4369      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4370
4371  template <typename T>
4372  T ComputeIntegral(T x, T y) const {
4373    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4374    // Our graph structure ensures we never have 0 for `y` during
4375    // constant folding.
4376    DCHECK_NE(y, 0);
4377    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4378    return (y == -1) ? -x : x / y;
4379  }
4380
4381  template <typename T>
4382  T ComputeFP(T x, T y) const {
4383    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4384    return x / y;
4385  }
4386
4387  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4388    return GetBlock()->GetGraph()->GetIntConstant(
4389        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4390  }
4391  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4392    return GetBlock()->GetGraph()->GetLongConstant(
4393        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4394  }
4395  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4396    return GetBlock()->GetGraph()->GetFloatConstant(
4397        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4398  }
4399  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4400    return GetBlock()->GetGraph()->GetDoubleConstant(
4401        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4402  }
4403
4404  static SideEffects SideEffectsForArchRuntimeCalls() {
4405    // The generated code can use a runtime call.
4406    return SideEffects::CanTriggerGC();
4407  }
4408
4409  DECLARE_INSTRUCTION(Div);
4410
4411 private:
4412  DISALLOW_COPY_AND_ASSIGN(HDiv);
4413};
4414
4415class HRem FINAL : public HBinaryOperation {
4416 public:
4417  HRem(Primitive::Type result_type,
4418       HInstruction* left,
4419       HInstruction* right,
4420       uint32_t dex_pc)
4421      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4422
4423  template <typename T>
4424  T ComputeIntegral(T x, T y) const {
4425    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4426    // Our graph structure ensures we never have 0 for `y` during
4427    // constant folding.
4428    DCHECK_NE(y, 0);
4429    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4430    return (y == -1) ? 0 : x % y;
4431  }
4432
4433  template <typename T>
4434  T ComputeFP(T x, T y) const {
4435    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4436    return std::fmod(x, y);
4437  }
4438
4439  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4440    return GetBlock()->GetGraph()->GetIntConstant(
4441        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4442  }
4443  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4444    return GetBlock()->GetGraph()->GetLongConstant(
4445        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4446  }
4447  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4448    return GetBlock()->GetGraph()->GetFloatConstant(
4449        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4450  }
4451  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4452    return GetBlock()->GetGraph()->GetDoubleConstant(
4453        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4454  }
4455
4456  static SideEffects SideEffectsForArchRuntimeCalls() {
4457    return SideEffects::CanTriggerGC();
4458  }
4459
4460  DECLARE_INSTRUCTION(Rem);
4461
4462 private:
4463  DISALLOW_COPY_AND_ASSIGN(HRem);
4464};
4465
4466class HDivZeroCheck FINAL : public HExpression<1> {
4467 public:
4468  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
4469  // constructor.
4470  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
4471      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4472    SetRawInputAt(0, value);
4473  }
4474
4475  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }
4476
4477  bool CanBeMoved() const OVERRIDE { return true; }
4478
4479  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4480    return true;
4481  }
4482
4483  bool NeedsEnvironment() const OVERRIDE { return true; }
4484  bool CanThrow() const OVERRIDE { return true; }
4485
4486  DECLARE_INSTRUCTION(DivZeroCheck);
4487
4488 private:
4489  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
4490};
4491
4492class HShl FINAL : public HBinaryOperation {
4493 public:
4494  HShl(Primitive::Type result_type,
4495       HInstruction* value,
4496       HInstruction* distance,
4497       uint32_t dex_pc = kNoDexPc)
4498      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4499    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4500    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4501  }
4502
4503  template <typename T>
4504  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4505    return value << (distance & max_shift_distance);
4506  }
4507
4508  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4509    return GetBlock()->GetGraph()->GetIntConstant(
4510        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4511  }
4512  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4513    return GetBlock()->GetGraph()->GetLongConstant(
4514        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4515  }
4516  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4517                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4518    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4519    UNREACHABLE();
4520  }
4521  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4522                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4523    LOG(FATAL) << DebugName() << " is not defined for float values";
4524    UNREACHABLE();
4525  }
4526  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4527                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4528    LOG(FATAL) << DebugName() << " is not defined for double values";
4529    UNREACHABLE();
4530  }
4531
4532  DECLARE_INSTRUCTION(Shl);
4533
4534 private:
4535  DISALLOW_COPY_AND_ASSIGN(HShl);
4536};
4537
4538class HShr FINAL : public HBinaryOperation {
4539 public:
4540  HShr(Primitive::Type result_type,
4541       HInstruction* value,
4542       HInstruction* distance,
4543       uint32_t dex_pc = kNoDexPc)
4544      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4545    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4546    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4547  }
4548
4549  template <typename T>
4550  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4551    return value >> (distance & max_shift_distance);
4552  }
4553
4554  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4555    return GetBlock()->GetGraph()->GetIntConstant(
4556        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4557  }
4558  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4559    return GetBlock()->GetGraph()->GetLongConstant(
4560        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4561  }
4562  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4563                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4564    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4565    UNREACHABLE();
4566  }
4567  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4568                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4569    LOG(FATAL) << DebugName() << " is not defined for float values";
4570    UNREACHABLE();
4571  }
4572  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4573                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4574    LOG(FATAL) << DebugName() << " is not defined for double values";
4575    UNREACHABLE();
4576  }
4577
4578  DECLARE_INSTRUCTION(Shr);
4579
4580 private:
4581  DISALLOW_COPY_AND_ASSIGN(HShr);
4582};
4583
4584class HUShr FINAL : public HBinaryOperation {
4585 public:
4586  HUShr(Primitive::Type result_type,
4587        HInstruction* value,
4588        HInstruction* distance,
4589        uint32_t dex_pc = kNoDexPc)
4590      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4591    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4592    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4593  }
4594
4595  template <typename T>
4596  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4597    typedef typename std::make_unsigned<T>::type V;
4598    V ux = static_cast<V>(value);
4599    return static_cast<T>(ux >> (distance & max_shift_distance));
4600  }
4601
4602  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4603    return GetBlock()->GetGraph()->GetIntConstant(
4604        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4605  }
4606  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4607    return GetBlock()->GetGraph()->GetLongConstant(
4608        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4609  }
4610  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4611                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4612    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4613    UNREACHABLE();
4614  }
4615  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4616                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4617    LOG(FATAL) << DebugName() << " is not defined for float values";
4618    UNREACHABLE();
4619  }
4620  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4621                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4622    LOG(FATAL) << DebugName() << " is not defined for double values";
4623    UNREACHABLE();
4624  }
4625
4626  DECLARE_INSTRUCTION(UShr);
4627
4628 private:
4629  DISALLOW_COPY_AND_ASSIGN(HUShr);
4630};
4631
4632class HAnd FINAL : public HBinaryOperation {
4633 public:
4634  HAnd(Primitive::Type result_type,
4635       HInstruction* left,
4636       HInstruction* right,
4637       uint32_t dex_pc = kNoDexPc)
4638      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4639
4640  bool IsCommutative() const OVERRIDE { return true; }
4641
4642  template <typename T> static T Compute(T x, T y) { return x & y; }
4643
4644  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4645    return GetBlock()->GetGraph()->GetIntConstant(
4646        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4647  }
4648  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4649    return GetBlock()->GetGraph()->GetLongConstant(
4650        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4651  }
4652  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4653                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4654    LOG(FATAL) << DebugName() << " is not defined for float values";
4655    UNREACHABLE();
4656  }
4657  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4658                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4659    LOG(FATAL) << DebugName() << " is not defined for double values";
4660    UNREACHABLE();
4661  }
4662
4663  DECLARE_INSTRUCTION(And);
4664
4665 private:
4666  DISALLOW_COPY_AND_ASSIGN(HAnd);
4667};
4668
4669class HOr FINAL : public HBinaryOperation {
4670 public:
4671  HOr(Primitive::Type result_type,
4672      HInstruction* left,
4673      HInstruction* right,
4674      uint32_t dex_pc = kNoDexPc)
4675      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4676
4677  bool IsCommutative() const OVERRIDE { return true; }
4678
4679  template <typename T> static T Compute(T x, T y) { return x | y; }
4680
4681  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4682    return GetBlock()->GetGraph()->GetIntConstant(
4683        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4684  }
4685  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4686    return GetBlock()->GetGraph()->GetLongConstant(
4687        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4688  }
4689  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4690                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4691    LOG(FATAL) << DebugName() << " is not defined for float values";
4692    UNREACHABLE();
4693  }
4694  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4695                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4696    LOG(FATAL) << DebugName() << " is not defined for double values";
4697    UNREACHABLE();
4698  }
4699
4700  DECLARE_INSTRUCTION(Or);
4701
4702 private:
4703  DISALLOW_COPY_AND_ASSIGN(HOr);
4704};
4705
4706class HXor FINAL : public HBinaryOperation {
4707 public:
4708  HXor(Primitive::Type result_type,
4709       HInstruction* left,
4710       HInstruction* right,
4711       uint32_t dex_pc = kNoDexPc)
4712      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4713
4714  bool IsCommutative() const OVERRIDE { return true; }
4715
4716  template <typename T> static T Compute(T x, T y) { return x ^ y; }
4717
4718  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4719    return GetBlock()->GetGraph()->GetIntConstant(
4720        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4721  }
4722  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4723    return GetBlock()->GetGraph()->GetLongConstant(
4724        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4725  }
4726  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4727                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4728    LOG(FATAL) << DebugName() << " is not defined for float values";
4729    UNREACHABLE();
4730  }
4731  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4732                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4733    LOG(FATAL) << DebugName() << " is not defined for double values";
4734    UNREACHABLE();
4735  }
4736
4737  DECLARE_INSTRUCTION(Xor);
4738
4739 private:
4740  DISALLOW_COPY_AND_ASSIGN(HXor);
4741};
4742
4743class HRor FINAL : public HBinaryOperation {
4744 public:
4745  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4746    : HBinaryOperation(result_type, value, distance) {
4747    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4748    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4749  }
4750
4751  template <typename T>
4752  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
4753    typedef typename std::make_unsigned<T>::type V;
4754    V ux = static_cast<V>(value);
4755    if ((distance & max_shift_value) == 0) {
4756      return static_cast<T>(ux);
4757    } else {
4758      const V reg_bits = sizeof(T) * 8;
4759      return static_cast<T>(ux >> (distance & max_shift_value)) |
4760                           (value << (reg_bits - (distance & max_shift_value)));
4761    }
4762  }
4763
4764  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4765    return GetBlock()->GetGraph()->GetIntConstant(
4766        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4767  }
4768  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4769    return GetBlock()->GetGraph()->GetLongConstant(
4770        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4771  }
4772  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4773                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4774    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4775    UNREACHABLE();
4776  }
4777  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4778                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4779    LOG(FATAL) << DebugName() << " is not defined for float values";
4780    UNREACHABLE();
4781  }
4782  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4783                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4784    LOG(FATAL) << DebugName() << " is not defined for double values";
4785    UNREACHABLE();
4786  }
4787
4788  DECLARE_INSTRUCTION(Ror);
4789
4790 private:
4791  DISALLOW_COPY_AND_ASSIGN(HRor);
4792};
4793
4794// The value of a parameter in this method. Its location depends on
4795// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  // `dex_file`/`type_index` identify the parameter's declared type; `index` is
  // the parameter's position within this method's parameter list.
  HParameterValue(const DexFile& dex_file,
                  uint16_t type_index,
                  uint8_t index,
                  Primitive::Type parameter_type,
                  bool is_this = false)
      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    // The implicit 'this' argument can never be null; any other parameter is
    // assumed nullable until SetCanBeNull() says otherwise.
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  uint16_t GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const uint16_t type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};
4837
4838class HNot FINAL : public HUnaryOperation {
4839 public:
4840  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4841      : HUnaryOperation(result_type, input, dex_pc) {}
4842
4843  bool CanBeMoved() const OVERRIDE { return true; }
4844  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4845    return true;
4846  }
4847
4848  template <typename T> static T Compute(T x) { return ~x; }
4849
4850  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4851    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4852  }
4853  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4854    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4855  }
4856  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4857    LOG(FATAL) << DebugName() << " is not defined for float values";
4858    UNREACHABLE();
4859  }
4860  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4861    LOG(FATAL) << DebugName() << " is not defined for double values";
4862    UNREACHABLE();
4863  }
4864
4865  DECLARE_INSTRUCTION(Not);
4866
4867 private:
4868  DISALLOW_COPY_AND_ASSIGN(HNot);
4869};
4870
4871class HBooleanNot FINAL : public HUnaryOperation {
4872 public:
4873  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
4874      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
4875
4876  bool CanBeMoved() const OVERRIDE { return true; }
4877  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4878    return true;
4879  }
4880
4881  template <typename T> static bool Compute(T x) {
4882    DCHECK(IsUint<1>(x)) << x;
4883    return !x;
4884  }
4885
4886  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4887    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4888  }
4889  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4890    LOG(FATAL) << DebugName() << " is not defined for long values";
4891    UNREACHABLE();
4892  }
4893  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4894    LOG(FATAL) << DebugName() << " is not defined for float values";
4895    UNREACHABLE();
4896  }
4897  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4898    LOG(FATAL) << DebugName() << " is not defined for double values";
4899    UNREACHABLE();
4900  }
4901
4902  DECLARE_INSTRUCTION(BooleanNot);
4903
4904 private:
4905  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
4906};
4907
4908class HTypeConversion FINAL : public HExpression<1> {
4909 public:
4910  // Instantiate a type conversion of `input` to `result_type`.
4911  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
4912      : HExpression(result_type,
4913                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
4914                    dex_pc) {
4915    SetRawInputAt(0, input);
4916    // Invariant: We should never generate a conversion to a Boolean value.
4917    DCHECK_NE(Primitive::kPrimBoolean, result_type);
4918  }
4919
4920  HInstruction* GetInput() const { return InputAt(0); }
4921  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
4922  Primitive::Type GetResultType() const { return GetType(); }
4923
4924  bool CanBeMoved() const OVERRIDE { return true; }
4925  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4926    return true;
4927  }
4928
4929  // Try to statically evaluate the conversion and return a HConstant
4930  // containing the result.  If the input cannot be converted, return nullptr.
4931  HConstant* TryStaticEvaluation() const;
4932
4933  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
4934                                                    Primitive::Type result_type) {
4935    // Some architectures may not require the 'GC' side effects, but at this point
4936    // in the compilation process we do not know what architecture we will
4937    // generate code for, so we must be conservative.
4938    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
4939        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
4940      return SideEffects::CanTriggerGC();
4941    }
4942    return SideEffects::None();
4943  }
4944
4945  DECLARE_INSTRUCTION(TypeConversion);
4946
4947 private:
4948  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
4949};
4950
// Sentinel "no register assigned" marker; the -1 deliberately converts to the
// maximum uint32_t value.
static constexpr uint32_t kNoRegNumber = -1;
4952
4953class HNullCheck FINAL : public HExpression<1> {
4954 public:
4955  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
4956  // constructor.
4957  HNullCheck(HInstruction* value, uint32_t dex_pc)
4958      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4959    SetRawInputAt(0, value);
4960  }
4961
4962  bool CanBeMoved() const OVERRIDE { return true; }
4963  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4964    return true;
4965  }
4966
4967  bool NeedsEnvironment() const OVERRIDE { return true; }
4968
4969  bool CanThrow() const OVERRIDE { return true; }
4970
4971  bool CanBeNull() const OVERRIDE { return false; }
4972
4973
4974  DECLARE_INSTRUCTION(NullCheck);
4975
4976 private:
4977  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
4978};
4979
// Immutable value object bundling the static description of a field access:
// where the field lives (offset, declaring class def index, dex file, dex
// cache), its primitive type, its field index, and whether accesses must
// honor volatile semantics.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file,
            Handle<mirror::DexCache> dex_cache)
      : field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file),
        dex_cache_(dex_cache) {}

  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }
  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }

 private:
  // All members are const: a FieldInfo never changes after construction.
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
  const Handle<mirror::DexCache> dex_cache_;
};
5014
// Reads an instance field from the object given as input 0.
class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffectsForArchRuntimeCalls(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, value);
  }

  // Only non-volatile gets may be moved or deduplicated.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Two instance field gets are data-equal when they read the same offset.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  // An implicit null check is only valid on the receiver (input 0) and when
  // art::CanDoImplicitNullCheckOn accepts the field offset.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  // Mix the field offset into the instruction hash code.
  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type field_type, bool is_volatile) {
    SideEffects side_effects = SideEffects::FieldReadOfType(field_type, is_volatile);

    // MIPS delegates volatile kPrimLong and kPrimDouble loads to a runtime helper.
    // NOTE(review): the guard below does not test `is_volatile`, so the GC side
    // effect is also added for non-volatile 64-bit loads — confirm whether that
    // conservatism is intended.
    if (Primitive::Is64BitType(field_type)) {
      side_effects.Add(SideEffects::CanTriggerGC());
    }
    return side_effects;
  }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};
5076
// Writes input 1 (the value) into an instance field of input 0 (the object).
class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    Handle<mirror::DexCache> dex_cache,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffectsForArchRuntimeCalls(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    // Assume the stored value may be null until ClearValueCanBeNull() is called.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  // An implicit null check is only valid on the receiver (input 0) and when
  // art::CanDoImplicitNullCheckOn accepts the field offset.
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type field_type, bool is_volatile) {
    SideEffects side_effects = SideEffects::FieldWriteOfType(field_type, is_volatile);

    // MIPS delegates volatile kPrimLong and kPrimDouble stores to a runtime helper.
    // NOTE(review): the guard below does not test `is_volatile`, so the GC side
    // effect is also added for non-volatile 64-bit stores — confirm whether that
    // conservatism is intended.
    if (Primitive::Is64BitType(field_type)) {
      side_effects.Add(SideEffects::CanTriggerGC());
    }
    return side_effects;
  }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  // Packed layout: a single flag appended after the generic instruction bits.
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};
5137
// Reads the element at `index` (input 1) from `array` (input 0).
class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
    // which generates the implicit null check. There are cases when these can be removed
    // to produce better code. If we ever add optimizations to do so we should allow an
    // implicit check here (as long as the address falls in the first page).
    return false;
  }

  // Returns whether `other` is the int/long vs float/double "alias" of this
  // same access: identified by equal dex pcs, and debug-checked to share the
  // block, array and index while having complementary type interpretations.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};
5200
// Stores `value` (input 2) into `array` (input 0) at `index` (input 1).
class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Storing a reference may require a runtime type check (ArrayStoreException).
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  // Called when an analysis proves the store cannot fail the type check.
  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  // The component type recorded at build time, without the float/double fixup
  // performed by GetComponentType().
  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recomputes the side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  // Packed layout: the component type field followed by three flags.
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};
5300
5301class HArrayLength FINAL : public HExpression<1> {
5302 public:
5303  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
5304      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
5305    SetPackedFlag<kFlagIsStringLength>(is_string_length);
5306    // Note that arrays do not change length, so the instruction does not
5307    // depend on any write.
5308    SetRawInputAt(0, array);
5309  }
5310
5311  bool CanBeMoved() const OVERRIDE { return true; }
5312  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5313    return true;
5314  }
5315  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5316    return obj == InputAt(0);
5317  }
5318
5319  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
5320
5321  DECLARE_INSTRUCTION(ArrayLength);
5322
5323 private:
5324  // We treat a String as an array, creating the HArrayLength from String.length()
5325  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
5326  // determine whether a particular HArrayLength is actually a String.length() by
5327  // looking at the type of the input but that requires holding the mutator lock, so
5328  // we prefer to use a flag, so that code generators don't need to do the locking.
5329  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
5330  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
5331  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5332                "Too many packed fields.");
5333
5334  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
5335};
5336
5337class HBoundsCheck FINAL : public HExpression<2> {
5338 public:
5339  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5340  // constructor.
5341  HBoundsCheck(HInstruction* index,
5342               HInstruction* length,
5343               uint32_t dex_pc,
5344               uint32_t string_char_at_method_index = DexFile::kDexNoIndex)
5345      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc),
5346        string_char_at_method_index_(string_char_at_method_index) {
5347    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5348    SetRawInputAt(0, index);
5349    SetRawInputAt(1, length);
5350  }
5351
5352  bool CanBeMoved() const OVERRIDE { return true; }
5353  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5354    return true;
5355  }
5356
5357  bool NeedsEnvironment() const OVERRIDE { return true; }
5358
5359  bool CanThrow() const OVERRIDE { return true; }
5360
5361  bool IsStringCharAt() const { return GetStringCharAtMethodIndex() != DexFile::kDexNoIndex; }
5362  uint32_t GetStringCharAtMethodIndex() const { return string_char_at_method_index_; }
5363
5364  HInstruction* GetIndex() const { return InputAt(0); }
5365
5366  DECLARE_INSTRUCTION(BoundsCheck);
5367
5368 private:
5369  // We treat a String as an array, creating the HBoundsCheck from String.charAt()
5370  // intrinsic in the instruction simplifier. We want to include the String.charAt()
5371  // in the stack trace if we actually throw the StringIndexOutOfBoundsException,
5372  // so we need to create an HEnvironment which will be translated to an InlineInfo
5373  // indicating the extra stack frame. Since we add this HEnvironment quite late,
5374  // in the PrepareForRegisterAllocation pass, we need to remember the method index
5375  // from the invoke as we don't want to look again at the dex bytecode.
5376  uint32_t string_char_at_method_index_;  // DexFile::kDexNoIndex if regular array.
5377
5378  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5379};
5380
5381class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5382 public:
5383  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5384      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5385
5386  bool NeedsEnvironment() const OVERRIDE {
5387    return true;
5388  }
5389
5390  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5391  SlowPathCode* GetSlowPath() const { return slow_path_; }
5392
5393  DECLARE_INSTRUCTION(SuspendCheck);
5394
5395 private:
5396  // Only used for code generation, in order to share the same slow path between back edges
5397  // of a same loop.
5398  SlowPathCode* slow_path_;
5399
5400  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5401};
5402
5403// Pseudo-instruction which provides the native debugger with mapping information.
5404// It ensures that we can generate line number and local variables at this point.
5405class HNativeDebugInfo : public HTemplateInstruction<0> {
5406 public:
5407  explicit HNativeDebugInfo(uint32_t dex_pc)
5408      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5409
5410  bool NeedsEnvironment() const OVERRIDE {
5411    return true;
5412  }
5413
5414  DECLARE_INSTRUCTION(NativeDebugInfo);
5415
5416 private:
5417  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5418};
5419
5420/**
5421 * Instruction to load a Class object.
5422 */
5423class HLoadClass FINAL : public HInstruction {
5424 public:
5425  // Determines how to load the Class.
5426  enum class LoadKind {
5427    // Use the Class* from the method's own ArtMethod*.
5428    kReferrersClass,
5429
5430    // Use boot image Class* address that will be known at link time.
5431    // Used for boot image classes referenced by boot image code in non-PIC mode.
5432    kBootImageLinkTimeAddress,
5433
5434    // Use PC-relative boot image Class* address that will be known at link time.
5435    // Used for boot image classes referenced by boot image code in PIC mode.
5436    kBootImageLinkTimePcRelative,
5437
5438    // Use a known boot image Class* address, embedded in the code by the codegen.
5439    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5440    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5441    // GetIncludePatchInformation().
5442    kBootImageAddress,
5443
5444    // Load from the resolved types array at an absolute address.
5445    // Used for classes outside the boot image referenced by JIT-compiled code.
5446    kDexCacheAddress,
5447
5448    // Load from resolved types array in the dex cache using a PC-relative load.
5449    // Used for classes outside boot image when we know that we can access
5450    // the dex cache arrays using a PC-relative load.
5451    kDexCachePcRelative,
5452
5453    // Load from resolved types array accessed through the class loaded from
5454    // the compiled method's own ArtMethod*. This is the default access type when
5455    // all other types are unavailable.
5456    kDexCacheViaMethod,
5457
5458    kLast = kDexCacheViaMethod
5459  };
5460
5461  HLoadClass(HCurrentMethod* current_method,
5462             uint16_t type_index,
5463             const DexFile& dex_file,
5464             bool is_referrers_class,
5465             uint32_t dex_pc,
5466             bool needs_access_check,
5467             bool is_in_dex_cache,
5468             bool is_in_boot_image)
5469      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5470        special_input_(HUserRecord<HInstruction*>(current_method)),
5471        type_index_(type_index),
5472        dex_file_(dex_file),
5473        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5474    // Referrers class should not need access check. We never inline unverified
5475    // methods so we can't possibly end up in this situation.
5476    DCHECK(!is_referrers_class || !needs_access_check);
5477
5478    SetPackedField<LoadKindField>(
5479        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5480    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5481    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
5482    SetPackedFlag<kFlagIsInBootImage>(is_in_boot_image);
5483    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5484  }
5485
5486  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5487    DCHECK(HasAddress(load_kind));
5488    load_data_.address = address;
5489    SetLoadKindInternal(load_kind);
5490  }
5491
5492  void SetLoadKindWithTypeReference(LoadKind load_kind,
5493                                    const DexFile& dex_file,
5494                                    uint32_t type_index) {
5495    DCHECK(HasTypeReference(load_kind));
5496    DCHECK(IsSameDexFile(dex_file_, dex_file));
5497    DCHECK_EQ(type_index_, type_index);
5498    SetLoadKindInternal(load_kind);
5499  }
5500
5501  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5502                                        const DexFile& dex_file,
5503                                        uint32_t element_index) {
5504    DCHECK(HasDexCacheReference(load_kind));
5505    DCHECK(IsSameDexFile(dex_file_, dex_file));
5506    load_data_.dex_cache_element_index = element_index;
5507    SetLoadKindInternal(load_kind);
5508  }
5509
5510  LoadKind GetLoadKind() const {
5511    return GetPackedField<LoadKindField>();
5512  }
5513
5514  bool CanBeMoved() const OVERRIDE { return true; }
5515
5516  bool InstructionDataEquals(const HInstruction* other) const;
5517
5518  size_t ComputeHashCode() const OVERRIDE { return type_index_; }
5519
5520  bool CanBeNull() const OVERRIDE { return false; }
5521
5522  bool NeedsEnvironment() const OVERRIDE {
5523    return CanCallRuntime();
5524  }
5525
5526  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5527    // The entrypoint the code generator is going to call does not do
5528    // clinit of the class.
5529    DCHECK(!NeedsAccessCheck());
5530    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5531  }
5532
5533  bool CanCallRuntime() const {
5534    return MustGenerateClinitCheck() ||
5535           (!IsReferrersClass() && !IsInDexCache()) ||
5536           NeedsAccessCheck();
5537  }
5538
5539
5540  bool CanThrow() const OVERRIDE {
5541    return CanCallRuntime();
5542  }
5543
5544  ReferenceTypeInfo GetLoadedClassRTI() {
5545    return loaded_class_rti_;
5546  }
5547
5548  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5549    // Make sure we only set exact types (the loaded class should never be merged).
5550    DCHECK(rti.IsExact());
5551    loaded_class_rti_ = rti;
5552  }
5553
5554  uint32_t GetTypeIndex() const { return type_index_; }
5555  const DexFile& GetDexFile() const { return dex_file_; }
5556
5557  uint32_t GetDexCacheElementOffset() const;
5558
5559  uint64_t GetAddress() const {
5560    DCHECK(HasAddress(GetLoadKind()));
5561    return load_data_.address;
5562  }
5563
5564  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }
5565
5566  static SideEffects SideEffectsForArchRuntimeCalls() {
5567    return SideEffects::CanTriggerGC();
5568  }
5569
5570  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5571  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5572  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5573  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5574  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5575
5576  void MarkInDexCache() {
5577    SetPackedFlag<kFlagIsInDexCache>(true);
5578    DCHECK(!NeedsEnvironment());
5579    RemoveEnvironment();
5580    SetSideEffects(SideEffects::None());
5581  }
5582
5583  void MarkInBootImage() {
5584    SetPackedFlag<kFlagIsInBootImage>(true);
5585  }
5586
5587  void AddSpecialInput(HInstruction* special_input);
5588
5589  using HInstruction::GetInputRecords;  // Keep the const version visible.
5590  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5591    return ArrayRef<HUserRecord<HInstruction*>>(
5592        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5593  }
5594
5595  Primitive::Type GetType() const OVERRIDE {
5596    return Primitive::kPrimNot;
5597  }
5598
5599  DECLARE_INSTRUCTION(LoadClass);
5600
5601 private:
5602  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5603  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
5604  static constexpr size_t kFlagIsInBootImage       = kFlagIsInDexCache + 1;
5605  // Whether this instruction must generate the initialization check.
5606  // Used for code generation.
5607  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5608  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5609  static constexpr size_t kFieldLoadKindSize =
5610      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5611  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5612  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5613  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5614
5615  static bool HasTypeReference(LoadKind load_kind) {
5616    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5617        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5618        load_kind == LoadKind::kDexCacheViaMethod ||
5619        load_kind == LoadKind::kReferrersClass;
5620  }
5621
5622  static bool HasAddress(LoadKind load_kind) {
5623    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
5624  }
5625
5626  static bool HasDexCacheReference(LoadKind load_kind) {
5627    return load_kind == LoadKind::kDexCachePcRelative;
5628  }
5629
5630  void SetLoadKindInternal(LoadKind load_kind);
5631
5632  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5633  // For other load kinds it's empty or possibly some architecture-specific instruction
5634  // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
5635  HUserRecord<HInstruction*> special_input_;
5636
5637  const uint16_t type_index_;
5638  const DexFile& dex_file_;
5639
5640  union {
5641    uint32_t dex_cache_element_index;   // Only for dex cache reference.
5642    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
5643  } load_data_;
5644
5645  ReferenceTypeInfo loaded_class_rti_;
5646
5647  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5648};
5649std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5650
// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
inline uint32_t HLoadClass::GetDexCacheElementOffset() const {
  // Only valid for the dex cache reference load kind (kDexCachePcRelative).
  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
  return load_data_.dex_cache_element_index;
}
5656
// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kDexCachePcRelative ||
         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
  // The special input may be set at most once.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  special_input->AddUseAt(this, 0);
}
5669
// Instruction to load a String object; structured like HLoadClass above.
class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in non-PIC mode.
    kBootImageLinkTimeAddress,

    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code in PIC mode.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    // Note: codegen needs to emit a linker patch if indicated by compiler options'
    // GetIncludePatchInformation().
    kBootImageAddress,

    // Load from the resolved strings array at an absolute address.
    // Used for strings outside the boot image referenced by JIT-compiled code.
    kDexCacheAddress,

    // Load from resolved strings array in the dex cache using a PC-relative load.
    // Used for strings outside boot image when we know that we can access
    // the dex cache arrays using a PC-relative load.
    kDexCachePcRelative,

    // Load from resolved strings array accessed through the class loaded from
    // the compiled method's own ArtMethod*. This is the default access type when
    // all other types are unavailable.
    kDexCacheViaMethod,

    kLast = kDexCacheViaMethod
  };

  // Starts with the default kDexCacheViaMethod load kind; it may be upgraded
  // later through the SetLoadKindWith*() methods below.
  HLoadString(HCurrentMethod* current_method,
              uint32_t string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index) {
    SetPackedFlag<kFlagIsInDexCache>(false);
    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
    load_data_.ref.dex_file = &dex_file;
  }

  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
    DCHECK(HasAddress(load_kind));
    load_data_.address = address;
    SetLoadKindInternal(load_kind);
  }

  void SetLoadKindWithStringReference(LoadKind load_kind,
                                      const DexFile& dex_file,
                                      uint32_t string_index) {
    DCHECK(HasStringReference(load_kind));
    load_data_.ref.dex_file = &dex_file;
    string_index_ = string_index;
    SetLoadKindInternal(load_kind);
  }

  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
                                        const DexFile& dex_file,
                                        uint32_t element_index) {
    DCHECK(HasDexCacheReference(load_kind));
    load_data_.ref.dex_file = &dex_file;
    load_data_.ref.dex_cache_element_index = element_index;
    SetLoadKindInternal(load_kind);
  }

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const;

  uint32_t GetStringIndex() const {
    DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
    return string_index_;
  }

  uint32_t GetDexCacheElementOffset() const;

  uint64_t GetAddress() const {
    DCHECK(HasAddress(GetLoadKind()));
    return load_data_.address;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  // The string index doubles as the hash code (see string_index_ below).
  size_t ComputeHashCode() const OVERRIDE { return string_index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress) {
      return false;
    }
    return !IsInDexCache();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }

  // Once the string is known to be in the dex cache, no runtime call is
  // needed, so the environment and side effects can be dropped.
  void MarkInDexCache() {
    SetPackedFlag<kFlagIsInDexCache>(true);
    DCHECK(!NeedsEnvironment());
    RemoveEnvironment();
    SetSideEffects(SideEffects::None());
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    // Zero or one input, depending on whether the special input has been set.
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  Primitive::Type GetType() const OVERRIDE {
    return Primitive::kPrimNot;
  }

  DECLARE_INSTRUCTION(LoadString);

 private:
  static constexpr size_t kFlagIsInDexCache = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  static bool HasStringReference(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kDexCacheViaMethod;
  }

  static bool HasAddress(LoadKind load_kind) {
    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
  }

  static bool HasDexCacheReference(LoadKind load_kind) {
    return load_kind == LoadKind::kDexCachePcRelative;
  }

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kDexCacheViaMethod.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  // String index serves also as the hash code and it's also needed for slow-paths,
  // so it must not be overwritten with other load data.
  uint32_t string_index_;

  union {
    struct {
      const DexFile* dex_file;            // For string reference and dex cache reference.
      uint32_t dex_cache_element_index;   // Only for dex cache reference.
    } ref;
    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
  } load_data_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
5855std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5856
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline const DexFile& HLoadString::GetDexFile() const {
  // load_data_.ref.dex_file is valid only for string/dex-cache reference kinds.
  DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
      << GetLoadKind();
  return *load_data_.ref.dex_file;
}
5863
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline uint32_t HLoadString::GetDexCacheElementOffset() const {
  // Only valid for the dex cache reference load kind (kDexCachePcRelative).
  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
  return load_data_.ref.dex_cache_element_index;
}
5869
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kDexCachePcRelative ||
         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
  // HLoadString::GetInputRecords() returns an empty array at this point,
  // so use the GetInputRecords() from the base class to set the input record.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  special_input->AddUseAt(this, 0);
}
5884
5885/**
5886 * Performs an initialization check on its Class object input.
5887 */
5888class HClinitCheck FINAL : public HExpression<1> {
5889 public:
5890  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5891      : HExpression(
5892            Primitive::kPrimNot,
5893            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5894            dex_pc) {
5895    SetRawInputAt(0, constant);
5896  }
5897
5898  bool CanBeMoved() const OVERRIDE { return true; }
5899  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5900    return true;
5901  }
5902
5903  bool NeedsEnvironment() const OVERRIDE {
5904    // May call runtime to initialize the class.
5905    return true;
5906  }
5907
5908  bool CanThrow() const OVERRIDE { return true; }
5909
5910  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5911
5912  DECLARE_INSTRUCTION(ClinitCheck);
5913
5914 private:
5915  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5916};
5917
// Reads a static field of the class given as input 0.
class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HExpression(field_type,
                    SideEffectsForArchRuntimeCalls(field_type, is_volatile),
                    dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetRawInputAt(0, cls);
  }


  // Volatile loads must not be moved past other memory accesses.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Equality is based on the field offset only.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type field_type, bool is_volatile) {
    SideEffects side_effects = SideEffects::FieldReadOfType(field_type, is_volatile);

    // MIPS delegates volatile kPrimLong and kPrimDouble loads to a runtime helper.
    // Note: conservatively applied to all 64-bit loads, volatile or not.
    if (Primitive::Is64BitType(field_type)) {
      side_effects.Add(SideEffects::CanTriggerGC());
    }
    return side_effects;
  }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};
5976
// Writes a static field of the class given as input 0; the value is input 1.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffectsForArchRuntimeCalls(field_type, is_volatile),
                             dex_pc),
        field_info_(field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file,
                    dex_cache) {
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  // Used by analyses to elide null checks on the stored value.
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type field_type, bool is_volatile) {
    SideEffects side_effects = SideEffects::FieldWriteOfType(field_type, is_volatile);

    // MIPS delegates volatile kPrimLong and kPrimDouble stores to a runtime helper.
    // Note: conservatively applied to all 64-bit stores, volatile or not.
    if (Primitive::Is64BitType(field_type)) {
      side_effects.Add(SideEffects::CanTriggerGC());
    }
    return side_effects;
  }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
6034
// Reads an instance field that could not be resolved at compile time.
// NeedsEnvironment()/CanThrow() are set, so the access is presumably
// completed through a runtime call — confirm against the code generator.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
};
6059
6060class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6061 public:
6062  HUnresolvedInstanceFieldSet(HInstruction* obj,
6063                              HInstruction* value,
6064                              Primitive::Type field_type,
6065                              uint32_t field_index,
6066                              uint32_t dex_pc)
6067      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6068        field_index_(field_index) {
6069    SetPackedField<FieldTypeField>(field_type);
6070    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6071    SetRawInputAt(0, obj);
6072    SetRawInputAt(1, value);
6073  }
6074
6075  bool NeedsEnvironment() const OVERRIDE { return true; }
6076  bool CanThrow() const OVERRIDE { return true; }
6077
6078  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6079  uint32_t GetFieldIndex() const { return field_index_; }
6080
6081  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6082
6083 private:
6084  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6085  static constexpr size_t kFieldFieldTypeSize =
6086      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6087  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6088      kFieldFieldType + kFieldFieldTypeSize;
6089  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6090                "Too many packed fields.");
6091  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6092
6093  const uint32_t field_index_;
6094
6095  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6096};
6097
// Reads a static field that could not be resolved at compile time.
// NeedsEnvironment()/CanThrow() are set, so the access is presumably
// completed through a runtime call — confirm against the code generator.
class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
};
6120
// Writes a static field that could not be resolved at compile time.
// NeedsEnvironment()/CanThrow() are set, so the access is presumably
// completed through a runtime call — confirm against the code generator.
class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            Primitive::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    // The stored value's primitive kind must match the field's.
    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 private:
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;

  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
};
6156
// Implement the move-exception DEX instruction.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}

  // A pending exception is always a non-null reference.
  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HLoadException);
};
6170
// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  // AllWrites side effects keep this ordered relative to memory writes.
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}

  DECLARE_INSTRUCTION(ClearException);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClearException);
};
6183
// Implement the throw DEX instruction: throws the exception given as input 0.
class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  // A throw ends its basic block.
  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }


  DECLARE_INSTRUCTION(Throw);

 private:
  DISALLOW_COPY_AND_ASSIGN(HThrow);
};
6203
6204/**
6205 * Implementation strategies for the code generator of a HInstanceOf
6206 * or `HCheckCast`.
6207 */
6208enum class TypeCheckKind {
6209  kUnresolvedCheck,       // Check against an unresolved type.
6210  kExactCheck,            // Can do a single class compare.
6211  kClassHierarchyCheck,   // Can just walk the super class chain.
6212  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6213  kInterfaceCheck,        // No optimization yet when checking against an interface.
6214  kArrayObjectCheck,      // Can just check if the array is not primitive.
6215  kArrayCheck,            // No optimization yet when checking against a generic array.
6216  kLast = kArrayCheck
6217};
6218
6219std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6220
// `instanceof` check: produces a boolean telling whether `object` (input 0)
// is an instance of the class loaded by `constant` (input 1). The chosen
// TypeCheckKind decides both side effects and codegen strategy.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);  // Conservative default; cleared by analysis.
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // All state lives in the packed fields and inputs, so any two HInstanceOf
  // with equal inputs are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  // Whether the generated code may need to call into the runtime.
  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  // Packed layout (order matters): TypeCheckKind field, then the null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};
6274
// Narrows the reference type information of its single input; used by
// reference type propagation to record type bounds (e.g. inside an
// `instanceof` guard).
class HBoundType FINAL : public HExpression<1> {
 public:
  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);  // Only reference-typed inputs.
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    // Nullability may only be narrowed: cannot claim "can be null" when the
    // upper bound already rules it out.
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};
6318
// `checkcast`: verifies that `object` (input 0) is assignable to the class
// loaded by `constant` (input 1), throwing ClassCastException otherwise.
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);  // Conservative default; cleared by analysis.
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  // All state lives in the packed fields and inputs, so any two HCheckCast
  // with equal inputs are interchangeable.
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  // Packed layout (order matters): TypeCheckKind field, then the null-check flag.
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};
6363
/**
 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
 * @details We define the combined barrier types that are actually required
 * by the Java Memory Model, rather than using exactly the terminology from
 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
 * primitives.  Note that the JSR-133 cookbook generally does not deal with
 * store atomicity issues, and the recipes there are not always entirely sufficient.
 * The current recipe is as follows:
 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
 * -# Use StoreStore barrier after all stores but before return from any constructor whose
 *    class has final fields.
 * -# Use NTStoreStore to order non-temporal stores with respect to all later
 *    store-to-memory instructions.  Only generated together with non-temporal stores.
 */
enum MemBarrierKind {
  kAnyStore,     // ~(LoadStore | StoreStore): release barrier (see recipe above).
  kLoadAny,      // ~(LoadLoad | LoadStore): acquire barrier.
  kStoreStore,   // Orders stores, e.g. for final-field publication.
  kAnyAny,       // Full barrier (used after volatile stores).
  kNTStoreStore, // Orders non-temporal stores against later stores.
  kLastBarrierKind = kNTStoreStore  // Sentinel used to size packed bit-fields.
};
std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
6389
6390class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6391 public:
6392  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6393      : HTemplateInstruction(
6394            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6395    SetPackedField<BarrierKindField>(barrier_kind);
6396  }
6397
6398  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6399
6400  DECLARE_INSTRUCTION(MemoryBarrier);
6401
6402 private:
6403  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6404  static constexpr size_t kFieldBarrierKindSize =
6405      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6406  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6407      kFieldBarrierKind + kFieldBarrierKindSize;
6408  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6409                "Too many packed fields.");
6410  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6411
6412  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6413};
6414
6415class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6416 public:
6417  enum class OperationKind {
6418    kEnter,
6419    kExit,
6420    kLast = kExit
6421  };
6422
6423  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6424    : HTemplateInstruction(
6425          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6426          dex_pc) {
6427    SetPackedField<OperationKindField>(kind);
6428    SetRawInputAt(0, object);
6429  }
6430
6431  // Instruction may go into runtime, so we need an environment.
6432  bool NeedsEnvironment() const OVERRIDE { return true; }
6433
6434  bool CanThrow() const OVERRIDE {
6435    // Verifier guarantees that monitor-exit cannot throw.
6436    // This is important because it allows the HGraphBuilder to remove
6437    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6438    return IsEnter();
6439  }
6440
6441  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6442  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6443
6444  DECLARE_INSTRUCTION(MonitorOperation);
6445
6446 private:
6447  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6448  static constexpr size_t kFieldOperationKindSize =
6449      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6450  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6451      kFieldOperationKind + kFieldOperationKindSize;
6452  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6453                "Too many packed fields.");
6454  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6455
6456 private:
6457  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6458};
6459
// Conditional select: yields `true_value` or `false_value` depending on
// `condition`, without introducing control flow in the graph.
class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    // Both values must agree on the (phi-normalized) type.
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Nullable iff either selectable value is nullable.
  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSelect);
};
6496
// A single source->destination move handled by the parallel move resolver.
// State encoding: destination invalid + source valid = "pending";
// both invalid = "eliminated".
class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  // Restores the destination cleared by MarkPending().
  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  Primitive::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return Primitive::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  Primitive::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input in the expected locations of user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6572
// Initial capacity reserved for the move list of an HParallelMove.
static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must be performed as if simultaneously (sources are all
// read before any destination is written). Produced by register allocation;
// resolved by the parallel move resolver.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  // Appends a move. In debug builds, verifies that (a) at most one move per
  // originating instruction is added (except the spill-slot special case
  // below) and (b) no two moves write overlapping destinations.
  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};
6630
6631}  // namespace art
6632
6633#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6634#include "nodes_shared.h"
6635#endif
6636#ifdef ART_ENABLE_CODEGEN_arm
6637#include "nodes_arm.h"
6638#endif
6639#ifdef ART_ENABLE_CODEGEN_arm64
6640#include "nodes_arm64.h"
6641#endif
6642#ifdef ART_ENABLE_CODEGEN_mips
6643#include "nodes_mips.h"
6644#endif
6645#ifdef ART_ENABLE_CODEGEN_x86
6646#include "nodes_x86.h"
6647#endif
6648
6649namespace art {
6650
// Base visitor over the instructions of a graph. Subclasses override
// VisitInstruction() and/or the macro-generated per-class Visit##name()
// hooks; by default each per-class hook falls through to VisitInstruction().
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  // Default no-op; called by every per-class hook unless it is overridden.
  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
6680
// Visitor whose per-class hooks delegate up the instruction class hierarchy
// (Visit##name forwards to Visit##super) instead of to VisitInstruction().
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
6697
6698class HInsertionOrderIterator : public ValueObject {
6699 public:
6700  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}
6701
6702  bool Done() const { return index_ == graph_.GetBlocks().size(); }
6703  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
6704  void Advance() { ++index_; }
6705
6706 private:
6707  const HGraph& graph_;
6708  size_t index_;
6709
6710  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
6711};
6712
// Iterates over the blocks of a graph in reverse post-order (dominators
// before dominated blocks). Requires the RPO to have been computed.
class HReversePostOrderIterator : public ValueObject {
 public:
  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
  void Advance() { ++index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
};
6730
// Iterates over the blocks of a graph in post-order by walking the stored
// reverse post-order backwards. Requires the RPO to have been computed.
class HPostOrderIterator : public ValueObject {
 public:
  explicit HPostOrderIterator(const HGraph& graph)
      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
    // Check that reverse post order of the graph has been built.
    DCHECK(!graph.GetReversePostOrder().empty());
  }

  bool Done() const { return index_ == 0; }
  // `index_` is one past the current element, so iteration runs size()..1.
  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
  void Advance() { --index_; }

 private:
  const HGraph& graph_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
};
6749
6750class HLinearPostOrderIterator : public ValueObject {
6751 public:
6752  explicit HLinearPostOrderIterator(const HGraph& graph)
6753      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
6754
6755  bool Done() const { return index_ == 0; }
6756
6757  HBasicBlock* Current() const { return order_[index_ - 1u]; }
6758
6759  void Advance() {
6760    --index_;
6761    DCHECK_GE(index_, 0U);
6762  }
6763
6764 private:
6765  const ArenaVector<HBasicBlock*>& order_;
6766  size_t index_;
6767
6768  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
6769};
6770
6771class HLinearOrderIterator : public ValueObject {
6772 public:
6773  explicit HLinearOrderIterator(const HGraph& graph)
6774      : order_(graph.GetLinearOrder()), index_(0) {}
6775
6776  bool Done() const { return index_ == order_.size(); }
6777  HBasicBlock* Current() const { return order_[index_]; }
6778  void Advance() { ++index_; }
6779
6780 private:
6781  const ArenaVector<HBasicBlock*>& order_;
6782  size_t index_;
6783
6784  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
6785};
6786
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is on their
// block id.
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    // Skip ahead if block 0 is not in the loop.
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Moves to the next block id whose bit is set in the loop's block set.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};
6819
// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // Skip ahead if the first RPO block is not in the loop.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Moves to the next RPO position whose block id is set in the loop's block set.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};
6852
6853inline int64_t Int64FromConstant(HConstant* constant) {
6854  if (constant->IsIntConstant()) {
6855    return constant->AsIntConstant()->GetValue();
6856  } else if (constant->IsLongConstant()) {
6857    return constant->AsLongConstant()->GetValue();
6858  } else {
6859    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6860    return 0;
6861  }
6862}
6863
6864#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6865  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6866  inline const H##type* HInstruction::As##type() const {                       \
6867    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6868  }                                                                            \
6869  inline H##type* HInstruction::As##type() {                                   \
6870    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6871  }
6872
6873  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6874#undef INSTRUCTION_TYPE_CHECK
6875
// Create space in `blocks` for adding `number_of_new_blocks` entries
// immediately after the entry at index `after`. Blocks following `after`
// are moved accordingly (entries in the opened gap are left unspecified
// until the caller fills them in).
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  // Shift [after + 1, old_size) to the end, opening the gap after `after`.
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
6887
6888}  // namespace art
6889
6890#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6891