nodes.h revision 75afcdd3503a8a8518e5b23d21b6e73306ce39ce
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/iteration_range.h"
#include "base/stl_util.h"
#include "base/transform_array_ref.h"
#include "dex_file.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "invoke_type.h"
#include "intrinsics_enum.h"
#include "locations.h"
#include "method_reference.h"
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
#include "utils/intrusive_forward_list.h"

namespace art {

class GraphChecker;
class HBasicBlock;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;
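
// Illustrative sketch (not part of the original header): per dex semantics only the
// low bits of a shift/rotate distance are meaningful, so these constants are typically
// used as masks, e.g.
//
//   int32_t shifted = value << (distance & kMaxIntShiftDistance);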

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

static constexpr uint32_t kNoDexPc = -1;

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT,
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution. No two class loaders should ever
  // use the same DexFile object; doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}
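
// Usage sketch (illustrative, not part of the original header): passes that mix
// indices coming from two methods typically guard on dex file identity first, e.g.
//
//   if (!IsSameDexFile(caller_dex_file, callee_dex_file)) {
//     return false;  // Type/method indices are not comparable across dex files.
//   }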

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};
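
// Illustrative sketch (hypothetical helper, not in the original header): code that
// dispatches on IfCondition often separates the unsigned comparisons, which are
// contiguous at the end of the enum:
//
//   bool IsUnsignedCondition(IfCondition cond) {
//     return cond >= kCondB && cond <= kCondAE;
//   }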

enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};

class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Aborts if neither
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
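
// Usage sketch (illustrative, not part of the original header): the list keeps
// insertion order, so relative position queries can be expressed with FoundBefore:
//
//   HInstructionList list;
//   list.AddInstruction(first);
//   list.AddInstruction(second);
//   DCHECK(list.FoundBefore(first, second));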

class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of detail.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether the type is exact, as opposed to a superclass of the actual type.
  bool is_exact_;
};
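
// Usage sketch (illustrative, not part of the original header): reference type info
// is only meaningful when valid, so queries are normally guarded, e.g.
//
//   ReferenceTypeInfo rti = ReferenceTypeInfo::Create(klass_handle, /* is_exact */ true);
//   if (rti.IsValid() && rti.IsSupertypeOf(other_rti)) {
//     // `other_rti` is known to be assignable to `rti`'s class.
//   }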

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         bool should_generate_constructor_barrier,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        should_generate_constructor_barrier_(should_generate_constructor_barrier),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` indicates whether `block`
  // should become the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Adds a couple of blocks needed to test whether the loop body is entered, so that
  // deoptimization instructions, etc. can be placed there.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  int32_t GetNextInstructionId() {
    DCHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    DCHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool ShouldGenerateConstructorBarrier() const {
    return should_generate_constructor_barrier_;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual register arguments passed to an HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Includes the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg-sized slots that the temporaries use (used in the baseline compiler).
  size_t temporaries_vreg_slots_;

  // Has bounds checks. We can totally skip BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are any try/catch blocks in the graph. We will skip
  // try/catch-related passes if false.
  bool has_try_catch_;

  // Flag whether there are any irreducible loops in the graph.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file this method comes from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  const bool should_generate_constructor_barrier_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on-stack replacement: this will
  // make all loops be treated as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};
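
// Usage sketch (illustrative, not part of the original header): constants are cached
// per value, so repeated requests normally return the same instruction:
//
//   HIntConstant* forty_two = graph->GetIntConstant(42);
//   DCHECK_EQ(forty_two, graph->GetIntConstant(42));  // Same cached HIntConstant.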

class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if `instruction` is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
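
// Usage sketch (illustrative, not part of the original header): loop membership
// queries require a populated loop, e.g.
//
//   HLoopInformation* loop = header->GetLoopInformation();
//   if (loop != nullptr && loop->IsPopulated() && loop->Contains(*block)) {
//     // `block` belongs to the loop rooted at `header`.
//   }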

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_ == DexFile::kDexNoIndex16;
  }

  uint16_t GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const uint16_t catch_type_index_;
};
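
// Usage sketch (illustrative, not part of the original header): a block carries at
// most one of the two flavors of information, so callers branch on the kind first:
//
//   TryCatchInformation* info = block->GetTryCatchInformation();
//   if (info != nullptr && info->IsTryBlock()) {
//     const HTryBoundary& entry = info->GetTryEntry();
//     // ...
//   }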

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a doubly linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` is the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With `ensure_safety` set to true, they verify that the
  // instruction is not in use and remove it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non-loop-header block having loop information means that loop
      // information has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(*loop_information_->GetHeader())`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};
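
// Usage sketch (illustrative, not part of the original header): AddSuccessor keeps
// the predecessor/successor lists of both blocks in sync, e.g.
//
//   block->AddSuccessor(other);
//   DCHECK(other->GetPredecessors().back() == block);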

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};
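
// Usage sketch (illustrative, not part of the original header):
//
//   for (HLoopInformationOutwardIterator it(*block); !it.Done(); it.Advance()) {
//     HLoopInformation* loop = it.Current();
//     // Innermost loop first, then its enclosing loops.
//   }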

#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
  M(Above, Condition)                                                   \
  M(AboveOrEqual, Condition)                                            \
  M(Add, BinaryOperation)                                               \
  M(And, BinaryOperation)                                               \
  M(ArrayGet, Instruction)                                              \
  M(ArrayLength, Instruction)                                           \
  M(ArraySet, Instruction)                                              \
  M(Below, Condition)                                                   \
  M(BelowOrEqual, Condition)                                            \
  M(BooleanNot, UnaryOperation)                                         \
  M(BoundsCheck, Instruction)                                           \
  M(BoundType, Instruction)                                             \
  M(CheckCast, Instruction)                                             \
  M(ClassTableGet, Instruction)                                         \
  M(ClearException, Instruction)                                        \
  M(ClinitCheck, Instruction)                                           \
  M(Compare, BinaryOperation)                                           \
  M(CurrentMethod, Instruction)                                         \
  M(Deoptimize, Instruction)                                            \
  M(Div, BinaryOperation)                                               \
  M(DivZeroCheck, Instruction)                                          \
  M(DoubleConstant, Constant)                                           \
  M(Equal, Condition)                                                   \
  M(Exit, Instruction)                                                  \
  M(FloatConstant, Constant)                                            \
  M(Goto, Instruction)                                                  \
  M(GreaterThan, Condition)                                             \
  M(GreaterThanOrEqual, Condition)                                      \
  M(If, Instruction)                                                    \
  M(InstanceFieldGet, Instruction)                                      \
  M(InstanceFieldSet, Instruction)                                      \
  M(InstanceOf, Instruction)                                            \
  M(IntConstant, Constant)                                              \
  M(InvokeUnresolved, Invoke)                                           \
  M(InvokeInterface, Invoke)                                            \
  M(InvokeStaticOrDirect, Invoke)                                       \
  M(InvokeVirtual, Invoke)                                              \
  M(LessThan, Condition)                                                \
  M(LessThanOrEqual, Condition)                                         \
  M(LoadClass, Instruction)                                             \
  M(LoadException, Instruction)                                         \
  M(LoadString, Instruction)                                            \
  M(LongConstant, Constant)                                             \
  M(MemoryBarrier, Instruction)                                         \
  M(MonitorOperation, Instruction)                                      \
  M(Mul, BinaryOperation)                                               \
  M(NativeDebugInfo, Instruction)                                       \
  M(Neg, UnaryOperation)                                                \
  M(NewArray, Instruction)                                              \
  M(NewInstance, Instruction)                                           \
  M(Not, UnaryOperation)                                                \
  M(NotEqual, Condition)                                                \
  M(NullConstant, Instruction)                                          \
  M(NullCheck, Instruction)                                             \
  M(Or, BinaryOperation)                                                \
  M(PackedSwitch, Instruction)                                          \
  M(ParallelMove, Instruction)                                          \
  M(ParameterValue, Instruction)                                        \
  M(Phi, Instruction)                                                   \
  M(Rem, BinaryOperation)                                               \
  M(Return, Instruction)                                                \
  M(ReturnVoid, Instruction)                                            \
  M(Ror, BinaryOperation)                                               \
  M(Shl, BinaryOperation)                                               \
  M(Shr, BinaryOperation)                                               \
  M(StaticFieldGet, Instruction)                                        \
  M(StaticFieldSet, Instruction)                                        \
  M(UnresolvedInstanceFieldGet, Instruction)                            \
  M(UnresolvedInstanceFieldSet, Instruction)                            \
  M(UnresolvedStaticFieldGet, Instruction)                              \
  M(UnresolvedStaticFieldSet, Instruction)                              \
  M(Select, Instruction)                                                \
  M(Sub, BinaryOperation)                                               \
  M(SuspendCheck, Instruction)                                          \
  M(Throw, Instruction)                                                 \
  M(TryBoundary, Instruction)                                           \
  M(TypeConversion, Instruction)                                        \
  M(UShr, BinaryOperation)                                              \
  M(Xor, BinaryOperation)                                               \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
  M(BitwiseNegatedRight, Instruction)                                   \
  M(MultiplyAccumulate, Instruction)                                    \
  M(IntermediateAddress, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                            \
  M(ArmDexCacheArraysBase, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                          \
  M(Arm64DataProcWithShifterOp, Instruction)
#endif

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
  M(MipsComputeBaseMethodAddress, Instruction)                          \
  M(MipsDexCacheArraysBase, Instruction)                                \
  M(MipsPackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
  M(X86ComputeBaseMethodAddress, Instruction)                           \
  M(X86LoadFromConstantTable, Instruction)                              \
  M(X86FPNeg, Instruction)                                              \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
  M(Condition, BinaryOperation)                                         \
  M(Constant, Instruction)                                              \
  M(UnaryOperation, Instruction)                                        \
  M(BinaryOperation, Instruction)                                       \
  M(Invoke, Instruction)

#define FOR_EACH_INSTRUCTION(M)                                         \
  FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION
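
// Illustrative note (not in the original header): applied to FORWARD_DECLARATION,
// FOR_EACH_INSTRUCTION expands to one forward declaration per instruction kind,
// e.g. `class HAdd;`, `class HArrayGet;`, ..., `class HInvoke;`.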
1377
1378#define DECLARE_INSTRUCTION(type)                                         \
1379  InstructionKind GetKindInternal() const OVERRIDE { return k##type; }    \
1380  const char* DebugName() const OVERRIDE { return #type; }                \
1381  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
1382    return other->Is##type();                                             \
1383  }                                                                       \
1384  void Accept(HGraphVisitor* visitor) OVERRIDE
1385
1386#define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
1387  bool Is##type() const { return As##type() != nullptr; }               \
1388  const H##type* As##type() const { return this; }                      \
1389  H##type* As##type() { return this; }
1390
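// A sketch of how a concrete opcode consumes the macros above (the `HFoo` name
// is purely illustrative; a real opcode must also be listed in one of the
// FOR_EACH_CONCRETE_INSTRUCTION_* tables above so that kFoo, IsFoo() and the
// visitor hooks exist):
//
//   class HFoo FINAL : public HTemplateInstruction<0> {
//    public:
//     explicit HFoo(uint32_t dex_pc = kNoDexPc)
//         : HTemplateInstruction(SideEffects::None(), dex_pc) {}
//     DECLARE_INSTRUCTION(Foo);
//    private:
//     DISALLOW_COPY_AND_ASSIGN(HFoo);
//   };
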
1391template <typename T>
1392class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
1393 public:
1394  T GetUser() const { return user_; }
1395  size_t GetIndex() const { return index_; }
1396  void SetIndex(size_t index) { index_ = index; }
1397
1398  // Hook for the IntrusiveForwardList<>.
1399  // TODO: Hide this better.
1400  IntrusiveForwardListHook hook;
1401
1402 private:
1403  HUseListNode(T user, size_t index)
1404      : user_(user), index_(index) {}
1405
1406  T const user_;
1407  size_t index_;
1408
1409  friend class HInstruction;
1410
1411  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
1412};
1413
1414template <typename T>
1415using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1416
1417// This class is used by HEnvironment and HInstruction classes to record the
1418// instructions they use and pointers to the corresponding HUseListNodes kept
1419// by the used instructions.
1420template <typename T>
1421class HUserRecord : public ValueObject {
1422 public:
1423  HUserRecord() : instruction_(nullptr), before_use_node_() {}
1424  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1425
1426  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1427      : HUserRecord(old_record.instruction_, before_use_node) {}
1428  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1429      : instruction_(instruction), before_use_node_(before_use_node) {
1430    DCHECK(instruction_ != nullptr);
1431  }
1432
1433  HInstruction* GetInstruction() const { return instruction_; }
1434  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1435  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1436
1437 private:
1438  // Instruction used by the user.
1439  HInstruction* instruction_;
1440
1441  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1442  typename HUseList<T>::iterator before_use_node_;
1443};
1444
1445// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1446// This is used for HInstruction::GetInputs() to return a container wrapper providing
1447// HInstruction* values even though the underlying container has HUserRecord<>s.
1448struct HInputExtractor {
1449  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1450    return record.GetInstruction();
1451  }
1452  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1453    return record.GetInstruction();
1454  }
1455};
1456
1457using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1458using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
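// With these wrappers a pass can iterate an instruction's inputs directly as
// HInstruction pointers, e.g. `for (HInstruction* input : insn->GetInputs())`,
// as HInstruction::HasInput() does below.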
1459
1460/**
1461 * Side-effects representation.
1462 *
1463 * For write/read dependences on fields/arrays, the dependence analysis uses
1464 * type disambiguation (e.g. a float field write cannot modify the value of an
1465 * integer field read) and the access type (e.g.  a reference array write cannot
1466 * modify the value of a reference field read [although it may modify the
1467 * reference fetch prior to reading the field, which is represented by its own
1468 * write/read dependence]). The analysis makes conservative points-to
1469 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1470 * the same, and any reference read depends on any reference read without
1471 * further regard of its type).
1472 *
1473 * The internal representation uses 38 bits and is described in the table below.
1474 * The first line indicates the side effect, and for field/array accesses the
1475 * second line indicates the type of the access (in the order of the
1476 * Primitive::Type enum).
1477 * The two numbered lines below indicate the bit position in the bitfield (read
1478 * vertically).
1479 *
1480 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1481 *   +-------------+---------+---------+--------------+---------+---------+
1482 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1483 *   |      3      |333333322|222222221|       1      |111111110|000000000|
1484 *   |      7      |654321098|765432109|       8      |765432109|876543210|
1485 *
1486 * Note that, to ease the implementation, 'changes' bits are least significant
1487 * bits, while 'dependency' bits are most significant bits.
1488 */
1489class SideEffects : public ValueObject {
1490 public:
1491  SideEffects() : flags_(0) {}
1492
1493  static SideEffects None() {
1494    return SideEffects(0);
1495  }
1496
1497  static SideEffects All() {
1498    return SideEffects(kAllChangeBits | kAllDependOnBits);
1499  }
1500
1501  static SideEffects AllChanges() {
1502    return SideEffects(kAllChangeBits);
1503  }
1504
1505  static SideEffects AllDependencies() {
1506    return SideEffects(kAllDependOnBits);
1507  }
1508
1509  static SideEffects AllExceptGCDependency() {
1510    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1511  }
1512
1513  static SideEffects AllWritesAndReads() {
1514    return SideEffects(kAllWrites | kAllReads);
1515  }
1516
1517  static SideEffects AllWrites() {
1518    return SideEffects(kAllWrites);
1519  }
1520
1521  static SideEffects AllReads() {
1522    return SideEffects(kAllReads);
1523  }
1524
1525  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
1526    return is_volatile
1527        ? AllWritesAndReads()
1528        : SideEffects(TypeFlag(type, kFieldWriteOffset));
1529  }
1530
1531  static SideEffects ArrayWriteOfType(Primitive::Type type) {
1532    return SideEffects(TypeFlag(type, kArrayWriteOffset));
1533  }
1534
1535  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
1536    return is_volatile
1537        ? AllWritesAndReads()
1538        : SideEffects(TypeFlag(type, kFieldReadOffset));
1539  }
1540
1541  static SideEffects ArrayReadOfType(Primitive::Type type) {
1542    return SideEffects(TypeFlag(type, kArrayReadOffset));
1543  }
1544
1545  static SideEffects CanTriggerGC() {
1546    return SideEffects(1ULL << kCanTriggerGCBit);
1547  }
1548
1549  static SideEffects DependsOnGC() {
1550    return SideEffects(1ULL << kDependsOnGCBit);
1551  }
1552
1553  // Combines the side-effects of this and the other.
1554  SideEffects Union(SideEffects other) const {
1555    return SideEffects(flags_ | other.flags_);
1556  }
1557
1558  SideEffects Exclusion(SideEffects other) const {
1559    return SideEffects(flags_ & ~other.flags_);
1560  }
1561
1562  void Add(SideEffects other) {
1563    flags_ |= other.flags_;
1564  }
1565
1566  bool Includes(SideEffects other) const {
1567    return (other.flags_ & flags_) == other.flags_;
1568  }
1569
1570  bool HasSideEffects() const {
1571    return (flags_ & kAllChangeBits);
1572  }
1573
1574  bool HasDependencies() const {
1575    return (flags_ & kAllDependOnBits);
1576  }
1577
1578  // Returns true if there are no side effects or dependencies.
1579  bool DoesNothing() const {
1580    return flags_ == 0;
1581  }
1582
1583  // Returns true if something is written.
1584  bool DoesAnyWrite() const {
1585    return (flags_ & kAllWrites);
1586  }
1587
1588  // Returns true if something is read.
1589  bool DoesAnyRead() const {
1590    return (flags_ & kAllReads);
1591  }
1592
1593  // Returns true if potentially everything is written and read
1594  // (every type and every kind of access).
1595  bool DoesAllReadWrite() const {
1596    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1597  }
1598
1599  bool DoesAll() const {
1600    return flags_ == (kAllChangeBits | kAllDependOnBits);
1601  }
1602
1603  // Returns true if `this` may read something written by `other`.
1604  bool MayDependOn(SideEffects other) const {
1605    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1606    return (other.flags_ & depends_on_flags);
1607  }
1608
1609  // Returns string representation of flags (for debugging only).
1610  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1611  std::string ToString() const {
1612    std::string flags = "|";
1613    for (int s = kLastBit; s >= 0; s--) {
1614      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1615      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1616        // This is a bit for the GC side effect.
1617        if (current_bit_is_set) {
1618          flags += "GC";
1619        }
1620        flags += "|";
1621      } else {
1622        // This is a bit for the array/field analysis.
1623        // The underscore character stands for the 'can trigger GC' bit.
1624        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1625        if (current_bit_is_set) {
1626          flags += kDebug[s];
1627        }
1628        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1629            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1630          flags += "|";
1631        }
1632      }
1633    }
1634    return flags;
1635  }
1636
1637  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1638
1639 private:
1640  static constexpr int kFieldArrayAnalysisBits = 9;
1641
1642  static constexpr int kFieldWriteOffset = 0;
1643  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1644  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1645  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1646
1647  static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1648
1649  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1650  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1651  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1652  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1653
1654  static constexpr int kLastBit = kDependsOnGCBit;
1655  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1656
1657  // Aliases.
1658
1659  static_assert(kChangeBits == kDependOnBits,
1660                "the 'change' bits should match the 'depend on' bits.");
1661
1662  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1663  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1664  static constexpr uint64_t kAllWrites =
1665      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1666  static constexpr uint64_t kAllReads =
1667      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1668
1669  // Translates type to bit flag.
1670  static uint64_t TypeFlag(Primitive::Type type, int offset) {
1671    CHECK_NE(type, Primitive::kPrimVoid);
1672    const uint64_t one = 1;
1673    const int shift = type;  // 0-based consecutive enum
1674    DCHECK_LE(kFieldWriteOffset, shift);
1675    DCHECK_LT(shift, kArrayWriteOffset);
1676    return one << (type + offset);
1677  }
1678
1679  // Private constructor on direct flags value.
1680  explicit SideEffects(uint64_t flags) : flags_(flags) {}
1681
1682  uint64_t flags_;
1683};
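
// A minimal sketch of how the encoding above composes (illustrative only; this
// helper is not part of the compiler). An int field read is a 'depend on' bit
// that mirrors the int field write 'change' bit, so MayDependOn() flags the
// conflict, while an access of a different kind or type stays independent.
inline bool ExampleSideEffectsConflict() {
  SideEffects int_field_write =
      SideEffects::FieldWriteOfType(Primitive::kPrimInt, /* is_volatile */ false);
  SideEffects int_field_read =
      SideEffects::FieldReadOfType(Primitive::kPrimInt, /* is_volatile */ false);
  SideEffects float_array_read = SideEffects::ArrayReadOfType(Primitive::kPrimFloat);
  bool conflict = int_field_read.MayDependOn(int_field_write);        // true
  bool independent = !float_array_read.MayDependOn(int_field_write);  // also true
  return conflict && independent;
}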
1684
1685// A HEnvironment object contains the values of virtual registers at a given location.
1686class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
1687 public:
1688  HEnvironment(ArenaAllocator* arena,
1689               size_t number_of_vregs,
1690               const DexFile& dex_file,
1691               uint32_t method_idx,
1692               uint32_t dex_pc,
1693               InvokeType invoke_type,
1694               HInstruction* holder)
1695     : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
1696       locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
1697       parent_(nullptr),
1698       dex_file_(dex_file),
1699       method_idx_(method_idx),
1700       dex_pc_(dex_pc),
1701       invoke_type_(invoke_type),
1702       holder_(holder) {
1703  }
1704
1705  HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
1706      : HEnvironment(arena,
1707                     to_copy.Size(),
1708                     to_copy.GetDexFile(),
1709                     to_copy.GetMethodIdx(),
1710                     to_copy.GetDexPc(),
1711                     to_copy.GetInvokeType(),
1712                     holder) {}
1713
1714  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
1715    if (parent_ != nullptr) {
1716      parent_->SetAndCopyParentChain(allocator, parent);
1717    } else {
1718      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
1719      parent_->CopyFrom(parent);
1720      if (parent->GetParent() != nullptr) {
1721        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
1722      }
1723    }
1724  }
1725
1726  void CopyFrom(const ArenaVector<HInstruction*>& locals);
1727  void CopyFrom(HEnvironment* environment);
1728
1729  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
1730  // input to the loop phi instead. This is for inserting instructions that
1731  // require an environment (like HDeoptimization) in the loop pre-header.
1732  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
1733
1734  void SetRawEnvAt(size_t index, HInstruction* instruction) {
1735    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
1736  }
1737
1738  HInstruction* GetInstructionAt(size_t index) const {
1739    return vregs_[index].GetInstruction();
1740  }
1741
1742  void RemoveAsUserOfInput(size_t index) const;
1743
1744  size_t Size() const { return vregs_.size(); }
1745
1746  HEnvironment* GetParent() const { return parent_; }
1747
1748  void SetLocationAt(size_t index, Location location) {
1749    locations_[index] = location;
1750  }
1751
1752  Location GetLocationAt(size_t index) const {
1753    return locations_[index];
1754  }
1755
1756  uint32_t GetDexPc() const {
1757    return dex_pc_;
1758  }
1759
1760  uint32_t GetMethodIdx() const {
1761    return method_idx_;
1762  }
1763
1764  InvokeType GetInvokeType() const {
1765    return invoke_type_;
1766  }
1767
1768  const DexFile& GetDexFile() const {
1769    return dex_file_;
1770  }
1771
1772  HInstruction* GetHolder() const {
1773    return holder_;
1774  }
1775
1776
1777  bool IsFromInlinedInvoke() const {
1778    return GetParent() != nullptr;
1779  }
1780
1781 private:
1782  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
1783  ArenaVector<Location> locations_;
1784  HEnvironment* parent_;
1785  const DexFile& dex_file_;
1786  const uint32_t method_idx_;
1787  const uint32_t dex_pc_;
1788  const InvokeType invoke_type_;
1789
1790  // The instruction that holds this environment.
1791  HInstruction* const holder_;
1792
1793  friend class HInstruction;
1794
1795  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
1796};
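
// A minimal construction sketch for the class above (illustrative only; the
// helper name and the single-vreg size are arbitrary). Environments are
// arena-allocated and filled with SetRawEnvAt(); note that the "raw" setter
// does not register the value's environment use (see AddEnvUseAt() below).
inline HEnvironment* ExampleBuildEnvironment(ArenaAllocator* arena,
                                             const DexFile& dex_file,
                                             uint32_t method_idx,
                                             uint32_t dex_pc,
                                             HInstruction* holder,
                                             HInstruction* vreg0_value) {
  HEnvironment* environment = new (arena) HEnvironment(
      arena, /* number_of_vregs */ 1u, dex_file, method_idx, dex_pc, kStatic, holder);
  environment->SetRawEnvAt(0u, vreg0_value);
  return environment;
}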
1797
1798class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1799 public:
1800  HInstruction(SideEffects side_effects, uint32_t dex_pc)
1801      : previous_(nullptr),
1802        next_(nullptr),
1803        block_(nullptr),
1804        dex_pc_(dex_pc),
1805        id_(-1),
1806        ssa_index_(-1),
1807        packed_fields_(0u),
1808        environment_(nullptr),
1809        locations_(nullptr),
1810        live_interval_(nullptr),
1811        lifetime_position_(kNoLifetime),
1812        side_effects_(side_effects),
1813        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1814    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1815  }
1816
1817  virtual ~HInstruction() {}
1818
1819#define DECLARE_KIND(type, super) k##type,
1820  enum InstructionKind {
1821    FOR_EACH_INSTRUCTION(DECLARE_KIND)
1822  };
1823#undef DECLARE_KIND
1824
1825  HInstruction* GetNext() const { return next_; }
1826  HInstruction* GetPrevious() const { return previous_; }
1827
1828  HInstruction* GetNextDisregardingMoves() const;
1829  HInstruction* GetPreviousDisregardingMoves() const;
1830
1831  HBasicBlock* GetBlock() const { return block_; }
1832  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1833  void SetBlock(HBasicBlock* block) { block_ = block; }
1834  bool IsInBlock() const { return block_ != nullptr; }
1835  bool IsInLoop() const { return block_->IsInLoop(); }
1836  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1837  bool IsIrreducibleLoopHeaderPhi() const {
1838    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1839  }
1840
1841  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
1842
1843  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
1844    // One virtual method is enough, just const_cast<> and then re-add the const.
1845    return ArrayRef<const HUserRecord<HInstruction*>>(
1846        const_cast<HInstruction*>(this)->GetInputRecords());
1847  }
1848
1849  HInputsRef GetInputs() {
1850    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1851  }
1852
1853  HConstInputsRef GetInputs() const {
1854    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
1855  }
1856
1857  size_t InputCount() const { return GetInputRecords().size(); }
1858  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1859
1860  bool HasInput(HInstruction* input) const {
1861    for (const HInstruction* i : GetInputs()) {
1862      if (i == input) {
1863        return true;
1864      }
1865    }
1866    return false;
1867  }
1868
1869  void SetRawInputAt(size_t index, HInstruction* input) {
1870    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1871  }
1872
1873  virtual void Accept(HGraphVisitor* visitor) = 0;
1874  virtual const char* DebugName() const = 0;
1875
1876  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
1877
1878  virtual bool NeedsEnvironment() const { return false; }
1879
1880  uint32_t GetDexPc() const { return dex_pc_; }
1881
1882  virtual bool IsControlFlow() const { return false; }
1883
1884  virtual bool CanThrow() const { return false; }
1885  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1886
1887  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1888  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1889
1890  // Does not apply to all instructions, but having this at top level greatly
1891  // simplifies the null check elimination.
1892  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1893  virtual bool CanBeNull() const {
1894    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
1895    return true;
1896  }
1897
1898  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1899    return false;
1900  }
1901
1902  virtual bool IsActualObject() const {
1903    return GetType() == Primitive::kPrimNot;
1904  }
1905
1906  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
1907
1908  ReferenceTypeInfo GetReferenceTypeInfo() const {
1909    DCHECK_EQ(GetType(), Primitive::kPrimNot);
1910    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
1911                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
1912  }
1913
1914  void AddUseAt(HInstruction* user, size_t index) {
1915    DCHECK(user != nullptr);
1916    // Note: fixup_end remains valid across push_front().
1917    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
1918    HUseListNode<HInstruction*>* new_node =
1919        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
1920    uses_.push_front(*new_node);
1921    FixUpUserRecordsAfterUseInsertion(fixup_end);
1922  }
1923
1924  void AddEnvUseAt(HEnvironment* user, size_t index) {
1925    DCHECK(user != nullptr);
1926    // Note: env_fixup_end remains valid across push_front().
1927    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
1928    HUseListNode<HEnvironment*>* new_node =
1929        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
1930    env_uses_.push_front(*new_node);
1931    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
1932  }
1933
1934  void RemoveAsUserOfInput(size_t input) {
1935    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
1936    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
1937    input_use.GetInstruction()->uses_.erase_after(before_use_node);
1938    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
1939  }
1940
1941  void RemoveAsUserOfAllInputs() {
1942    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
1943      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
1944      input_use.GetInstruction()->uses_.erase_after(before_use_node);
1945      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
1946    }
1947  }
1948
1949  const HUseList<HInstruction*>& GetUses() const { return uses_; }
1950  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
1951
1952  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
1953  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
1954  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
1955  bool HasOnlyOneNonEnvironmentUse() const {
1956    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
1957  }
1958
1959  bool IsRemovable() const {
1960    return
1961        !DoesAnyWrite() &&
1962        !CanThrow() &&
1963        !IsSuspendCheck() &&
1964        !IsControlFlow() &&
1965        !IsNativeDebugInfo() &&
1966        !IsParameterValue() &&
1967        // If we added an explicit barrier then we should keep it.
1968        !IsMemoryBarrier();
1969  }
1970
1971  bool IsDeadAndRemovable() const {
1972    return IsRemovable() && !HasUses();
1973  }
1974
1975  // Does this instruction strictly dominate `other_instruction`?
1976  // Returns false if this instruction and `other_instruction` are the same.
1977  // Aborts if this instruction and `other_instruction` are both phis.
1978  bool StrictlyDominates(HInstruction* other_instruction) const;
1979
1980  int GetId() const { return id_; }
1981  void SetId(int id) { id_ = id; }
1982
1983  int GetSsaIndex() const { return ssa_index_; }
1984  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
1985  bool HasSsaIndex() const { return ssa_index_ != -1; }
1986
1987  bool HasEnvironment() const { return environment_ != nullptr; }
1988  HEnvironment* GetEnvironment() const { return environment_; }
1989  // Set the `environment_` field. Raw because this method does not
1990  // update the uses lists.
1991  void SetRawEnvironment(HEnvironment* environment) {
1992    DCHECK(environment_ == nullptr);
1993    DCHECK_EQ(environment->GetHolder(), this);
1994    environment_ = environment;
1995  }
1996
1997  void InsertRawEnvironment(HEnvironment* environment) {
1998    DCHECK(environment_ != nullptr);
1999    DCHECK_EQ(environment->GetHolder(), this);
2000    DCHECK(environment->GetParent() == nullptr);
2001    environment->parent_ = environment_;
2002    environment_ = environment;
2003  }
2004
2005  void RemoveEnvironment();
2006
2007  // Set the environment of this instruction, copying it from `environment`. While
2008  // copying, the uses lists are being updated.
2009  void CopyEnvironmentFrom(HEnvironment* environment) {
2010    DCHECK(environment_ == nullptr);
2011    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2012    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2013    environment_->CopyFrom(environment);
2014    if (environment->GetParent() != nullptr) {
2015      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2016    }
2017  }
2018
2019  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2020                                                HBasicBlock* block) {
2021    DCHECK(environment_ == nullptr);
2022    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
2023    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2024    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2025    if (environment->GetParent() != nullptr) {
2026      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2027    }
2028  }
2029
2030  // Returns the number of entries in the environment. Typically, that is the
2031  // number of dex registers in a method. It could be more in case of inlining.
2032  size_t EnvironmentSize() const;
2033
2034  LocationSummary* GetLocations() const { return locations_; }
2035  void SetLocations(LocationSummary* locations) { locations_ = locations; }
2036
2037  void ReplaceWith(HInstruction* instruction);
2038  void ReplaceInput(HInstruction* replacement, size_t index);
2039
2040  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2041  // uses of this instruction by `other` are *not* updated.
2042  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2043    ReplaceWith(other);
2044    other->ReplaceInput(this, use_index);
2045  }
2046
2047  // Move `this` instruction before `cursor`.
2048  void MoveBefore(HInstruction* cursor);
2049
2050  // Move `this` before its first user and out of any loops. If there is no
2051  // out-of-loop user that dominates all other users, move the instruction
2052  // to the end of the out-of-loop common dominator of the user's blocks.
2053  //
2054  // This can be used only on non-throwing instructions with no side effects that
2055  // have at least one use but no environment uses.
2056  void MoveBeforeFirstUserAndOutOfLoops();
2057
2058#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2059  bool Is##type() const;                                                       \
2060  const H##type* As##type() const;                                             \
2061  H##type* As##type();
2062
2063  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2064#undef INSTRUCTION_TYPE_CHECK
2065
2066#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2067  bool Is##type() const { return (As##type() != nullptr); }                    \
2068  virtual const H##type* As##type() const { return nullptr; }                  \
2069  virtual H##type* As##type() { return nullptr; }
2070  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2071#undef INSTRUCTION_TYPE_CHECK
2072
2073  // Returns whether the instruction can be moved within the graph.
2074  virtual bool CanBeMoved() const { return false; }
2075
2076  // Returns whether the two instructions are of the same kind.
2077  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2078    return false;
2079  }
2080
2081  // Returns whether any data encoded in the two instructions is equal.
2082  // This method does not look at the inputs. Both instructions must be
2083  // of the same type, otherwise the method has undefined behavior.
2084  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2085    return false;
2086  }
2087
2088  // Returns whether two instructions are equal, that is:
2089  // 1) They have the same type and contain the same data (InstructionDataEquals).
2090  // 2) Their inputs are identical.
2091  bool Equals(const HInstruction* other) const;
2092
2093  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2094  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
2095  // the virtual function because the __attribute__((__pure__)) doesn't really
2096  // apply the strong requirement for virtual functions, preventing optimizations.
2097  InstructionKind GetKind() const PURE;
2098  virtual InstructionKind GetKindInternal() const = 0;
2099
2100  virtual size_t ComputeHashCode() const {
2101    size_t result = GetKind();
2102    for (const HInstruction* input : GetInputs()) {
2103      result = (result * 31) + input->GetId();
2104    }
2105    return result;
2106  }
2107
2108  SideEffects GetSideEffects() const { return side_effects_; }
2109  void SetSideEffects(SideEffects other) { side_effects_ = other; }
2110  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2111
2112  size_t GetLifetimePosition() const { return lifetime_position_; }
2113  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2114  LiveInterval* GetLiveInterval() const { return live_interval_; }
2115  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2116  bool HasLiveInterval() const { return live_interval_ != nullptr; }
2117
2118  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2119
2120  // Returns whether the code generation of the instruction will require access
2121  // to the current method. Such instructions are:
2122  // (1): Instructions that require an environment, as calling the runtime requires
2123  //      walking the stack and having the current method stored at a specific stack address.
2124  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2125  //      to access the dex cache.
2126  bool NeedsCurrentMethod() const {
2127    return NeedsEnvironment() || IsCurrentMethod();
2128  }
2129
2130  // Returns whether the code generation of the instruction will require access
2131  // to the dex cache of the current method's declaring class via the current method.
2132  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2133
2134  // Does this instruction have any use in an environment before
2135  // control flow hits 'other'?
2136  bool HasAnyEnvironmentUseBefore(HInstruction* other);
2137
2138  // Remove all references to environment uses of this instruction.
2139  // The caller must ensure that this is safe to do.
2140  void RemoveEnvironmentUsers();
2141
2142  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2143  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2144
2145 protected:
2146  // If set, the machine code for this instruction is assumed to be generated by
2147  // its users. Used by liveness analysis to compute use positions accordingly.
2148  static constexpr size_t kFlagEmittedAtUseSite = 0u;
2149  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2150  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2151  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2152
2153  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2154    return GetInputRecords()[i];
2155  }
2156
2157  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2158    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2159    input_records[index] = input;
2160  }
2161
2162  uint32_t GetPackedFields() const {
2163    return packed_fields_;
2164  }
2165
2166  template <size_t flag>
2167  bool GetPackedFlag() const {
2168    return (packed_fields_ & (1u << flag)) != 0u;
2169  }
2170
2171  template <size_t flag>
2172  void SetPackedFlag(bool value = true) {
2173    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2174  }
2175
2176  template <typename BitFieldType>
2177  typename BitFieldType::value_type GetPackedField() const {
2178    return BitFieldType::Decode(packed_fields_);
2179  }
2180
2181  template <typename BitFieldType>
2182  void SetPackedField(typename BitFieldType::value_type value) {
2183    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2184    packed_fields_ = BitFieldType::Update(value, packed_fields_);
2185  }
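
  // A typical BitFieldType for the accessors above is a BitField<> alias such as
  // HExpression<N>::TypeField or HPhi::TypeField further down in this file, which
  // packs a Primitive::Type next to the generic flags.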
2186
2187 private:
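  // The helpers below maintain the invariant documented in HUserRecord<>: each
  // user's record stores an iterator to the node *before* its entry in this
  // instruction's (env-)use list. After a push_front() the records of the uses
  // in front of `fixup_end` are refreshed; after an erase_after() the record of
  // the use that now follows `before_use_node` is re-pointed at it.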
2188  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
2189    auto before_use_node = uses_.before_begin();
2190    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
2191      HInstruction* user = use_node->GetUser();
2192      size_t input_index = use_node->GetIndex();
2193      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
2194      before_use_node = use_node;
2195    }
2196  }
2197
2198  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
2199    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
2200    if (next != uses_.end()) {
2201      HInstruction* next_user = next->GetUser();
2202      size_t next_index = next->GetIndex();
2203      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
2204      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
2205    }
2206  }
2207
2208  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
2209    auto before_env_use_node = env_uses_.before_begin();
2210    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
2211      HEnvironment* user = env_use_node->GetUser();
2212      size_t input_index = env_use_node->GetIndex();
2213      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2214      before_env_use_node = env_use_node;
2215    }
2216  }
2217
2218  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
2219    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
2220    if (next != env_uses_.end()) {
2221      HEnvironment* next_user = next->GetUser();
2222      size_t next_index = next->GetIndex();
2223      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
2224      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2225    }
2226  }
2227
2228  HInstruction* previous_;
2229  HInstruction* next_;
2230  HBasicBlock* block_;
2231  const uint32_t dex_pc_;
2232
2233  // An instruction gets an id when it is added to the graph.
2234  // It reflects creation order. A negative id means the instruction
2235  // has not been added to the graph.
2236  int id_;
2237
2238  // When doing liveness analysis, instructions that have uses get an SSA index.
2239  int ssa_index_;
2240
2241  // Packed fields.
2242  uint32_t packed_fields_;
2243
2244  // List of instructions that have this instruction as input.
2245  HUseList<HInstruction*> uses_;
2246
2247  // List of environments that contain this instruction.
2248  HUseList<HEnvironment*> env_uses_;
2249
2250  // The environment associated with this instruction. Not null if the instruction
2251  // might jump out of the method.
2252  HEnvironment* environment_;
2253
2254  // Set by the code generator.
2255  LocationSummary* locations_;
2256
2257  // Set by the liveness analysis.
2258  LiveInterval* live_interval_;
2259
2260  // Set by the liveness analysis, this is the position in a linear
2261  // order of blocks where this instruction's live interval start.
2262  size_t lifetime_position_;
2263
2264  SideEffects side_effects_;
2265
2266  // The reference handle part of the reference type info.
2267  // The IsExact() flag is stored in packed fields.
2268  // TODO: for primitive types this should be marked as invalid.
2269  ReferenceTypeInfo::TypeHandle reference_type_handle_;
2270
2271  friend class GraphChecker;
2272  friend class HBasicBlock;
2273  friend class HEnvironment;
2274  friend class HGraph;
2275  friend class HInstructionList;
2276
2277  DISALLOW_COPY_AND_ASSIGN(HInstruction);
2278};
2279std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2280
2281class HInstructionIterator : public ValueObject {
2282 public:
2283  explicit HInstructionIterator(const HInstructionList& instructions)
2284      : instruction_(instructions.first_instruction_) {
2285    next_ = Done() ? nullptr : instruction_->GetNext();
2286  }
2287
2288  bool Done() const { return instruction_ == nullptr; }
2289  HInstruction* Current() const { return instruction_; }
2290  void Advance() {
2291    instruction_ = next_;
2292    next_ = Done() ? nullptr : instruction_->GetNext();
2293  }
2294
2295 private:
2296  HInstruction* instruction_;
2297  HInstruction* next_;
2298
2299  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2300};
2301
2302class HBackwardInstructionIterator : public ValueObject {
2303 public:
2304  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2305      : instruction_(instructions.last_instruction_) {
2306    next_ = Done() ? nullptr : instruction_->GetPrevious();
2307  }
2308
2309  bool Done() const { return instruction_ == nullptr; }
2310  HInstruction* Current() const { return instruction_; }
2311  void Advance() {
2312    instruction_ = next_;
2313    next_ = Done() ? nullptr : instruction_->GetPrevious();
2314  }
2315
2316 private:
2317  HInstruction* instruction_;
2318  HInstruction* next_;
2319
2320  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2321};
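
// A minimal usage sketch for the iterators above (illustrative only; it assumes
// the HBasicBlock::GetInstructions() accessor defined earlier in this file).
// Because `next_` is cached, the current instruction may be removed or moved
// while iterating.
inline size_t ExampleCountInstructions(const HBasicBlock* block) {
  size_t count = 0u;
  for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
    ++count;
  }
  return count;
}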
2322
2323template<size_t N>
2324class HTemplateInstruction: public HInstruction {
2325 public:
2326  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
2327      : HInstruction(side_effects, dex_pc), inputs_() {}
2328  virtual ~HTemplateInstruction() {}
2329
2330  using HInstruction::GetInputRecords;  // Keep the const version visible.
2331  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2332    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2333  }
2334
2335 private:
2336  std::array<HUserRecord<HInstruction*>, N> inputs_;
2337
2338  friend class SsaBuilder;
2339};
2340
2341// HTemplateInstruction specialization for N=0.
2342template<>
2343class HTemplateInstruction<0>: public HInstruction {
2344 public:
2345  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
2346      : HInstruction(side_effects, dex_pc) {}
2347
2348  virtual ~HTemplateInstruction() {}
2349
2350  using HInstruction::GetInputRecords;  // Keep the const version visible.
2351  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2352    return ArrayRef<HUserRecord<HInstruction*>>();
2353  }
2354
2355 private:
2356  friend class SsaBuilder;
2357};
2358
2359template<intptr_t N>
2360class HExpression : public HTemplateInstruction<N> {
2361 public:
2362  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
2363      : HTemplateInstruction<N>(side_effects, dex_pc) {
2364    this->template SetPackedField<TypeField>(type);
2365  }
2366  virtual ~HExpression() {}
2367
2368  Primitive::Type GetType() const OVERRIDE {
2369    return TypeField::Decode(this->GetPackedFields());
2370  }
2371
2372 protected:
2373  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2374  static constexpr size_t kFieldTypeSize =
2375      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
2376  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
2377  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
2378                "Too many packed fields.");
2379  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
2380};
2381
2382// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2383// instruction that branches to the exit block.
2384class HReturnVoid FINAL : public HTemplateInstruction<0> {
2385 public:
2386  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
2387      : HTemplateInstruction(SideEffects::None(), dex_pc) {}
2388
2389  bool IsControlFlow() const OVERRIDE { return true; }
2390
2391  DECLARE_INSTRUCTION(ReturnVoid);
2392
2393 private:
2394  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
2395};
2396
2397// Represents dex's RETURN opcodes. A HReturn is a control flow
2398// instruction that branches to the exit block.
2399class HReturn FINAL : public HTemplateInstruction<1> {
2400 public:
2401  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
2402      : HTemplateInstruction(SideEffects::None(), dex_pc) {
2403    SetRawInputAt(0, value);
2404  }
2405
2406  bool IsControlFlow() const OVERRIDE { return true; }
2407
2408  DECLARE_INSTRUCTION(Return);
2409
2410 private:
2411  DISALLOW_COPY_AND_ASSIGN(HReturn);
2412};
2413
2414class HPhi FINAL : public HInstruction {
2415 public:
2416  HPhi(ArenaAllocator* arena,
2417       uint32_t reg_number,
2418       size_t number_of_inputs,
2419       Primitive::Type type,
2420       uint32_t dex_pc = kNoDexPc)
2421      : HInstruction(SideEffects::None(), dex_pc),
2422        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
2423        reg_number_(reg_number) {
2424    SetPackedField<TypeField>(ToPhiType(type));
2425    DCHECK_NE(GetType(), Primitive::kPrimVoid);
2426    // Phis are constructed live and marked dead if conflicting or unused.
2427    // Individual steps of SsaBuilder should assume that if a phi has been
2428    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
2429    SetPackedFlag<kFlagIsLive>(true);
2430    SetPackedFlag<kFlagCanBeNull>(true);
2431  }
2432
2433  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
2434  static Primitive::Type ToPhiType(Primitive::Type type) {
2435    return Primitive::PrimitiveKind(type);
2436  }
2437
2438  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
2439
2440  using HInstruction::GetInputRecords;  // Keep the const version visible.
2441  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2442    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2443  }
2444
2445  void AddInput(HInstruction* input);
2446  void RemoveInputAt(size_t index);
2447
2448  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
2449  void SetType(Primitive::Type new_type) {
2450    // Make sure that only valid type changes occur. The following are allowed:
2451    //  (1) int  -> float/ref (primitive type propagation),
2452    //  (2) long -> double (primitive type propagation).
2453    DCHECK(GetType() == new_type ||
2454           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
2455           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
2456           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
2457    SetPackedField<TypeField>(new_type);
2458  }
2459
2460  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
2461  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
2462
2463  uint32_t GetRegNumber() const { return reg_number_; }
2464
2465  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
2466  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
2467  bool IsDead() const { return !IsLive(); }
2468  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
2469
2470  bool IsVRegEquivalentOf(const HInstruction* other) const {
2471    return other != nullptr
2472        && other->IsPhi()
2473        && other->AsPhi()->GetBlock() == GetBlock()
2474        && other->AsPhi()->GetRegNumber() == GetRegNumber();
2475  }
2476
2477  // Returns the next equivalent phi (starting from the current one) or null if there is none.
2478  // An equivalent phi is a phi having the same dex register and type.
2479  // It assumes that phis with the same dex register are adjacent.
2480  HPhi* GetNextEquivalentPhiWithSameType() {
2481    HInstruction* next = GetNext();
2482    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
2483      if (next->GetType() == GetType()) {
2484        return next->AsPhi();
2485      }
2486      next = next->GetNext();
2487    }
2488    return nullptr;
2489  }
2490
2491  DECLARE_INSTRUCTION(Phi);
2492
2493 private:
2494  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2495  static constexpr size_t kFieldTypeSize =
2496      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
2497  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
2498  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
2499  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
2500  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
2501  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
2502
2503  ArenaVector<HUserRecord<HInstruction*>> inputs_;
2504  const uint32_t reg_number_;
2505
2506  DISALLOW_COPY_AND_ASSIGN(HPhi);
2507};
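
// A minimal construction sketch for HPhi (illustrative only; the helper name is
// arbitrary): a loop-header phi for dex register `reg_number` merging the
// pre-header and back-edge values. SetRawInputAt() only fills the input records;
// the corresponding uses are registered when the phi is wired into the graph.
inline HPhi* ExampleBuildLoopPhi(ArenaAllocator* arena,
                                 uint32_t reg_number,
                                 HInstruction* pre_header_value,
                                 HInstruction* back_edge_value) {
  HPhi* phi = new (arena) HPhi(arena, reg_number, /* number_of_inputs */ 2u, Primitive::kPrimInt);
  phi->SetRawInputAt(0u, pre_header_value);
  phi->SetRawInputAt(1u, back_edge_value);
  return phi;
}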
2508
2509// The exit instruction is the only instruction of the exit block.
2510// Instructions aborting the method (HThrow and HReturn) must branch to the
2511// exit block.
2512class HExit FINAL : public HTemplateInstruction<0> {
2513 public:
2514  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}
2515
2516  bool IsControlFlow() const OVERRIDE { return true; }
2517
2518  DECLARE_INSTRUCTION(Exit);
2519
2520 private:
2521  DISALLOW_COPY_AND_ASSIGN(HExit);
2522};
2523
2524// Jumps from one block to another.
2525class HGoto FINAL : public HTemplateInstruction<0> {
2526 public:
2527  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}
2528
2529  bool IsControlFlow() const OVERRIDE { return true; }
2530
2531  HBasicBlock* GetSuccessor() const {
2532    return GetBlock()->GetSingleSuccessor();
2533  }
2534
2535  DECLARE_INSTRUCTION(Goto);
2536
2537 private:
2538  DISALLOW_COPY_AND_ASSIGN(HGoto);
2539};
2540
2541class HConstant : public HExpression<0> {
2542 public:
2543  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
2544      : HExpression(type, SideEffects::None(), dex_pc) {}
2545
2546  bool CanBeMoved() const OVERRIDE { return true; }
2547
2548  // Is this constant -1 in the arithmetic sense?
2549  virtual bool IsMinusOne() const { return false; }
2550  // Is this constant 0 in the arithmetic sense?
2551  virtual bool IsArithmeticZero() const { return false; }
2552  // Is this constant a 0-bit pattern?
2553  virtual bool IsZeroBitPattern() const { return false; }
2554  // Is this constant 1 in the arithmetic sense?
2555  virtual bool IsOne() const { return false; }
2556
2557  virtual uint64_t GetValueAsUint64() const = 0;
2558
2559  DECLARE_ABSTRACT_INSTRUCTION(Constant);
2560
2561 private:
2562  DISALLOW_COPY_AND_ASSIGN(HConstant);
2563};
2564
2565class HNullConstant FINAL : public HConstant {
2566 public:
2567  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2568    return true;
2569  }
2570
2571  uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2572
2573  size_t ComputeHashCode() const OVERRIDE { return 0; }
2574
2575  // The null constant representation is a 0-bit pattern.
2576  bool IsZeroBitPattern() const OVERRIDE { return true; }
2577
2578  DECLARE_INSTRUCTION(NullConstant);
2579
2580 private:
2581  explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2582
2583  friend class HGraph;
2584  DISALLOW_COPY_AND_ASSIGN(HNullConstant);
2585};
2586
2587// Constants of the type int. Those can be from Dex instructions, or
2588// synthesized (for example with the if-eqz instruction).
2589class HIntConstant FINAL : public HConstant {
2590 public:
2591  int32_t GetValue() const { return value_; }
2592
2593  uint64_t GetValueAsUint64() const OVERRIDE {
2594    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
2595  }
2596
2597  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2598    DCHECK(other->IsIntConstant()) << other->DebugName();
2599    return other->AsIntConstant()->value_ == value_;
2600  }
2601
2602  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }
2603
2604  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2605  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2606  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2607  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2608
2609  // Integer constants are used to encode Boolean values as well,
2610  // where 1 means true and 0 means false.
2611  bool IsTrue() const { return GetValue() == 1; }
2612  bool IsFalse() const { return GetValue() == 0; }
2613
2614  DECLARE_INSTRUCTION(IntConstant);
2615
2616 private:
2617  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2618      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
2619  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
2620      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}
2621
2622  const int32_t value_;
2623
2624  friend class HGraph;
2625  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
2626  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
2627  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
2628};
2629
2630class HLongConstant FINAL : public HConstant {
2631 public:
2632  int64_t GetValue() const { return value_; }
2633
2634  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }
2635
2636  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2637    DCHECK(other->IsLongConstant()) << other->DebugName();
2638    return other->AsLongConstant()->value_ == value_;
2639  }
2640
2641  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2642
2643  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
2644  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
2645  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
2646  bool IsOne() const OVERRIDE { return GetValue() == 1; }
2647
2648  DECLARE_INSTRUCTION(LongConstant);
2649
2650 private:
2651  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2652      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}
2653
2654  const int64_t value_;
2655
2656  friend class HGraph;
2657  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
2658};
2659
2660class HFloatConstant FINAL : public HConstant {
2661 public:
2662  float GetValue() const { return value_; }
2663
2664  uint64_t GetValueAsUint64() const OVERRIDE {
2665    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2666  }
2667
2668  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2669    DCHECK(other->IsFloatConstant()) << other->DebugName();
2670    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2671  }
2672
2673  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2674
2675  bool IsMinusOne() const OVERRIDE {
2676    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2677  }
2678  bool IsArithmeticZero() const OVERRIDE {
2679    return std::fpclassify(value_) == FP_ZERO;
2680  }
2681  bool IsArithmeticPositiveZero() const {
2682    return IsArithmeticZero() && !std::signbit(value_);
2683  }
2684  bool IsArithmeticNegativeZero() const {
2685    return IsArithmeticZero() && std::signbit(value_);
2686  }
2687  bool IsZeroBitPattern() const OVERRIDE {
2688    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
2689  }
2690  bool IsOne() const OVERRIDE {
2691    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
2692  }
2693  bool IsNaN() const {
2694    return std::isnan(value_);
2695  }
2696
2697  DECLARE_INSTRUCTION(FloatConstant);
2698
2699 private:
2700  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
2701      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
2702  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
2703      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
2704
2705  const float value_;
2706
2707  // Only the SsaBuilder and HGraph can create floating-point constants.
2708  friend class SsaBuilder;
2709  friend class HGraph;
2710  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
2711};
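
// Note on the zero predicates above: -0.0f is an arithmetic zero (its
// fpclassify is FP_ZERO) but not a 0-bit pattern (its bits are 0x80000000), so
// transformations that rely on bitwise identity must use IsZeroBitPattern()
// rather than IsArithmeticZero(). The same distinction holds for
// HDoubleConstant below.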
2712
2713class HDoubleConstant FINAL : public HConstant {
2714 public:
2715  double GetValue() const { return value_; }
2716
2717  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
2718
2719  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2720    DCHECK(other->IsDoubleConstant()) << other->DebugName();
2721    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
2722  }
2723
2724  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2725
2726  bool IsMinusOne() const OVERRIDE {
2727    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
2728  }
2729  bool IsArithmeticZero() const OVERRIDE {
2730    return std::fpclassify(value_) == FP_ZERO;
2731  }
2732  bool IsArithmeticPositiveZero() const {
2733    return IsArithmeticZero() && !std::signbit(value_);
2734  }
2735  bool IsArithmeticNegativeZero() const {
2736    return IsArithmeticZero() && std::signbit(value_);
2737  }
2738  bool IsZeroBitPattern() const OVERRIDE {
2739    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
2740  }
2741  bool IsOne() const OVERRIDE {
2742    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
2743  }
2744  bool IsNaN() const {
2745    return std::isnan(value_);
2746  }
2747
2748  DECLARE_INSTRUCTION(DoubleConstant);
2749
2750 private:
2751  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
2752      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
2753  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
2754      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
2755
2756  const double value_;
2757
2758  // Only the SsaBuilder and HGraph can create floating-point constants.
2759  friend class SsaBuilder;
2760  friend class HGraph;
2761  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
2762};
2763
2764// Conditional branch. A block ending with an HIf instruction must have
2765// two successors.
2766class HIf FINAL : public HTemplateInstruction<1> {
2767 public:
2768  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
2769      : HTemplateInstruction(SideEffects::None(), dex_pc) {
2770    SetRawInputAt(0, input);
2771  }
2772
2773  bool IsControlFlow() const OVERRIDE { return true; }
2774
2775  HBasicBlock* IfTrueSuccessor() const {
2776    return GetBlock()->GetSuccessors()[0];
2777  }
2778
2779  HBasicBlock* IfFalseSuccessor() const {
2780    return GetBlock()->GetSuccessors()[1];
2781  }
2782
2783  DECLARE_INSTRUCTION(If);
2784
2785 private:
2786  DISALLOW_COPY_AND_ASSIGN(HIf);
2787};
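
// A one-line sketch of the successor convention above (illustrative only): the
// true branch is successor 0 and the false branch is successor 1.
inline HBasicBlock* ExampleTakenSuccessor(const HIf* hif, bool condition_value) {
  return condition_value ? hif->IfTrueSuccessor() : hif->IfFalseSuccessor();
}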
2788
2789
2790// Abstract instruction which marks the beginning and/or end of a try block and
2791// links it to the respective exception handlers. Behaves the same as a Goto in
2792// non-exceptional control flow.
2793// Normal-flow successor is stored at index zero, exception handlers under
2794// higher indices in no particular order.
2795class HTryBoundary FINAL : public HTemplateInstruction<0> {
2796 public:
2797  enum class BoundaryKind {
2798    kEntry,
2799    kExit,
2800    kLast = kExit
2801  };
2802
2803  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
2804      : HTemplateInstruction(SideEffects::None(), dex_pc) {
2805    SetPackedField<BoundaryKindField>(kind);
2806  }
2807
2808  bool IsControlFlow() const OVERRIDE { return true; }
2809
2810  // Returns the block's non-exceptional successor (index zero).
2811  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
2812
2813  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
2814    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
2815  }
2816
2817  // Returns whether `handler` is among its exception handlers (non-zero index
2818  // successors).
2819  bool HasExceptionHandler(const HBasicBlock& handler) const {
2820    DCHECK(handler.IsCatchBlock());
2821    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
2822  }
2823
2824  // If not present already, adds `handler` to its block's list of exception
2825  // handlers.
2826  void AddExceptionHandler(HBasicBlock* handler) {
2827    if (!HasExceptionHandler(*handler)) {
2828      GetBlock()->AddSuccessor(handler);
2829    }
2830  }
2831
2832  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
2833  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
2834
2835  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
2836
2837  DECLARE_INSTRUCTION(TryBoundary);
2838
2839 private:
2840  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
2841  static constexpr size_t kFieldBoundaryKindSize =
2842      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
2843  static constexpr size_t kNumberOfTryBoundaryPackedBits =
2844      kFieldBoundaryKind + kFieldBoundaryKindSize;
2845  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
2846                "Too many packed fields.");
2847  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
2848
2849  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
2850};
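// Sketch of the successor layout described above: index 0 is the normal-flow
// edge and indices 1+ are handlers, so a pass can visit the handlers without
// touching the fall-through edge:
//
//   HTryBoundary* boundary = block->GetLastInstruction()->AsTryBoundary();
//   HBasicBlock* fallthrough = boundary->GetNormalFlowSuccessor();   // successors[0]
//   for (HBasicBlock* handler : boundary->GetExceptionHandlers()) {  // successors[1..]
//     DCHECK(handler->IsCatchBlock());
//   }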
2851
2852// Deoptimize to interpreter, upon checking a condition.
2853class HDeoptimize FINAL : public HTemplateInstruction<1> {
2854 public:
2855  // We set CanTriggerGC to prevent any intermediate address from being live
2856  // at the point of the `HDeoptimize`.
2857  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
2858      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
2859    SetRawInputAt(0, cond);
2860  }
2861
2862  bool CanBeMoved() const OVERRIDE { return true; }
2863  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2864    return true;
2865  }
2866  bool NeedsEnvironment() const OVERRIDE { return true; }
2867  bool CanThrow() const OVERRIDE { return true; }
2868
2869  DECLARE_INSTRUCTION(Deoptimize);
2870
2871 private:
2872  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
2873};
2874
2875// Represents the ArtMethod that was passed as a first argument to
2876// the method. It is used by instructions that depend on it, like
2877// instructions that work with the dex cache.
2878class HCurrentMethod FINAL : public HExpression<0> {
2879 public:
2880  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
2881      : HExpression(type, SideEffects::None(), dex_pc) {}
2882
2883  DECLARE_INSTRUCTION(CurrentMethod);
2884
2885 private:
2886  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
2887};
2888
2889// Fetches an ArtMethod from the virtual table or the interface method table
2890// of a class.
2891class HClassTableGet FINAL : public HExpression<1> {
2892 public:
2893  enum class TableKind {
2894    kVTable,
2895    kIMTable,
2896    kLast = kIMTable
2897  };
2898  HClassTableGet(HInstruction* cls,
2899                 Primitive::Type type,
2900                 TableKind kind,
2901                 size_t index,
2902                 uint32_t dex_pc)
2903      : HExpression(type, SideEffects::None(), dex_pc),
2904        index_(index) {
2905    SetPackedField<TableKindField>(kind);
2906    SetRawInputAt(0, cls);
2907  }
2908
2909  bool CanBeMoved() const OVERRIDE { return true; }
2910  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2911    return other->AsClassTableGet()->GetIndex() == index_ &&
2912        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2913  }
2914
2915  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2916  size_t GetIndex() const { return index_; }
2917
2918  DECLARE_INSTRUCTION(ClassTableGet);
2919
2920 private:
2921  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2922  static constexpr size_t kFieldTableKindSize =
2923      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2924  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2925  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2926                "Too many packed fields.");
2927  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
2928
2929  // The index of the ArtMethod in the table.
2930  const size_t index_;
2931
2932  DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2933};
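// Note on the packed-field idiom used above and throughout this file:
// BitField<T, kPosition, kSize> names a field of kSize bits starting at bit
// kPosition of the packed word. Each new field starts at the previous field's
// position plus its size, the third template argument must be the *Size
// constant (as in TableKindField above), and the static_assert keeps the total
// within kMaxNumberOfPackedBits.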
2934
2935// PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
2936// have one successor for each entry in the switch table, and the final successor
2937// will be the block containing the next Dex opcode.
2938class HPackedSwitch FINAL : public HTemplateInstruction<1> {
2939 public:
2940  HPackedSwitch(int32_t start_value,
2941                uint32_t num_entries,
2942                HInstruction* input,
2943                uint32_t dex_pc = kNoDexPc)
2944    : HTemplateInstruction(SideEffects::None(), dex_pc),
2945      start_value_(start_value),
2946      num_entries_(num_entries) {
2947    SetRawInputAt(0, input);
2948  }
2949
2950  bool IsControlFlow() const OVERRIDE { return true; }
2951
2952  int32_t GetStartValue() const { return start_value_; }
2953
2954  uint32_t GetNumEntries() const { return num_entries_; }
2955
2956  HBasicBlock* GetDefaultBlock() const {
2957    // Last entry is the default block.
2958    return GetBlock()->GetSuccessors()[num_entries_];
2959  }
2960  DECLARE_INSTRUCTION(PackedSwitch);
2961
2962 private:
2963  const int32_t start_value_;
2964  const uint32_t num_entries_;
2965
2966  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
2967};
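// Sketch: with start_value_ == 10 and num_entries_ == 3, successor i (for i in
// [0, 2]) is the target of case value 10 + i, and GetDefaultBlock() ==
// successors[3] is the fall-through block holding the next Dex opcode.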
2968
2969class HUnaryOperation : public HExpression<1> {
2970 public:
2971  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
2972      : HExpression(result_type, SideEffects::None(), dex_pc) {
2973    SetRawInputAt(0, input);
2974  }
2975
2976  HInstruction* GetInput() const { return InputAt(0); }
2977  Primitive::Type GetResultType() const { return GetType(); }
2978
2979  bool CanBeMoved() const OVERRIDE { return true; }
2980  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2981    return true;
2982  }
2983
2984  // Try to statically evaluate `this` and return a HConstant
2985  // containing the result of this evaluation.  If `this` cannot
2986  // be evaluated as a constant, return null.
2987  HConstant* TryStaticEvaluation() const;
2988
2989  // Apply this operation to `x`.
2990  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
2991  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
2992  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
2993  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;
2994
2995  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
2996
2997 private:
2998  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
2999};
3000
3001class HBinaryOperation : public HExpression<2> {
3002 public:
3003  HBinaryOperation(Primitive::Type result_type,
3004                   HInstruction* left,
3005                   HInstruction* right,
3006                   SideEffects side_effects = SideEffects::None(),
3007                   uint32_t dex_pc = kNoDexPc)
3008      : HExpression(result_type, side_effects, dex_pc) {
3009    SetRawInputAt(0, left);
3010    SetRawInputAt(1, right);
3011  }
3012
3013  HInstruction* GetLeft() const { return InputAt(0); }
3014  HInstruction* GetRight() const { return InputAt(1); }
3015  Primitive::Type GetResultType() const { return GetType(); }
3016
3017  virtual bool IsCommutative() const { return false; }
3018
3019  // Puts the constant on the right-hand side.
3020  // Returns whether the order was changed.
3021  bool OrderInputsWithConstantOnTheRight() {
3022    HInstruction* left = InputAt(0);
3023    HInstruction* right = InputAt(1);
3024    if (left->IsConstant() && !right->IsConstant()) {
3025      ReplaceInput(right, 0);
3026      ReplaceInput(left, 1);
3027      return true;
3028    }
3029    return false;
3030  }
3031
3032  // Order inputs by instruction id, but favor constant on the right side.
3033  // This helps GVN for commutative ops.
3034  void OrderInputs() {
3035    DCHECK(IsCommutative());
3036    HInstruction* left = InputAt(0);
3037    HInstruction* right = InputAt(1);
3038    if (left == right || (!left->IsConstant() && right->IsConstant())) {
3039      return;
3040    }
3041    if (OrderInputsWithConstantOnTheRight()) {
3042      return;
3043    }
3044    // Order according to instruction id.
3045    if (left->GetId() > right->GetId()) {
3046      ReplaceInput(right, 0);
3047      ReplaceInput(left, 1);
3048    }
3049  }
3050
3051  bool CanBeMoved() const OVERRIDE { return true; }
3052  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3053    return true;
3054  }
3055
3056  // Try to statically evaluate `this` and return a HConstant
3057  // containing the result of this evaluation.  If `this` cannot
3058  // be evaluated as a constant, return null.
3059  HConstant* TryStaticEvaluation() const;
3060
3061  // Apply this operation to `x` and `y`.
3062  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3063                              HNullConstant* y ATTRIBUTE_UNUSED) const {
3064    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3065    UNREACHABLE();
3066  }
3067  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3068  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
3069  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3070                              HIntConstant* y ATTRIBUTE_UNUSED) const {
3071    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3072    UNREACHABLE();
3073  }
3074  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3075  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3076
3077  // Returns an input that can legally be used as the right input and is
3078  // constant, or null.
3079  HConstant* GetConstantRight() const;
3080
3081  // If `GetConstantRight()` returns one of the inputs, this returns the other
3082  // one. Otherwise it returns null.
3083  HInstruction* GetLeastConstantLeft() const;
3084
3085  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3086
3087 private:
3088  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
3089};
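// Sketch of the canonicalization performed by OrderInputs(), assuming `add` is
// a commutative HBinaryOperation (e.g. an HAdd) built with a constant left
// input:
//
//   add->InputAt(0)->IsConstant();  // true before canonicalization
//   add->OrderInputs();             // swaps via OrderInputsWithConstantOnTheRight()
//   add->InputAt(1)->IsConstant();  // true afterwards, so GVN sees `x + c`
//                                   // regardless of the original input order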
3090
3091// The comparison bias applies to floating-point operations and indicates how NaN
3092// comparisons are treated:
3093enum class ComparisonBias {
3094  kNoBias,  // bias is not applicable (e.g. for a long comparison)
3095  kGtBias,  // return 1 for NaN comparisons
3096  kLtBias,  // return -1 for NaN comparisons
3097  kLast = kLtBias
3098};
3099
3100std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
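// Example of how the bias encodes the dex cmpg/cmpl semantics: `if (x < y)` on
// floats is compiled with cmpg-float (gt bias), so a NaN input yields +1 and
// the `< 0` test fails; `if (x > y)` is compiled with cmpl-float (lt bias), so
// a NaN input yields -1 and the `> 0` test fails. Either way the branch is not
// taken on NaN, matching Java semantics.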
3101
3102class HCondition : public HBinaryOperation {
3103 public:
3104  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3105      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
3106    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3107  }
3108
3109  // For code generation purposes, returns whether this instruction is just before
3110  // `instruction`, disregarding moves in between.
3111  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3112
3113  DECLARE_ABSTRACT_INSTRUCTION(Condition);
3114
3115  virtual IfCondition GetCondition() const = 0;
3116
3117  virtual IfCondition GetOppositeCondition() const = 0;
3118
3119  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3120  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3121
3122  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3123  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3124
3125  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3126    return GetPackedFields() == other->AsCondition()->GetPackedFields();
3127  }
3128
3129  bool IsFPConditionTrueIfNaN() const {
3130    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3131    IfCondition if_cond = GetCondition();
3132    if (if_cond == kCondNE) {
3133      return true;
3134    } else if (if_cond == kCondEQ) {
3135      return false;
3136    }
3137    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3138  }
3139
3140  bool IsFPConditionFalseIfNaN() const {
3141    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3142    IfCondition if_cond = GetCondition();
3143    if (if_cond == kCondEQ) {
3144      return true;
3145    } else if (if_cond == kCondNE) {
3146      return false;
3147    }
3148    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3149  }
3150
3151 protected:
3152  // Needed if we merge a HCompare into a HCondition.
3153  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3154  static constexpr size_t kFieldComparisonBiasSize =
3155      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3156  static constexpr size_t kNumberOfConditionPackedBits =
3157      kFieldComparisonBias + kFieldComparisonBiasSize;
3158  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3159  using ComparisonBiasField =
3160      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3161
3162  template <typename T>
3163  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3164
3165  template <typename T>
3166  int32_t CompareFP(T x, T y) const {
3167    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3168    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3169    // Handle the bias.
3170    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3171  }
3172
3173  // Return an integer constant containing the result of a condition evaluated at compile time.
3174  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3175    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3176  }
3177
3178 private:
3179  DISALLOW_COPY_AND_ASSIGN(HCondition);
3180};
3181
3182// Instruction to check if two inputs are equal to each other.
3183class HEqual FINAL : public HCondition {
3184 public:
3185  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3186      : HCondition(first, second, dex_pc) {}
3187
3188  bool IsCommutative() const OVERRIDE { return true; }
3189
3190  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3191                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3192    return MakeConstantCondition(true, GetDexPc());
3193  }
3194  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3195    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3196  }
3197  // In the following Evaluate methods, a HCompare instruction has
3198  // been merged into this HEqual instruction; evaluate it as
3199  // `Compare(x, y) == 0`.
3200  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3201    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3202                                 GetDexPc());
3203  }
3204  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3205    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3206  }
3207  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3208    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3209  }
3210
3211  DECLARE_INSTRUCTION(Equal);
3212
3213  IfCondition GetCondition() const OVERRIDE {
3214    return kCondEQ;
3215  }
3216
3217  IfCondition GetOppositeCondition() const OVERRIDE {
3218    return kCondNE;
3219  }
3220
3221 private:
3222  template <typename T> static bool Compute(T x, T y) { return x == y; }
3223
3224  DISALLOW_COPY_AND_ASSIGN(HEqual);
3225};
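// Sketch of the merged-HCompare folding described above: once an
// HCompare(2L, 3L) has been merged into an HEqual against zero, the long
// Evaluate() overload computes Compare(2, 3) == -1, then Compute(-1, 0) ==
// false, so TryStaticEvaluation() returns the graph's int constant 0.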
3226
3227class HNotEqual FINAL : public HCondition {
3228 public:
3229  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3230      : HCondition(first, second, dex_pc) {}
3231
3232  bool IsCommutative() const OVERRIDE { return true; }
3233
3234  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3235                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3236    return MakeConstantCondition(false, GetDexPc());
3237  }
3238  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3239    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3240  }
3241  // In the following Evaluate methods, a HCompare instruction has
3242  // been merged into this HNotEqual instruction; evaluate it as
3243  // `Compare(x, y) != 0`.
3244  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3245    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3246  }
3247  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3248    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3249  }
3250  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3251    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3252  }
3253
3254  DECLARE_INSTRUCTION(NotEqual);
3255
3256  IfCondition GetCondition() const OVERRIDE {
3257    return kCondNE;
3258  }
3259
3260  IfCondition GetOppositeCondition() const OVERRIDE {
3261    return kCondEQ;
3262  }
3263
3264 private:
3265  template <typename T> static bool Compute(T x, T y) { return x != y; }
3266
3267  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
3268};
3269
3270class HLessThan FINAL : public HCondition {
3271 public:
3272  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3273      : HCondition(first, second, dex_pc) {}
3274
3275  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3276    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3277  }
3278  // In the following Evaluate methods, a HCompare instruction has
3279  // been merged into this HLessThan instruction; evaluate it as
3280  // `Compare(x, y) < 0`.
3281  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3282    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3283  }
3284  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3285    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3286  }
3287  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3288    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3289  }
3290
3291  DECLARE_INSTRUCTION(LessThan);
3292
3293  IfCondition GetCondition() const OVERRIDE {
3294    return kCondLT;
3295  }
3296
3297  IfCondition GetOppositeCondition() const OVERRIDE {
3298    return kCondGE;
3299  }
3300
3301 private:
3302  template <typename T> static bool Compute(T x, T y) { return x < y; }
3303
3304  DISALLOW_COPY_AND_ASSIGN(HLessThan);
3305};
3306
3307class HLessThanOrEqual FINAL : public HCondition {
3308 public:
3309  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3310      : HCondition(first, second, dex_pc) {}
3311
3312  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3313    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3314  }
3315  // In the following Evaluate methods, a HCompare instruction has
3316  // been merged into this HLessThanOrEqual instruction; evaluate it as
3317  // `Compare(x, y) <= 0`.
3318  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3319    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3320  }
3321  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3322    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3323  }
3324  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3325    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3326  }
3327
3328  DECLARE_INSTRUCTION(LessThanOrEqual);
3329
3330  IfCondition GetCondition() const OVERRIDE {
3331    return kCondLE;
3332  }
3333
3334  IfCondition GetOppositeCondition() const OVERRIDE {
3335    return kCondGT;
3336  }
3337
3338 private:
3339  template <typename T> static bool Compute(T x, T y) { return x <= y; }
3340
3341  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
3342};
3343
3344class HGreaterThan FINAL : public HCondition {
3345 public:
3346  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3347      : HCondition(first, second, dex_pc) {}
3348
3349  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3350    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3351  }
3352  // In the following Evaluate methods, a HCompare instruction has
3353  // been merged into this HGreaterThan instruction; evaluate it as
3354  // `Compare(x, y) > 0`.
3355  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3356    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3357  }
3358  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3359    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3360  }
3361  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3362    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3363  }
3364
3365  DECLARE_INSTRUCTION(GreaterThan);
3366
3367  IfCondition GetCondition() const OVERRIDE {
3368    return kCondGT;
3369  }
3370
3371  IfCondition GetOppositeCondition() const OVERRIDE {
3372    return kCondLE;
3373  }
3374
3375 private:
3376  template <typename T> static bool Compute(T x, T y) { return x > y; }
3377
3378  DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
3379};
3380
3381class HGreaterThanOrEqual FINAL : public HCondition {
3382 public:
3383  HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3384      : HCondition(first, second, dex_pc) {}
3385
3386  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3387    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3388  }
3389  // In the following Evaluate methods, a HCompare instruction has
3390  // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3391  // `Compare(x, y) >= 0`.
3392  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3393    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3394  }
3395  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3396    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3397  }
3398  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3399    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3400  }
3401
3402  DECLARE_INSTRUCTION(GreaterThanOrEqual);
3403
3404  IfCondition GetCondition() const OVERRIDE {
3405    return kCondGE;
3406  }
3407
3408  IfCondition GetOppositeCondition() const OVERRIDE {
3409    return kCondLT;
3410  }
3411
3412 private:
3413  template <typename T> static bool Compute(T x, T y) { return x >= y; }
3414
3415  DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
3416};
3417
3418class HBelow FINAL : public HCondition {
3419 public:
3420  HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3421      : HCondition(first, second, dex_pc) {}
3422
3423  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3424    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3425  }
3426  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3427    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3428  }
3429  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3430                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3431    LOG(FATAL) << DebugName() << " is not defined for float values";
3432    UNREACHABLE();
3433  }
3434  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3435                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3436    LOG(FATAL) << DebugName() << " is not defined for double values";
3437    UNREACHABLE();
3438  }
3439
3440  DECLARE_INSTRUCTION(Below);
3441
3442  IfCondition GetCondition() const OVERRIDE {
3443    return kCondB;
3444  }
3445
3446  IfCondition GetOppositeCondition() const OVERRIDE {
3447    return kCondAE;
3448  }
3449
3450 private:
3451  template <typename T> static bool Compute(T x, T y) {
3452    return MakeUnsigned(x) < MakeUnsigned(y);
3453  }
3454
3455  DISALLOW_COPY_AND_ASSIGN(HBelow);
3456};
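// Example of the unsigned semantics: for int inputs -1 and 1, Compute()
// evaluates MakeUnsigned(-1) < MakeUnsigned(1), i.e. 0xFFFFFFFFu < 1u, which
// is false, whereas the signed HLessThan(-1, 1) would fold to true.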
3457
3458class HBelowOrEqual FINAL : public HCondition {
3459 public:
3460  HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3461      : HCondition(first, second, dex_pc) {}
3462
3463  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3464    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3465  }
3466  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3467    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3468  }
3469  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3470                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3471    LOG(FATAL) << DebugName() << " is not defined for float values";
3472    UNREACHABLE();
3473  }
3474  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3475                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3476    LOG(FATAL) << DebugName() << " is not defined for double values";
3477    UNREACHABLE();
3478  }
3479
3480  DECLARE_INSTRUCTION(BelowOrEqual);
3481
3482  IfCondition GetCondition() const OVERRIDE {
3483    return kCondBE;
3484  }
3485
3486  IfCondition GetOppositeCondition() const OVERRIDE {
3487    return kCondA;
3488  }
3489
3490 private:
3491  template <typename T> static bool Compute(T x, T y) {
3492    return MakeUnsigned(x) <= MakeUnsigned(y);
3493  }
3494
3495  DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
3496};
3497
3498class HAbove FINAL : public HCondition {
3499 public:
3500  HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3501      : HCondition(first, second, dex_pc) {}
3502
3503  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3504    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3505  }
3506  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3507    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3508  }
3509  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3510                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3511    LOG(FATAL) << DebugName() << " is not defined for float values";
3512    UNREACHABLE();
3513  }
3514  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3515                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3516    LOG(FATAL) << DebugName() << " is not defined for double values";
3517    UNREACHABLE();
3518  }
3519
3520  DECLARE_INSTRUCTION(Above);
3521
3522  IfCondition GetCondition() const OVERRIDE {
3523    return kCondA;
3524  }
3525
3526  IfCondition GetOppositeCondition() const OVERRIDE {
3527    return kCondBE;
3528  }
3529
3530 private:
3531  template <typename T> static bool Compute(T x, T y) {
3532    return MakeUnsigned(x) > MakeUnsigned(y);
3533  }
3534
3535  DISALLOW_COPY_AND_ASSIGN(HAbove);
3536};
3537
3538class HAboveOrEqual FINAL : public HCondition {
3539 public:
3540  HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3541      : HCondition(first, second, dex_pc) {}
3542
3543  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3544    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3545  }
3546  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3547    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3548  }
3549  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3550                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3551    LOG(FATAL) << DebugName() << " is not defined for float values";
3552    UNREACHABLE();
3553  }
3554  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3555                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3556    LOG(FATAL) << DebugName() << " is not defined for double values";
3557    UNREACHABLE();
3558  }
3559
3560  DECLARE_INSTRUCTION(AboveOrEqual);
3561
3562  IfCondition GetCondition() const OVERRIDE {
3563    return kCondAE;
3564  }
3565
3566  IfCondition GetOppositeCondition() const OVERRIDE {
3567    return kCondB;
3568  }
3569
3570 private:
3571  template <typename T> static bool Compute(T x, T y) {
3572    return MakeUnsigned(x) >= MakeUnsigned(y);
3573  }
3574
3575  DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3576};
3577
3578// Instruction to check how two inputs compare to each other.
3579// Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
3580class HCompare FINAL : public HBinaryOperation {
3581 public:
3582  // Note that `comparison_type` is the type of comparison performed
3583  // between the comparison's inputs, not the type of the instantiated
3584  // HCompare instruction (which is always Primitive::kPrimInt).
3585  HCompare(Primitive::Type comparison_type,
3586           HInstruction* first,
3587           HInstruction* second,
3588           ComparisonBias bias,
3589           uint32_t dex_pc)
3590      : HBinaryOperation(Primitive::kPrimInt,
3591                         first,
3592                         second,
3593                         SideEffectsForArchRuntimeCalls(comparison_type),
3594                         dex_pc) {
3595    SetPackedField<ComparisonBiasField>(bias);
3596    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
3597    DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
3598  }
3599
3600  template <typename T>
3601  int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3602
3603  template <typename T>
3604  int32_t ComputeFP(T x, T y) const {
3605    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3606    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3607    // Handle the bias.
3608    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
3609  }
3610
3611  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3612    // Note that there is no "cmp-int" Dex instruction so we shouldn't
3613    // reach this code path when processing a freshly built HIR
3614    // graph. However HCompare integer instructions can be synthesized
3615    // by the instruction simplifier to implement IntegerCompare and
3616    // IntegerSignum intrinsics, so we have to handle this case.
3617    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3618  }
3619  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3620    return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3621  }
3622  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3623    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3624  }
3625  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3626    return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3627  }
3628
3629  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3630    return GetPackedFields() == other->AsCompare()->GetPackedFields();
3631  }
3632
3633  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3634
3635  // Does this compare instruction have a "gt bias" (vs an "lt bias")?
3636  // Only meaningful for floating-point comparisons.
3637  bool IsGtBias() const {
3638    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3639    return GetBias() == ComparisonBias::kGtBias;
3640  }
3641
3642  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
3643    // Comparisons do not require a runtime call in any back end.
3644    return SideEffects::None();
3645  }
3646
3647  DECLARE_INSTRUCTION(Compare);
3648
3649 protected:
3650  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3651  static constexpr size_t kFieldComparisonBiasSize =
3652      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3653  static constexpr size_t kNumberOfComparePackedBits =
3654      kFieldComparisonBias + kFieldComparisonBiasSize;
3655  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3656  using ComparisonBiasField =
3657      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3658
3659  // Return an integer constant containing the result of a comparison evaluated at compile time.
3660  HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
3661    DCHECK(value == -1 || value == 0 || value == 1) << value;
3662    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3663  }
3664
3665 private:
3666  DISALLOW_COPY_AND_ASSIGN(HCompare);
3667};
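// Sketch: as noted in Evaluate() above, the instruction simplifier can
// synthesize an int HCompare to implement the IntegerCompare/IntegerSignum
// intrinsics. If both inputs later become the constant 7, Evaluate() folds the
// node through MakeConstantComparison(Compute(7, 7)) to the graph's int
// constant 0.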
3668
3669class HNewInstance FINAL : public HExpression<2> {
3670 public:
3671  HNewInstance(HInstruction* cls,
3672               HCurrentMethod* current_method,
3673               uint32_t dex_pc,
3674               uint16_t type_index,
3675               const DexFile& dex_file,
3676               bool needs_access_check,
3677               bool finalizable,
3678               QuickEntrypointEnum entrypoint)
3679      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
3680        type_index_(type_index),
3681        dex_file_(dex_file),
3682        entrypoint_(entrypoint) {
3683    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
3684    SetPackedFlag<kFlagFinalizable>(finalizable);
3685    SetRawInputAt(0, cls);
3686    SetRawInputAt(1, current_method);
3687  }
3688
3689  uint16_t GetTypeIndex() const { return type_index_; }
3690  const DexFile& GetDexFile() const { return dex_file_; }
3691
3692  // Calls runtime so needs an environment.
3693  bool NeedsEnvironment() const OVERRIDE { return true; }
3694
3695  // Can throw an error on out-of-memory or if the class is not instantiable/accessible.
3696  bool CanThrow() const OVERRIDE { return true; }
3697
3698  // Needs to call into runtime to make sure it's instantiable/accessible.
3699  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
3700
3701  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
3702
3703  bool CanBeNull() const OVERRIDE { return false; }
3704
3705  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
3706
3707  void SetEntrypoint(QuickEntrypointEnum entrypoint) {
3708    entrypoint_ = entrypoint;
3709  }
3710
3711  bool IsStringAlloc() const;
3712
3713  DECLARE_INSTRUCTION(NewInstance);
3714
3715 private:
3716  static constexpr size_t kFlagNeedsAccessCheck = kNumberOfExpressionPackedBits;
3717  static constexpr size_t kFlagFinalizable = kFlagNeedsAccessCheck + 1;
3718  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
3719  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
3720                "Too many packed fields.");
3721
3722  const uint16_t type_index_;
3723  const DexFile& dex_file_;
3724  QuickEntrypointEnum entrypoint_;
3725
3726  DISALLOW_COPY_AND_ASSIGN(HNewInstance);
3727};
3728
3729enum IntrinsicNeedsEnvironmentOrCache {
3730  kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
3731  kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
3732};
3733
3734enum IntrinsicSideEffects {
3735  kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
3736  kReadSideEffects,   // Intrinsic may read heap memory.
3737  kWriteSideEffects,  // Intrinsic may write heap memory.
3738  kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
3739};
3740
3741enum IntrinsicExceptions {
3742  kNoThrow,  // Intrinsic does not throw any exceptions.
3743  kCanThrow  // Intrinsic may throw exceptions.
3744};
3745
3746class HInvoke : public HInstruction {
3747 public:
3748  bool NeedsEnvironment() const OVERRIDE;
3749
3750  using HInstruction::GetInputRecords;  // Keep the const version visible.
3751  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
3752    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
3753  }
3754
3755  void SetArgumentAt(size_t index, HInstruction* argument) {
3756    SetRawInputAt(index, argument);
3757  }
3758
3759  // Return the number of arguments.  This number can be lower than
3760  // the number of inputs returned by InputCount(), as some invoke
3761  // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
3762  // inputs at the end of their list of inputs.
3763  uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
3764
3765  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }
3766
3767  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
3768  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }
3769
3770  InvokeType GetInvokeType() const {
3771    return GetPackedField<InvokeTypeField>();
3772  }
3773
3774  Intrinsics GetIntrinsic() const {
3775    return intrinsic_;
3776  }
3777
3778  void SetIntrinsic(Intrinsics intrinsic,
3779                    IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
3780                    IntrinsicSideEffects side_effects,
3781                    IntrinsicExceptions exceptions);
3782
3783  bool IsFromInlinedInvoke() const {
3784    return GetEnvironment()->IsFromInlinedInvoke();
3785  }
3786
3787  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
3788
3789  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }
3790
3791  bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }
3792
3793  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3794    return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
3795  }
3796
3797  uint32_t* GetIntrinsicOptimizations() {
3798    return &intrinsic_optimizations_;
3799  }
3800
3801  const uint32_t* GetIntrinsicOptimizations() const {
3802    return &intrinsic_optimizations_;
3803  }
3804
3805  bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
3806
3807  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
3808
3809  DECLARE_ABSTRACT_INSTRUCTION(Invoke);
3810
3811 protected:
3812  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
3813  static constexpr size_t kFieldInvokeTypeSize =
3814      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
3815  static constexpr size_t kFieldReturnType =
3816      kFieldInvokeType + kFieldInvokeTypeSize;
3817  static constexpr size_t kFieldReturnTypeSize =
3818      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
3819  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
3820  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
3821  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3822  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
3823  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;
3824
3825  HInvoke(ArenaAllocator* arena,
3826          uint32_t number_of_arguments,
3827          uint32_t number_of_other_inputs,
3828          Primitive::Type return_type,
3829          uint32_t dex_pc,
3830          uint32_t dex_method_index,
3831          ArtMethod* resolved_method,
3832          InvokeType invoke_type)
3833    : HInstruction(
3834          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
3835      number_of_arguments_(number_of_arguments),
3836      resolved_method_(resolved_method),
3837      inputs_(number_of_arguments + number_of_other_inputs,
3838              arena->Adapter(kArenaAllocInvokeInputs)),
3839      dex_method_index_(dex_method_index),
3840      intrinsic_(Intrinsics::kNone),
3841      intrinsic_optimizations_(0) {
3842    SetPackedField<ReturnTypeField>(return_type);
3843    SetPackedField<InvokeTypeField>(invoke_type);
3844    SetPackedFlag<kFlagCanThrow>(true);
3845  }
3846
3847  uint32_t number_of_arguments_;
3848  ArtMethod* const resolved_method_;
3849  ArenaVector<HUserRecord<HInstruction*>> inputs_;
3850  const uint32_t dex_method_index_;
3851  Intrinsics intrinsic_;
3852
3853  // A bit field holding optimization flags for this intrinsic. See intrinsics.h.
3854  uint32_t intrinsic_optimizations_;
3855
3856 private:
3857  DISALLOW_COPY_AND_ASSIGN(HInvoke);
3858};
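// Sketch of tagging an invoke after intrinsic recognition (the enumerator is
// illustrative):
//
//   invoke->SetIntrinsic(Intrinsics::kMathAbsInt,
//                        kNoEnvironmentOrCache,
//                        kNoSideEffects,
//                        kNoThrow);
//
// Afterwards IsIntrinsic() holds, so CanBeMoved() returns true and
// InstructionDataEquals() can match two invokes of the same intrinsic.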
3859
3860class HInvokeUnresolved FINAL : public HInvoke {
3861 public:
3862  HInvokeUnresolved(ArenaAllocator* arena,
3863                    uint32_t number_of_arguments,
3864                    Primitive::Type return_type,
3865                    uint32_t dex_pc,
3866                    uint32_t dex_method_index,
3867                    InvokeType invoke_type)
3868      : HInvoke(arena,
3869                number_of_arguments,
3870                0u /* number_of_other_inputs */,
3871                return_type,
3872                dex_pc,
3873                dex_method_index,
3874                nullptr,
3875                invoke_type) {
3876  }
3877
3878  DECLARE_INSTRUCTION(InvokeUnresolved);
3879
3880 private:
3881  DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
3882};
3883
3884class HInvokeStaticOrDirect FINAL : public HInvoke {
3885 public:
3886  // Requirements of this method call regarding the class
3887  // initialization (clinit) check of its declaring class.
3888  enum class ClinitCheckRequirement {
3889    kNone,      // Class already initialized.
3890    kExplicit,  // Static call having explicit clinit check as last input.
3891    kImplicit,  // Static call implicitly requiring a clinit check.
3892    kLast = kImplicit
3893  };
3894
3895  // Determines how to load the target ArtMethod*.
3896  enum class MethodLoadKind {
3897    // Use a String init ArtMethod* loaded from Thread entrypoints.
3898    kStringInit,
3899
3900    // Use the method's own ArtMethod* loaded by the register allocator.
3901    kRecursive,
3902
3903    // Use ArtMethod* at a known address, embed the direct address in the code.
3904    // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3905    kDirectAddress,
3906
3907    // Use ArtMethod* at an address that will be known at link time, embed the direct
3908    // address in the code. If the image is relocatable, emit .patch_oat entry.
3909    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3910    // the image relocatable or not.
3911    kDirectAddressWithFixup,
3912
3913    // Load from resolved methods array in the dex cache using a PC-relative load.
3914    // Used when we need to use the dex cache, for example for invoke-static that
3915    // may cause class initialization (the entry may point to a resolution method),
3916    // and we know that we can access the dex cache arrays using a PC-relative load.
3917    kDexCachePcRelative,
3918
3919    // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3920    // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3921    // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3922    // or unimplemented or impractical (i.e. slow) on a particular architecture.
3923    kDexCacheViaMethod,
3924  };
3925
3926  // Determines the location of the code pointer.
3927  enum class CodePtrLocation {
3928    // Recursive call, use local PC-relative call instruction.
3929    kCallSelf,
3930
3931    // Use PC-relative call instruction patched at link time.
3932    // Used for calls within an oat file, boot->boot or app->app.
3933    kCallPCRelative,
3934
3935    // Call to a known target address, embed the direct address in code.
3936    // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3937    kCallDirect,
3938
3939    // Call to a target address that will be known at link time, embed the direct
3940    // address in code. If the image is relocatable, emit .patch_oat entry.
3941    // Used for app->boot calls with relocatable image and boot->boot calls, whether
3942    // the image is relocatable or not.
3943    kCallDirectWithFixup,
3944
3945    // Use code pointer from the ArtMethod*.
3946    // Used when we don't know the target code. This is also the last-resort-kind used when
3947    // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3948    kCallArtMethod,
3949  };
3950
3951  struct DispatchInfo {
3952    MethodLoadKind method_load_kind;
3953    CodePtrLocation code_ptr_location;
3954    // The method load data holds
3955    //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
3956    //     Note that there are multiple string init methods, each having its own offset.
3957    //   - the method address for kDirectAddress
3958    //   - the dex cache arrays offset for kDexCachePcRelative.
3959    uint64_t method_load_data;
3960    uint64_t direct_code_ptr;
3961  };
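  // Sketch of the last-resort dispatch described above, written as an
  // aggregate in the field order of DispatchInfo:
  //
  //   HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
  //       HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
  //       HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
  //       0u,    // method_load_data: unused for this load kind.
  //       0u };  // direct_code_ptr: only meaningful for direct code pointers.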
3962
3963  HInvokeStaticOrDirect(ArenaAllocator* arena,
3964                        uint32_t number_of_arguments,
3965                        Primitive::Type return_type,
3966                        uint32_t dex_pc,
3967                        uint32_t method_index,
3968                        ArtMethod* resolved_method,
3969                        DispatchInfo dispatch_info,
3970                        InvokeType invoke_type,
3971                        MethodReference target_method,
3972                        ClinitCheckRequirement clinit_check_requirement)
3973      : HInvoke(arena,
3974                number_of_arguments,
3975                // There is potentially one extra argument for the HCurrentMethod node, and
3976                // potentially one other if the clinit check is explicit, and potentially
3977                // one other if the method is a string factory.
3978                (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
3979                    (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
3980                return_type,
3981                dex_pc,
3982                method_index,
3983                resolved_method,
3984                invoke_type),
3985        target_method_(target_method),
3986        dispatch_info_(dispatch_info) {
3987    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
3988  }
3989
3990  void SetDispatchInfo(const DispatchInfo& dispatch_info) {
3991    bool had_current_method_input = HasCurrentMethodInput();
3992    bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
3993
3994    // Using the current method is the default and once we find a better
3995    // method load kind, we should not go back to using the current method.
3996    DCHECK(had_current_method_input || !needs_current_method_input);
3997
3998    if (had_current_method_input && !needs_current_method_input) {
3999      DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4000      RemoveInputAt(GetSpecialInputIndex());
4001    }
4002    dispatch_info_ = dispatch_info;
4003  }
4004
4005  void AddSpecialInput(HInstruction* input) {
4006    // We allow only one special input.
4007    DCHECK(!IsStringInit() && !HasCurrentMethodInput());
4008    DCHECK(InputCount() == GetSpecialInputIndex() ||
4009           (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
4010    InsertInputAt(GetSpecialInputIndex(), input);
4011  }
4012
4013  using HInstruction::GetInputRecords;  // Keep the const version visible.
4014  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
4015    ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4016    if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4017      DCHECK(!input_records.empty());
4018      DCHECK_GT(input_records.size(), GetNumberOfArguments());
4019      HInstruction* last_input = input_records.back().GetInstruction();
4020      // Note: `last_input` may be null during arguments setup.
4021      if (last_input != nullptr) {
4022        // `last_input` is the last input of a static invoke marked as having
4023        // an explicit clinit check. It must either be:
4024        // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4025        // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4026        DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4027      }
4028    }
4029    return input_records;
4030  }
4031
4032  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
4033    // We access the method via the dex cache so we can't do an implicit null check.
4034    // TODO: for intrinsics we can generate implicit null checks.
4035    return false;
4036  }
4037
4038  bool CanBeNull() const OVERRIDE {
4039    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
4040  }
4041
4042  // Get the index of the special input, if any.
4043  //
4044  // If the invoke HasCurrentMethodInput(), the "special input" is the current
4045  // method pointer; otherwise there may be one platform-specific special input,
4046  // such as a PC-relative addressing base.
4047  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
4048  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
4049
4050  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
4051  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
4052  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
4053  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
4054  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
4055  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
4056  bool HasPcRelativeDexCache() const {
4057    return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
4058  }
4059  bool HasCurrentMethodInput() const {
4060    // This function can be called only after the invoke has been fully initialized by the builder.
4061    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
4062      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
4063      return true;
4064    } else {
4065      DCHECK(InputCount() == GetSpecialInputIndex() ||
4066             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
4067      return false;
4068    }
4069  }
4070  bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
4071
4072  QuickEntrypointEnum GetStringInitEntryPoint() const {
4073    DCHECK(IsStringInit());
4074    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
4075  }
4076
4077  uint64_t GetMethodAddress() const {
4078    DCHECK(HasMethodAddress());
4079    return dispatch_info_.method_load_data;
4080  }
4081
4082  uint32_t GetDexCacheArrayOffset() const {
4083    DCHECK(HasPcRelativeDexCache());
4084    return dispatch_info_.method_load_data;
4085  }
4086
4087  uint64_t GetDirectCodePtr() const {
4088    DCHECK(HasDirectCodePtr());
4089    return dispatch_info_.direct_code_ptr;
4090  }
4091
4092  ClinitCheckRequirement GetClinitCheckRequirement() const {
4093    return GetPackedField<ClinitCheckRequirementField>();
4094  }
4095
4096  // Is this instruction a call to a static method?
4097  bool IsStatic() const {
4098    return GetInvokeType() == kStatic;
4099  }
4100
4101  MethodReference GetTargetMethod() const {
4102    return target_method_;
4103  }
4104
4105  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
4106  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
4107  // instruction; only relevant for static calls with explicit clinit check.
4108  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
4109    DCHECK(IsStaticWithExplicitClinitCheck());
4110    size_t last_input_index = inputs_.size() - 1u;
4111    HInstruction* last_input = inputs_.back().GetInstruction();
4112    DCHECK(last_input != nullptr);
4113    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
4114    RemoveAsUserOfInput(last_input_index);
4115    inputs_.pop_back();
4116    SetPackedField<ClinitCheckRequirementField>(new_requirement);
4117    DCHECK(!IsStaticWithExplicitClinitCheck());
4118  }
4119
4120  // Is this a call to a static method whose declaring class has an
4121  // explicit initialization check in the graph?
4122  bool IsStaticWithExplicitClinitCheck() const {
4123    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
4124  }
4125
4126  // Is this a call to a static method whose declaring class has an
4127  // implicit initialization check requirement?
4128  bool IsStaticWithImplicitClinitCheck() const {
4129    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
4130  }
4131
4132  // Does this method load kind need the current method as an input?
4133  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
4134    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
4135  }
4136
4137  DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4138
4139 protected:
4140  void InsertInputAt(size_t index, HInstruction* input);
4141  void RemoveInputAt(size_t index);
4142
4143 private:
4144  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
4145  static constexpr size_t kFieldClinitCheckRequirementSize =
4146      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4147  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4148      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4149  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4150                "Too many packed fields.");
4151  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4152                                               kFieldClinitCheckRequirement,
4153                                               kFieldClinitCheckRequirementSize>;
4154
4155  // Cached values of the resolved method, to avoid needing the mutator lock.
4156  MethodReference target_method_;
4157  DispatchInfo dispatch_info_;
4158
4159  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
4160};
4161std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4162std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
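
// Illustrative sketch (not part of the original header): how a pass that no
// longer needs a separate check instruction (e.g. because the invoke's own
// slow path will initialize the class) might drop the explicit clinit-check
// input documented above. The variable `invoke` is hypothetical.
//
//   if (invoke->IsStaticWithExplicitClinitCheck()) {
//     // The last input is the HClinitCheck (or its replacement HLoadClass).
//     invoke->RemoveExplicitClinitCheck(
//         HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
//   }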
4163
4164class HInvokeVirtual FINAL : public HInvoke {
4165 public:
4166  HInvokeVirtual(ArenaAllocator* arena,
4167                 uint32_t number_of_arguments,
4168                 Primitive::Type return_type,
4169                 uint32_t dex_pc,
4170                 uint32_t dex_method_index,
4171                 ArtMethod* resolved_method,
4172                 uint32_t vtable_index)
4173      : HInvoke(arena,
4174                number_of_arguments,
4175                0u,
4176                return_type,
4177                dex_pc,
4178                dex_method_index,
4179                resolved_method,
4180                kVirtual),
4181        vtable_index_(vtable_index) {}
4182
4183  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4184    // TODO: Add implicit null checks in intrinsics.
4185    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4186  }
4187
4188  uint32_t GetVTableIndex() const { return vtable_index_; }
4189
4190  DECLARE_INSTRUCTION(InvokeVirtual);
4191
4192 private:
4193  // Cached value of the resolved method, to avoid needing the mutator lock.
4194  const uint32_t vtable_index_;
4195
4196  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
4197};
4198
4199class HInvokeInterface FINAL : public HInvoke {
4200 public:
4201  HInvokeInterface(ArenaAllocator* arena,
4202                   uint32_t number_of_arguments,
4203                   Primitive::Type return_type,
4204                   uint32_t dex_pc,
4205                   uint32_t dex_method_index,
4206                   ArtMethod* resolved_method,
4207                   uint32_t imt_index)
4208      : HInvoke(arena,
4209                number_of_arguments,
4210                0u,
4211                return_type,
4212                dex_pc,
4213                dex_method_index,
4214                resolved_method,
4215                kInterface),
4216        imt_index_(imt_index) {}
4217
4218  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4219    // TODO: Add implicit null checks in intrinsics.
4220    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4221  }
4222
4223  uint32_t GetImtIndex() const { return imt_index_; }
4224  uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4225
4226  DECLARE_INSTRUCTION(InvokeInterface);
4227
4228 private:
4229  // Cached value of the resolved method, to avoid needing the mutator lock.
4230  const uint32_t imt_index_;
4231
4232  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4233};
4234
4235class HNeg FINAL : public HUnaryOperation {
4236 public:
4237  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4238      : HUnaryOperation(result_type, input, dex_pc) {
4239    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4240  }
4241
4242  template <typename T> static T Compute(T x) { return -x; }
4243
4244  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4245    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4246  }
4247  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4248    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4249  }
4250  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4251    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4252  }
4253  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4254    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4255  }
4256
4257  DECLARE_INSTRUCTION(Neg);
4258
4259 private:
4260  DISALLOW_COPY_AND_ASSIGN(HNeg);
4261};
4262
4263class HNewArray FINAL : public HExpression<2> {
4264 public:
4265  HNewArray(HInstruction* length,
4266            HCurrentMethod* current_method,
4267            uint32_t dex_pc,
4268            uint16_t type_index,
4269            const DexFile& dex_file,
4270            QuickEntrypointEnum entrypoint)
4271      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
4272        type_index_(type_index),
4273        dex_file_(dex_file),
4274        entrypoint_(entrypoint) {
4275    SetRawInputAt(0, length);
4276    SetRawInputAt(1, current_method);
4277  }
4278
4279  uint16_t GetTypeIndex() const { return type_index_; }
4280  const DexFile& GetDexFile() const { return dex_file_; }
4281
4282  // Calls runtime so needs an environment.
4283  bool NeedsEnvironment() const OVERRIDE { return true; }
4284
4285  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4286  bool CanThrow() const OVERRIDE { return true; }
4287
4288  bool CanBeNull() const OVERRIDE { return false; }
4289
4290  QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4291
4292  DECLARE_INSTRUCTION(NewArray);
4293
4294 private:
4295  const uint16_t type_index_;
4296  const DexFile& dex_file_;
4297  const QuickEntrypointEnum entrypoint_;
4298
4299  DISALLOW_COPY_AND_ASSIGN(HNewArray);
4300};
4301
4302class HAdd FINAL : public HBinaryOperation {
4303 public:
4304  HAdd(Primitive::Type result_type,
4305       HInstruction* left,
4306       HInstruction* right,
4307       uint32_t dex_pc = kNoDexPc)
4308      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4309
4310  bool IsCommutative() const OVERRIDE { return true; }
4311
4312  template <typename T> static T Compute(T x, T y) { return x + y; }
4313
4314  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4315    return GetBlock()->GetGraph()->GetIntConstant(
4316        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4317  }
4318  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4319    return GetBlock()->GetGraph()->GetLongConstant(
4320        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4321  }
4322  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4323    return GetBlock()->GetGraph()->GetFloatConstant(
4324        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4325  }
4326  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4327    return GetBlock()->GetGraph()->GetDoubleConstant(
4328        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4329  }
4330
4331  DECLARE_INSTRUCTION(Add);
4332
4333 private:
4334  DISALLOW_COPY_AND_ASSIGN(HAdd);
4335};
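
// Illustrative sketch (not from this header): a constant-folding step can use
// the Evaluate() overloads above once both operands are constants. `add` is a
// hypothetical HAdd that is already inserted in the graph (Evaluate() needs
// its block to reach the graph's constant pool).
//
//   if (add->GetLeft()->IsIntConstant() && add->GetRight()->IsIntConstant()) {
//     HConstant* folded = add->Evaluate(add->GetLeft()->AsIntConstant(),
//                                       add->GetRight()->AsIntConstant());
//     add->ReplaceWith(folded);
//     add->GetBlock()->RemoveInstruction(add);
//   }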
4336
4337class HSub FINAL : public HBinaryOperation {
4338 public:
4339  HSub(Primitive::Type result_type,
4340       HInstruction* left,
4341       HInstruction* right,
4342       uint32_t dex_pc = kNoDexPc)
4343      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4344
4345  template <typename T> static T Compute(T x, T y) { return x - y; }
4346
4347  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4348    return GetBlock()->GetGraph()->GetIntConstant(
4349        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4350  }
4351  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4352    return GetBlock()->GetGraph()->GetLongConstant(
4353        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4354  }
4355  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4356    return GetBlock()->GetGraph()->GetFloatConstant(
4357        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4358  }
4359  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4360    return GetBlock()->GetGraph()->GetDoubleConstant(
4361        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4362  }
4363
4364  DECLARE_INSTRUCTION(Sub);
4365
4366 private:
4367  DISALLOW_COPY_AND_ASSIGN(HSub);
4368};
4369
4370class HMul FINAL : public HBinaryOperation {
4371 public:
4372  HMul(Primitive::Type result_type,
4373       HInstruction* left,
4374       HInstruction* right,
4375       uint32_t dex_pc = kNoDexPc)
4376      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4377
4378  bool IsCommutative() const OVERRIDE { return true; }
4379
4380  template <typename T> static T Compute(T x, T y) { return x * y; }
4381
4382  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4383    return GetBlock()->GetGraph()->GetIntConstant(
4384        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4385  }
4386  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4387    return GetBlock()->GetGraph()->GetLongConstant(
4388        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4389  }
4390  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4391    return GetBlock()->GetGraph()->GetFloatConstant(
4392        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4393  }
4394  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4395    return GetBlock()->GetGraph()->GetDoubleConstant(
4396        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4397  }
4398
4399  DECLARE_INSTRUCTION(Mul);
4400
4401 private:
4402  DISALLOW_COPY_AND_ASSIGN(HMul);
4403};
4404
4405class HDiv FINAL : public HBinaryOperation {
4406 public:
4407  HDiv(Primitive::Type result_type,
4408       HInstruction* left,
4409       HInstruction* right,
4410       uint32_t dex_pc)
4411      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4412
4413  template <typename T>
4414  T ComputeIntegral(T x, T y) const {
4415    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4416    // Our graph structure ensures we never have 0 for `y` during
4417    // constant folding.
4418    DCHECK_NE(y, 0);
4419    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4420    return (y == -1) ? -x : x / y;
4421  }
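  // Worked example of the -1 special case above (illustrative, not from the
  // original header): on x86/x86-64, idiv raises #DE (SIGFPE) when dividing
  // the minimum int (or long) value by -1, because the true quotient does not
  // fit in the operand width; negating instead wraps back to the minimum
  // value, matching Java's Integer.MIN_VALUE / -1 == Integer.MIN_VALUE.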
4422
4423  template <typename T>
4424  T ComputeFP(T x, T y) const {
4425    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4426    return x / y;
4427  }
4428
4429  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4430    return GetBlock()->GetGraph()->GetIntConstant(
4431        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4432  }
4433  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4434    return GetBlock()->GetGraph()->GetLongConstant(
4435        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4436  }
4437  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4438    return GetBlock()->GetGraph()->GetFloatConstant(
4439        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4440  }
4441  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4442    return GetBlock()->GetGraph()->GetDoubleConstant(
4443        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4444  }
4445
4446  DECLARE_INSTRUCTION(Div);
4447
4448 private:
4449  DISALLOW_COPY_AND_ASSIGN(HDiv);
4450};
4451
4452class HRem FINAL : public HBinaryOperation {
4453 public:
4454  HRem(Primitive::Type result_type,
4455       HInstruction* left,
4456       HInstruction* right,
4457       uint32_t dex_pc)
4458      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4459
4460  template <typename T>
4461  T ComputeIntegral(T x, T y) const {
4462    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4463    // Our graph structure ensures we never have 0 for `y` during
4464    // constant folding.
4465    DCHECK_NE(y, 0);
4466    // Special case -1 to avoid getting a SIGFPE on x86(_64).
4467    return (y == -1) ? 0 : x % y;
4468  }
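  // Worked example (illustrative): Java defines x % -1 == 0 for every x,
  // including Integer.MIN_VALUE, and returning 0 directly also avoids the
  // SIGFPE that x86's idiv would raise for the (MIN_VALUE, -1) operand pair.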
4469
4470  template <typename T>
4471  T ComputeFP(T x, T y) const {
4472    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4473    return std::fmod(x, y);
4474  }
4475
4476  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4477    return GetBlock()->GetGraph()->GetIntConstant(
4478        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4479  }
4480  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4481    return GetBlock()->GetGraph()->GetLongConstant(
4482        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4483  }
4484  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4485    return GetBlock()->GetGraph()->GetFloatConstant(
4486        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4487  }
4488  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4489    return GetBlock()->GetGraph()->GetDoubleConstant(
4490        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4491  }
4492
4493  DECLARE_INSTRUCTION(Rem);
4494
4495 private:
4496  DISALLOW_COPY_AND_ASSIGN(HRem);
4497};
4498
4499class HDivZeroCheck FINAL : public HExpression<1> {
4500 public:
4501  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
4502  // constructor.
4503  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
4504      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4505    SetRawInputAt(0, value);
4506  }
4507
4508  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }
4509
4510  bool CanBeMoved() const OVERRIDE { return true; }
4511
4512  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4513    return true;
4514  }
4515
4516  bool NeedsEnvironment() const OVERRIDE { return true; }
4517  bool CanThrow() const OVERRIDE { return true; }
4518
4519  DECLARE_INSTRUCTION(DivZeroCheck);
4520
4521 private:
4522  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
4523};
4524
4525class HShl FINAL : public HBinaryOperation {
4526 public:
4527  HShl(Primitive::Type result_type,
4528       HInstruction* value,
4529       HInstruction* distance,
4530       uint32_t dex_pc = kNoDexPc)
4531      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4532    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4533    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4534  }
4535
4536  template <typename T>
4537  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4538    return value << (distance & max_shift_distance);
4539  }
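  // Worked example (illustrative): the distance is masked to the operand
  // width, matching Java/Dex shift semantics, e.g.
  //   Compute(1, 33, kMaxIntShiftDistance) == 1 << (33 & 0x1f) == 2.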
4540
4541  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4542    return GetBlock()->GetGraph()->GetIntConstant(
4543        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4544  }
4545  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4546    return GetBlock()->GetGraph()->GetLongConstant(
4547        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4548  }
4549  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4550                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4551    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4552    UNREACHABLE();
4553  }
4554  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4555                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4556    LOG(FATAL) << DebugName() << " is not defined for float values";
4557    UNREACHABLE();
4558  }
4559  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4560                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4561    LOG(FATAL) << DebugName() << " is not defined for double values";
4562    UNREACHABLE();
4563  }
4564
4565  DECLARE_INSTRUCTION(Shl);
4566
4567 private:
4568  DISALLOW_COPY_AND_ASSIGN(HShl);
4569};
4570
4571class HShr FINAL : public HBinaryOperation {
4572 public:
4573  HShr(Primitive::Type result_type,
4574       HInstruction* value,
4575       HInstruction* distance,
4576       uint32_t dex_pc = kNoDexPc)
4577      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4578    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4579    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4580  }
4581
4582  template <typename T>
4583  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4584    return value >> (distance & max_shift_distance);
4585  }
4586
4587  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4588    return GetBlock()->GetGraph()->GetIntConstant(
4589        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4590  }
4591  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4592    return GetBlock()->GetGraph()->GetLongConstant(
4593        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4594  }
4595  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4596                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4597    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4598    UNREACHABLE();
4599  }
4600  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4601                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4602    LOG(FATAL) << DebugName() << " is not defined for float values";
4603    UNREACHABLE();
4604  }
4605  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4606                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4607    LOG(FATAL) << DebugName() << " is not defined for double values";
4608    UNREACHABLE();
4609  }
4610
4611  DECLARE_INSTRUCTION(Shr);
4612
4613 private:
4614  DISALLOW_COPY_AND_ASSIGN(HShr);
4615};
4616
4617class HUShr FINAL : public HBinaryOperation {
4618 public:
4619  HUShr(Primitive::Type result_type,
4620        HInstruction* value,
4621        HInstruction* distance,
4622        uint32_t dex_pc = kNoDexPc)
4623      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4624    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4625    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4626  }
4627
4628  template <typename T>
4629  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
4630    typedef typename std::make_unsigned<T>::type V;
4631    V ux = static_cast<V>(value);
4632    return static_cast<T>(ux >> (distance & max_shift_distance));
4633  }
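  // Worked example (illustrative): the value is reinterpreted as unsigned so
  // zeros are shifted in from the left, e.g. for int
  //   Compute(-1, 1, kMaxIntShiftDistance) == 0x7fffffff  (Java's -1 >>> 1).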
4634
4635  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4636    return GetBlock()->GetGraph()->GetIntConstant(
4637        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4638  }
4639  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4640    return GetBlock()->GetGraph()->GetLongConstant(
4641        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4642  }
4643  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4644                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4645    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4646    UNREACHABLE();
4647  }
4648  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4649                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4650    LOG(FATAL) << DebugName() << " is not defined for float values";
4651    UNREACHABLE();
4652  }
4653  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4654                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4655    LOG(FATAL) << DebugName() << " is not defined for double values";
4656    UNREACHABLE();
4657  }
4658
4659  DECLARE_INSTRUCTION(UShr);
4660
4661 private:
4662  DISALLOW_COPY_AND_ASSIGN(HUShr);
4663};
4664
4665class HAnd FINAL : public HBinaryOperation {
4666 public:
4667  HAnd(Primitive::Type result_type,
4668       HInstruction* left,
4669       HInstruction* right,
4670       uint32_t dex_pc = kNoDexPc)
4671      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4672
4673  bool IsCommutative() const OVERRIDE { return true; }
4674
4675  template <typename T> static T Compute(T x, T y) { return x & y; }
4676
4677  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4678    return GetBlock()->GetGraph()->GetIntConstant(
4679        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4680  }
4681  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4682    return GetBlock()->GetGraph()->GetLongConstant(
4683        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4684  }
4685  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4686                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4687    LOG(FATAL) << DebugName() << " is not defined for float values";
4688    UNREACHABLE();
4689  }
4690  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4691                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4692    LOG(FATAL) << DebugName() << " is not defined for double values";
4693    UNREACHABLE();
4694  }
4695
4696  DECLARE_INSTRUCTION(And);
4697
4698 private:
4699  DISALLOW_COPY_AND_ASSIGN(HAnd);
4700};
4701
4702class HOr FINAL : public HBinaryOperation {
4703 public:
4704  HOr(Primitive::Type result_type,
4705      HInstruction* left,
4706      HInstruction* right,
4707      uint32_t dex_pc = kNoDexPc)
4708      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4709
4710  bool IsCommutative() const OVERRIDE { return true; }
4711
4712  template <typename T> static T Compute(T x, T y) { return x | y; }
4713
4714  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4715    return GetBlock()->GetGraph()->GetIntConstant(
4716        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4717  }
4718  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4719    return GetBlock()->GetGraph()->GetLongConstant(
4720        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4721  }
4722  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4723                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4724    LOG(FATAL) << DebugName() << " is not defined for float values";
4725    UNREACHABLE();
4726  }
4727  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4728                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4729    LOG(FATAL) << DebugName() << " is not defined for double values";
4730    UNREACHABLE();
4731  }
4732
4733  DECLARE_INSTRUCTION(Or);
4734
4735 private:
4736  DISALLOW_COPY_AND_ASSIGN(HOr);
4737};
4738
4739class HXor FINAL : public HBinaryOperation {
4740 public:
4741  HXor(Primitive::Type result_type,
4742       HInstruction* left,
4743       HInstruction* right,
4744       uint32_t dex_pc = kNoDexPc)
4745      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4746
4747  bool IsCommutative() const OVERRIDE { return true; }
4748
4749  template <typename T> static T Compute(T x, T y) { return x ^ y; }
4750
4751  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4752    return GetBlock()->GetGraph()->GetIntConstant(
4753        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4754  }
4755  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4756    return GetBlock()->GetGraph()->GetLongConstant(
4757        Compute(x->GetValue(), y->GetValue()), GetDexPc());
4758  }
4759  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4760                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4761    LOG(FATAL) << DebugName() << " is not defined for float values";
4762    UNREACHABLE();
4763  }
4764  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4765                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4766    LOG(FATAL) << DebugName() << " is not defined for double values";
4767    UNREACHABLE();
4768  }
4769
4770  DECLARE_INSTRUCTION(Xor);
4771
4772 private:
4773  DISALLOW_COPY_AND_ASSIGN(HXor);
4774};
4775
4776class HRor FINAL : public HBinaryOperation {
4777 public:
4778  HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4779    : HBinaryOperation(result_type, value, distance) {
4780    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4781    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4782  }
4783
4784  template <typename T>
4785  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
4786    typedef typename std::make_unsigned<T>::type V;
4787    V ux = static_cast<V>(value);
4788    if ((distance & max_shift_value) == 0) {
4789      return static_cast<T>(ux);
4790    } else {
4791      const V reg_bits = sizeof(T) * 8;
4792      return static_cast<T>(ux >> (distance & max_shift_value)) |
4793                           (value << (reg_bits - (distance & max_shift_value)));
4794    }
4795  }
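  // Worked example (illustrative): rotating right moves the low bits into the
  // high positions, e.g. for int Compute(1, 1, kMaxIntShiftDistance) yields
  // the bit pattern 0x80000000, the same result as Integer.rotateRight(1, 1).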
4796
4797  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4798    return GetBlock()->GetGraph()->GetIntConstant(
4799        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4800  }
4801  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4802    return GetBlock()->GetGraph()->GetLongConstant(
4803        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4804  }
4805  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4806                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4807    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4808    UNREACHABLE();
4809  }
4810  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4811                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4812    LOG(FATAL) << DebugName() << " is not defined for float values";
4813    UNREACHABLE();
4814  }
4815  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4816                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4817    LOG(FATAL) << DebugName() << " is not defined for double values";
4818    UNREACHABLE();
4819  }
4820
4821  DECLARE_INSTRUCTION(Ror);
4822
4823 private:
4824  DISALLOW_COPY_AND_ASSIGN(HRor);
4825};
4826
4827// The value of a parameter in this method. Its location depends on
4828// the calling convention.
4829class HParameterValue FINAL : public HExpression<0> {
4830 public:
4831  HParameterValue(const DexFile& dex_file,
4832                  uint16_t type_index,
4833                  uint8_t index,
4834                  Primitive::Type parameter_type,
4835                  bool is_this = false)
4836      : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
4837        dex_file_(dex_file),
4838        type_index_(type_index),
4839        index_(index) {
4840    SetPackedFlag<kFlagIsThis>(is_this);
4841    SetPackedFlag<kFlagCanBeNull>(!is_this);
4842  }
4843
4844  const DexFile& GetDexFile() const { return dex_file_; }
4845  uint16_t GetTypeIndex() const { return type_index_; }
4846  uint8_t GetIndex() const { return index_; }
4847  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }
4848
4849  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
4850  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
4851
4852  DECLARE_INSTRUCTION(ParameterValue);
4853
4854 private:
4855  // Whether or not the parameter value corresponds to the 'this' argument.
4856  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
4857  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
4858  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
4859  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
4860                "Too many packed fields.");
4861
4862  const DexFile& dex_file_;
4863  const uint16_t type_index_;
4864  // The index of this parameter in the parameters list. Must be less
4865  // than HGraph::number_of_in_vregs_.
4866  const uint8_t index_;
4867
4868  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
4869};
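
// Illustrative sketch (hypothetical builder code, not from this header): the
// receiver of an instance method is modelled as parameter 0 with
// `is_this == true`, which also marks it as never null.
//
//   HParameterValue* receiver = new (arena) HParameterValue(
//       dex_file, type_index, /* index= */ 0u, Primitive::kPrimNot, /* is_this= */ true);
//   DCHECK(receiver->IsThis());
//   DCHECK(!receiver->CanBeNull());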
4870
4871class HNot FINAL : public HUnaryOperation {
4872 public:
4873  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4874      : HUnaryOperation(result_type, input, dex_pc) {}
4875
4876  bool CanBeMoved() const OVERRIDE { return true; }
4877  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4878    return true;
4879  }
4880
4881  template <typename T> static T Compute(T x) { return ~x; }
4882
4883  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4884    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4885  }
4886  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4887    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4888  }
4889  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4890    LOG(FATAL) << DebugName() << " is not defined for float values";
4891    UNREACHABLE();
4892  }
4893  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4894    LOG(FATAL) << DebugName() << " is not defined for double values";
4895    UNREACHABLE();
4896  }
4897
4898  DECLARE_INSTRUCTION(Not);
4899
4900 private:
4901  DISALLOW_COPY_AND_ASSIGN(HNot);
4902};
4903
4904class HBooleanNot FINAL : public HUnaryOperation {
4905 public:
4906  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
4907      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
4908
4909  bool CanBeMoved() const OVERRIDE { return true; }
4910  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4911    return true;
4912  }
4913
4914  template <typename T> static bool Compute(T x) {
4915    DCHECK(IsUint<1>(x)) << x;
4916    return !x;
4917  }
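  // Worked example (illustrative): inputs must already be 0 or 1 (see the
  // DCHECK above), so Compute(0) == true and Compute(1) == false; any other
  // value would trip the DCHECK.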
4918
4919  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4920    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4921  }
4922  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4923    LOG(FATAL) << DebugName() << " is not defined for long values";
4924    UNREACHABLE();
4925  }
4926  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4927    LOG(FATAL) << DebugName() << " is not defined for float values";
4928    UNREACHABLE();
4929  }
4930  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4931    LOG(FATAL) << DebugName() << " is not defined for double values";
4932    UNREACHABLE();
4933  }
4934
4935  DECLARE_INSTRUCTION(BooleanNot);
4936
4937 private:
4938  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
4939};
4940
4941class HTypeConversion FINAL : public HExpression<1> {
4942 public:
4943  // Instantiate a type conversion of `input` to `result_type`.
4944  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
4945      : HExpression(result_type, SideEffects::None(), dex_pc) {
4946    SetRawInputAt(0, input);
4947    // Invariant: We should never generate a conversion to a Boolean value.
4948    DCHECK_NE(Primitive::kPrimBoolean, result_type);
4949  }
4950
4951  HInstruction* GetInput() const { return InputAt(0); }
4952  Primitive::Type GetInputType() const { return GetInput()->GetType(); }
4953  Primitive::Type GetResultType() const { return GetType(); }
4954
4955  bool CanBeMoved() const OVERRIDE { return true; }
4956  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4957    return true;
4958  }
4959
4960  // Try to statically evaluate the conversion and return a HConstant
4961  // containing the result.  If the input cannot be converted, return nullptr.
4962  HConstant* TryStaticEvaluation() const;
4963
4964  DECLARE_INSTRUCTION(TypeConversion);
4965
4966 private:
4967  DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
4968};
4969
4970static constexpr uint32_t kNoRegNumber = -1;
4971
4972class HNullCheck FINAL : public HExpression<1> {
4973 public:
4974  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
4975  // constructor.
4976  HNullCheck(HInstruction* value, uint32_t dex_pc)
4977      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4978    SetRawInputAt(0, value);
4979  }
4980
4981  bool CanBeMoved() const OVERRIDE { return true; }
4982  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4983    return true;
4984  }
4985
4986  bool NeedsEnvironment() const OVERRIDE { return true; }
4987
4988  bool CanThrow() const OVERRIDE { return true; }
4989
4990  bool CanBeNull() const OVERRIDE { return false; }
4991
4992
4993  DECLARE_INSTRUCTION(NullCheck);
4994
4995 private:
4996  DISALLOW_COPY_AND_ASSIGN(HNullCheck);
4997};
4998
4999class FieldInfo : public ValueObject {
5000 public:
5001  FieldInfo(MemberOffset field_offset,
5002            Primitive::Type field_type,
5003            bool is_volatile,
5004            uint32_t index,
5005            uint16_t declaring_class_def_index,
5006            const DexFile& dex_file,
5007            Handle<mirror::DexCache> dex_cache)
5008      : field_offset_(field_offset),
5009        field_type_(field_type),
5010        is_volatile_(is_volatile),
5011        index_(index),
5012        declaring_class_def_index_(declaring_class_def_index),
5013        dex_file_(dex_file),
5014        dex_cache_(dex_cache) {}
5015
5016  MemberOffset GetFieldOffset() const { return field_offset_; }
5017  Primitive::Type GetFieldType() const { return field_type_; }
5018  uint32_t GetFieldIndex() const { return index_; }
5019  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_; }
5020  const DexFile& GetDexFile() const { return dex_file_; }
5021  bool IsVolatile() const { return is_volatile_; }
5022  Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }
5023
5024 private:
5025  const MemberOffset field_offset_;
5026  const Primitive::Type field_type_;
5027  const bool is_volatile_;
5028  const uint32_t index_;
5029  const uint16_t declaring_class_def_index_;
5030  const DexFile& dex_file_;
5031  const Handle<mirror::DexCache> dex_cache_;
5032};
5033
5034class HInstanceFieldGet FINAL : public HExpression<1> {
5035 public:
5036  HInstanceFieldGet(HInstruction* value,
5037                    Primitive::Type field_type,
5038                    MemberOffset field_offset,
5039                    bool is_volatile,
5040                    uint32_t field_idx,
5041                    uint16_t declaring_class_def_index,
5042                    const DexFile& dex_file,
5043                    Handle<mirror::DexCache> dex_cache,
5044                    uint32_t dex_pc)
5045      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
5046        field_info_(field_offset,
5047                    field_type,
5048                    is_volatile,
5049                    field_idx,
5050                    declaring_class_def_index,
5051                    dex_file,
5052                    dex_cache) {
5053    SetRawInputAt(0, value);
5054  }
5055
5056  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
5057
5058  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
5059    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
5060    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5061  }
5062
5063  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5064    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
5065  }
5066
5067  size_t ComputeHashCode() const OVERRIDE {
5068    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5069  }
5070
5071  const FieldInfo& GetFieldInfo() const { return field_info_; }
5072  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5073  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5074  bool IsVolatile() const { return field_info_.IsVolatile(); }
5075
5076  DECLARE_INSTRUCTION(InstanceFieldGet);
5077
5078 private:
5079  const FieldInfo field_info_;
5080
5081  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
5082};
5083
5084class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
5085 public:
5086  HInstanceFieldSet(HInstruction* object,
5087                    HInstruction* value,
5088                    Primitive::Type field_type,
5089                    MemberOffset field_offset,
5090                    bool is_volatile,
5091                    uint32_t field_idx,
5092                    uint16_t declaring_class_def_index,
5093                    const DexFile& dex_file,
5094                    Handle<mirror::DexCache> dex_cache,
5095                    uint32_t dex_pc)
5096      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
5097        field_info_(field_offset,
5098                    field_type,
5099                    is_volatile,
5100                    field_idx,
5101                    declaring_class_def_index,
5102                    dex_file,
5103                    dex_cache) {
5104    SetPackedFlag<kFlagValueCanBeNull>(true);
5105    SetRawInputAt(0, object);
5106    SetRawInputAt(1, value);
5107  }
5108
5109  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5110    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
5111  }
5112
5113  const FieldInfo& GetFieldInfo() const { return field_info_; }
5114  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5115  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5116  bool IsVolatile() const { return field_info_.IsVolatile(); }
5117  HInstruction* GetValue() const { return InputAt(1); }
5118  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5119  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5120
5121  DECLARE_INSTRUCTION(InstanceFieldSet);
5122
5123 private:
5124  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
5125  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
5126  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
5127                "Too many packed fields.");
5128
5129  const FieldInfo field_info_;
5130
5131  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
5132};
5133
5134class HArrayGet FINAL : public HExpression<2> {
5135 public:
5136  HArrayGet(HInstruction* array,
5137            HInstruction* index,
5138            Primitive::Type type,
5139            uint32_t dex_pc,
5140            bool is_string_char_at = false)
5141      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
5142    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
5143    SetRawInputAt(0, array);
5144    SetRawInputAt(1, index);
5145  }
5146
5147  bool CanBeMoved() const OVERRIDE { return true; }
5148  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5149    return true;
5150  }
5151  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5152    // TODO: We can be smarter here.
5153    // Currently, the array access is always preceded by an ArrayLength or a NullCheck
5154    // which generates the implicit null check. There are cases when these can be removed
5155    // to produce better code. If we ever add optimizations to do so we should allow an
5156    // implicit check here (as long as the address falls in the first page).
5157    return false;
5158  }
5159
5160  bool IsEquivalentOf(HArrayGet* other) const {
5161    bool result = (GetDexPc() == other->GetDexPc());
5162    if (kIsDebugBuild && result) {
5163      DCHECK_EQ(GetBlock(), other->GetBlock());
5164      DCHECK_EQ(GetArray(), other->GetArray());
5165      DCHECK_EQ(GetIndex(), other->GetIndex());
5166      if (Primitive::IsIntOrLongType(GetType())) {
5167        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
5168      } else {
5169        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
5170        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
5171      }
5172    }
5173    return result;
5174  }
5175
5176  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
5177
5178  HInstruction* GetArray() const { return InputAt(0); }
5179  HInstruction* GetIndex() const { return InputAt(1); }
5180
5181  DECLARE_INSTRUCTION(ArrayGet);
5182
5183 private:
5184  // We treat a String as an array, creating the HArrayGet from String.charAt()
5185  // intrinsic in the instruction simplifier. We can always determine whether
5186  // a particular HArrayGet is actually a String.charAt() by looking at the type
5187  // of the input but that requires holding the mutator lock, so we prefer to use
5188  // a flag, so that code generators don't need to do the locking.
5189  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
5190  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
5191  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5192                "Too many packed fields.");
5193
5194  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
5195};
5196
5197class HArraySet FINAL : public HTemplateInstruction<3> {
5198 public:
5199  HArraySet(HInstruction* array,
5200            HInstruction* index,
5201            HInstruction* value,
5202            Primitive::Type expected_component_type,
5203            uint32_t dex_pc)
5204      : HTemplateInstruction(SideEffects::None(), dex_pc) {
5205    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
5206    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
5207    SetPackedFlag<kFlagValueCanBeNull>(true);
5208    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
5209    SetRawInputAt(0, array);
5210    SetRawInputAt(1, index);
5211    SetRawInputAt(2, value);
5212    // Make a best guess now, may be refined during SSA building.
5213    ComputeSideEffects();
5214  }
5215
5216  bool NeedsEnvironment() const OVERRIDE {
5217    // We call a runtime method to throw ArrayStoreException.
5218    return NeedsTypeCheck();
5219  }
5220
5221  // Can throw ArrayStoreException.
5222  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }
5223
5224  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5225    // TODO: Same as for ArrayGet.
5226    return false;
5227  }
5228
5229  void ClearNeedsTypeCheck() {
5230    SetPackedFlag<kFlagNeedsTypeCheck>(false);
5231  }
5232
5233  void ClearValueCanBeNull() {
5234    SetPackedFlag<kFlagValueCanBeNull>(false);
5235  }
5236
5237  void SetStaticTypeOfArrayIsObjectArray() {
5238    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
5239  }
5240
5241  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5242  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
5243  bool StaticTypeOfArrayIsObjectArray() const {
5244    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
5245  }
5246
5247  HInstruction* GetArray() const { return InputAt(0); }
5248  HInstruction* GetIndex() const { return InputAt(1); }
5249  HInstruction* GetValue() const { return InputAt(2); }
5250
5251  Primitive::Type GetComponentType() const {
5252    // The Dex format does not type floating point index operations. Since the
5253    // `expected_component_type_` is set during building and may therefore be
5254    // incorrect, we also check the value type. If it is a floating
5255    // point type, we must use that type.
5256    Primitive::Type value_type = GetValue()->GetType();
5257    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
5258        ? value_type
5259        : GetRawExpectedComponentType();
5260  }
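  // Worked example (illustrative): an aput recorded with expected component
  // type kPrimInt during building, but whose stored value has type kPrimFloat,
  // reports kPrimFloat here; a reference store keeps the recorded kPrimNot.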
5261
5262  Primitive::Type GetRawExpectedComponentType() const {
5263    return GetPackedField<ExpectedComponentTypeField>();
5264  }
5265
5266  void ComputeSideEffects() {
5267    Primitive::Type type = GetComponentType();
5268    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
5269        SideEffectsForArchRuntimeCalls(type)));
5270  }
5271
5272  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
5273    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
5274  }
5275
5276  DECLARE_INSTRUCTION(ArraySet);
5277
5278 private:
5279  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
5280  static constexpr size_t kFieldExpectedComponentTypeSize =
5281      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5282  static constexpr size_t kFlagNeedsTypeCheck =
5283      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
5284  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
5285  // Cached information for the reference_type_info_ so that codegen
5286  // does not need to inspect the static type.
5287  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
5288  static constexpr size_t kNumberOfArraySetPackedBits =
5289      kFlagStaticTypeOfArrayIsObjectArray + 1;
5290  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5291  using ExpectedComponentTypeField =
5292      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
5293
5294  DISALLOW_COPY_AND_ASSIGN(HArraySet);
5295};
5296
5297class HArrayLength FINAL : public HExpression<1> {
5298 public:
5299  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
5300      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
5301    SetPackedFlag<kFlagIsStringLength>(is_string_length);
5302    // Note that arrays do not change length, so the instruction does not
5303    // depend on any write.
5304    SetRawInputAt(0, array);
5305  }
5306
5307  bool CanBeMoved() const OVERRIDE { return true; }
5308  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5309    return true;
5310  }
5311  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5312    return obj == InputAt(0);
5313  }
5314
5315  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
5316
5317  DECLARE_INSTRUCTION(ArrayLength);
5318
5319 private:
5320  // We treat a String as an array, creating the HArrayLength from String.length()
5321  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
5322  // determine whether a particular HArrayLength is actually a String.length() by
5323  // looking at the type of the input, but that requires holding the mutator lock,
5324  // so we prefer to use a flag so that code generators don't need to do the locking.
5325  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
5326  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
5327  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5328                "Too many packed fields.");
5329
5330  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
5331};
5332
5333class HBoundsCheck FINAL : public HExpression<2> {
5334 public:
5335  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5336  // constructor.
5337  HBoundsCheck(HInstruction* index,
5338               HInstruction* length,
5339               uint32_t dex_pc,
5340               uint32_t string_char_at_method_index = DexFile::kDexNoIndex)
5341      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc),
5342        string_char_at_method_index_(string_char_at_method_index) {
5343    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5344    SetRawInputAt(0, index);
5345    SetRawInputAt(1, length);
5346  }
5347
5348  bool CanBeMoved() const OVERRIDE { return true; }
5349  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5350    return true;
5351  }
5352
5353  bool NeedsEnvironment() const OVERRIDE { return true; }
5354
5355  bool CanThrow() const OVERRIDE { return true; }
5356
5357  bool IsStringCharAt() const { return GetStringCharAtMethodIndex() != DexFile::kDexNoIndex; }
5358  uint32_t GetStringCharAtMethodIndex() const { return string_char_at_method_index_; }
5359
5360  HInstruction* GetIndex() const { return InputAt(0); }
5361
5362  DECLARE_INSTRUCTION(BoundsCheck);
5363
5364 private:
5365  // We treat a String as an array, creating the HBoundsCheck from String.charAt()
5366  // intrinsic in the instruction simplifier. We want to include the String.charAt()
5367  // in the stack trace if we actually throw the StringIndexOutOfBoundsException,
5368  // so we need to create an HEnvironment which will be translated to an InlineInfo
5369  // indicating the extra stack frame. Since we add this HEnvironment quite late,
5370  // in the PrepareForRegisterAllocation pass, we need to remember the method index
5371  // from the invoke as we don't want to look again at the dex bytecode.
5372  uint32_t string_char_at_method_index_;  // DexFile::kDexNoIndex if regular array.
5373
5374  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
5375};
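
// Illustrative sketch (not from this header): how the instruction simplifier
// can lower String.charAt() into the array instructions above while keeping
// the data needed for a proper stack trace. `arena`, `str`, `index`, `dex_pc`
// and `char_at_method_index` are hypothetical.
//
//   HArrayLength* length =
//       new (arena) HArrayLength(str, dex_pc, /* is_string_length= */ true);
//   HBoundsCheck* bounds =
//       new (arena) HBoundsCheck(index, length, dex_pc, char_at_method_index);
//   HArrayGet* get = new (arena) HArrayGet(
//       str, bounds, Primitive::kPrimChar, dex_pc, /* is_string_char_at= */ true);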
5376
5377class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5378 public:
5379  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5380      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5381
5382  bool NeedsEnvironment() const OVERRIDE {
5383    return true;
5384  }
5385
5386  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5387  SlowPathCode* GetSlowPath() const { return slow_path_; }
5388
5389  DECLARE_INSTRUCTION(SuspendCheck);
5390
5391 private:
5392  // Only used for code generation, in order to share the same slow path between back edges
5393  // of the same loop.
5394  SlowPathCode* slow_path_;
5395
5396  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5397};
5398
5399// Pseudo-instruction which provides the native debugger with mapping information.
5400// It ensures that we can generate line number and local variables at this point.
5401class HNativeDebugInfo : public HTemplateInstruction<0> {
5402 public:
5403  explicit HNativeDebugInfo(uint32_t dex_pc)
5404      : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
5405
5406  bool NeedsEnvironment() const OVERRIDE {
5407    return true;
5408  }
5409
5410  DECLARE_INSTRUCTION(NativeDebugInfo);
5411
5412 private:
5413  DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5414};
5415
5416/**
5417 * Instruction to load a Class object.
5418 */
5419class HLoadClass FINAL : public HInstruction {
5420 public:
5421  // Determines how to load the Class.
5422  enum class LoadKind {
5423    // Use the Class* from the method's own ArtMethod*.
5424    kReferrersClass,
5425
5426    // Use boot image Class* address that will be known at link time.
5427    // Used for boot image classes referenced by boot image code in non-PIC mode.
5428    kBootImageLinkTimeAddress,
5429
5430    // Use PC-relative boot image Class* address that will be known at link time.
5431    // Used for boot image classes referenced by boot image code in PIC mode.
5432    kBootImageLinkTimePcRelative,
5433
5434    // Use a known boot image Class* address, embedded in the code by the codegen.
5435    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
5436    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5437    // GetIncludePatchInformation().
5438    kBootImageAddress,
5439
5440    // Load from the resolved types array at an absolute address.
5441    // Used for classes outside the boot image referenced by JIT-compiled code.
5442    kDexCacheAddress,
5443
5444    // Load from resolved types array in the dex cache using a PC-relative load.
5445    // Used for classes outside boot image when we know that we can access
5446    // the dex cache arrays using a PC-relative load.
5447    kDexCachePcRelative,
5448
5449    // Load from resolved types array accessed through the class loaded from
5450    // the compiled method's own ArtMethod*. This is the default access type when
5451    // all other types are unavailable.
5452    kDexCacheViaMethod,
5453
5454    kLast = kDexCacheViaMethod
5455  };
5456
5457  HLoadClass(HCurrentMethod* current_method,
5458             uint16_t type_index,
5459             const DexFile& dex_file,
5460             bool is_referrers_class,
5461             uint32_t dex_pc,
5462             bool needs_access_check,
5463             bool is_in_dex_cache,
5464             bool is_in_boot_image)
5465      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5466        special_input_(HUserRecord<HInstruction*>(current_method)),
5467        type_index_(type_index),
5468        dex_file_(dex_file),
5469        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5470    // Referrers class should not need access check. We never inline unverified
5471    // methods so we can't possibly end up in this situation.
5472    DCHECK(!is_referrers_class || !needs_access_check);
5473
5474    SetPackedField<LoadKindField>(
5475        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
5476    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5477    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
5478    SetPackedFlag<kFlagIsInBootImage>(is_in_boot_image);
5479    SetPackedFlag<kFlagGenerateClInitCheck>(false);
5480  }
5481
5482  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5483    DCHECK(HasAddress(load_kind));
5484    load_data_.address = address;
5485    SetLoadKindInternal(load_kind);
5486  }
5487
5488  void SetLoadKindWithTypeReference(LoadKind load_kind,
5489                                    const DexFile& dex_file,
5490                                    uint32_t type_index) {
5491    DCHECK(HasTypeReference(load_kind));
5492    DCHECK(IsSameDexFile(dex_file_, dex_file));
5493    DCHECK_EQ(type_index_, type_index);
5494    SetLoadKindInternal(load_kind);
5495  }
5496
5497  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5498                                        const DexFile& dex_file,
5499                                        uint32_t element_index) {
5500    DCHECK(HasDexCacheReference(load_kind));
5501    DCHECK(IsSameDexFile(dex_file_, dex_file));
5502    load_data_.dex_cache_element_index = element_index;
5503    SetLoadKindInternal(load_kind);
5504  }
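  // Illustrative sketch (hypothetical sharpening code, not from this header):
  // a pass that has located the class object at a fixed boot-image address
  // could switch this load away from the default kDexCacheViaMethod kind:
  //
  //   load_class->SetLoadKindWithAddress(HLoadClass::LoadKind::kBootImageAddress,
  //                                      boot_image_class_address);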
5505
5506  LoadKind GetLoadKind() const {
5507    return GetPackedField<LoadKindField>();
5508  }
5509
5510  bool CanBeMoved() const OVERRIDE { return true; }
5511
5512  bool InstructionDataEquals(const HInstruction* other) const;
5513
5514  size_t ComputeHashCode() const OVERRIDE { return type_index_; }
5515
5516  bool CanBeNull() const OVERRIDE { return false; }
5517
5518  bool NeedsEnvironment() const OVERRIDE {
5519    return CanCallRuntime();
5520  }
5521
5522  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5523    // The entrypoint the code generator is going to call does not perform
5524    // the class initialization (clinit).
5525    DCHECK(!NeedsAccessCheck());
5526    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
5527  }
5528
5529  bool CanCallRuntime() const {
5530    return MustGenerateClinitCheck() ||
5531           (!IsReferrersClass() && !IsInDexCache()) ||
5532           NeedsAccessCheck();
5533  }
5534
5535
5536  bool CanThrow() const OVERRIDE {
5537    return CanCallRuntime();
5538  }
5539
5540  ReferenceTypeInfo GetLoadedClassRTI() {
5541    return loaded_class_rti_;
5542  }
5543
5544  void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5545    // Make sure we only set exact types (the loaded class should never be merged).
5546    DCHECK(rti.IsExact());
5547    loaded_class_rti_ = rti;
5548  }
5549
5550  uint32_t GetTypeIndex() const { return type_index_; }
5551  const DexFile& GetDexFile() const { return dex_file_; }
5552
5553  uint32_t GetDexCacheElementOffset() const;
5554
5555  uint64_t GetAddress() const {
5556    DCHECK(HasAddress(GetLoadKind()));
5557    return load_data_.address;
5558  }
5559
5560  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }
5561
5562  static SideEffects SideEffectsForArchRuntimeCalls() {
5563    return SideEffects::CanTriggerGC();
5564  }
5565
5566  bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
5567  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5568  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5569  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
5570  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5571
5572  void MarkInDexCache() {
5573    SetPackedFlag<kFlagIsInDexCache>(true);
5574    DCHECK(!NeedsEnvironment());
5575    RemoveEnvironment();
5576    SetSideEffects(SideEffects::None());
5577  }
5578
5579  void MarkInBootImage() {
5580    SetPackedFlag<kFlagIsInBootImage>(true);
5581  }
5582
5583  void AddSpecialInput(HInstruction* special_input);
5584
5585  using HInstruction::GetInputRecords;  // Keep the const version visible.
5586  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5587    return ArrayRef<HUserRecord<HInstruction*>>(
5588        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5589  }
5590
5591  Primitive::Type GetType() const OVERRIDE {
5592    return Primitive::kPrimNot;
5593  }
5594
5595  DECLARE_INSTRUCTION(LoadClass);
5596
5597 private:
5598  static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
5599  static constexpr size_t kFlagIsInDexCache        = kFlagNeedsAccessCheck + 1;
5600  static constexpr size_t kFlagIsInBootImage       = kFlagIsInDexCache + 1;
5601  // Whether this instruction must generate the initialization check.
5602  // Used for code generation.
5603  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
5604  static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
5605  static constexpr size_t kFieldLoadKindSize =
5606      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5607  static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5608  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5609  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5610
5611  static bool HasTypeReference(LoadKind load_kind) {
5612    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5613        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5614        load_kind == LoadKind::kDexCacheViaMethod ||
5615        load_kind == LoadKind::kReferrersClass;
5616  }
5617
5618  static bool HasAddress(LoadKind load_kind) {
5619    return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
5620  }
5621
5622  static bool HasDexCacheReference(LoadKind load_kind) {
5623    return load_kind == LoadKind::kDexCachePcRelative;
5624  }
5625
5626  void SetLoadKindInternal(LoadKind load_kind);
5627
5628  // The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
5629  // For other load kinds it's empty or possibly some architecture-specific instruction
5630  // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
5631  HUserRecord<HInstruction*> special_input_;
5632
5633  const uint16_t type_index_;
5634  const DexFile& dex_file_;
5635
5636  union {
5637    uint32_t dex_cache_element_index;   // Only for dex cache reference.
5638    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
5639  } load_data_;
5640
5641  ReferenceTypeInfo loaded_class_rti_;
5642
5643  DISALLOW_COPY_AND_ASSIGN(HLoadClass);
5644};
5645std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
5646
5647// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5648inline uint32_t HLoadClass::GetDexCacheElementOffset() const {
5649  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5650  return load_data_.dex_cache_element_index;
5651}
5652
5653// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
5654inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
5655  // The special input is used for PC-relative loads on some architectures,
5656  // including literal pool loads, which are PC-relative too.
5657  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5658         GetLoadKind() == LoadKind::kDexCachePcRelative ||
5659         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5660         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5661  DCHECK(special_input_.GetInstruction() == nullptr);
5662  special_input_ = HUserRecord<HInstruction*>(special_input);
5663  special_input->AddUseAt(this, 0);
5664}
5665
5666class HLoadString FINAL : public HInstruction {
5667 public:
5668  // Determines how to load the String.
5669  enum class LoadKind {
5670    // Use boot image String* address that will be known at link time.
5671    // Used for boot image strings referenced by boot image code in non-PIC mode.
5672    kBootImageLinkTimeAddress,
5673
5674    // Use PC-relative boot image String* address that will be known at link time.
5675    // Used for boot image strings referenced by boot image code in PIC mode.
5676    kBootImageLinkTimePcRelative,
5677
5678    // Use a known boot image String* address, embedded in the code by the codegen.
5679    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
5680    // Note: codegen needs to emit a linker patch if indicated by compiler options'
5681    // GetIncludePatchInformation().
5682    kBootImageAddress,
5683
5684    // Load from an entry in the .bss section using a PC-relative load.
5685    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
5686    kBssEntry,
5687
5688    // Load from resolved strings array accessed through the class loaded from
5689    // the compiled method's own ArtMethod*. This is the default access type when
5690    // all other types are unavailable.
5691    kDexCacheViaMethod,
5692
5693    // Load from the root table associated with the JIT compiled method.
5694    kJitTableAddress,
5695
5696    kLast = kJitTableAddress,
5697  };
5698
5699  HLoadString(HCurrentMethod* current_method,
5700              uint32_t string_index,
5701              const DexFile& dex_file,
5702              uint32_t dex_pc)
5703      : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
5704        special_input_(HUserRecord<HInstruction*>(current_method)),
5705        string_index_(string_index) {
5706    SetPackedFlag<kFlagIsInDexCache>(false);
5707    SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
5708    load_data_.dex_file_ = &dex_file;
5709  }
5710
5711  void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5712    DCHECK(HasAddress(load_kind));
5713    load_data_.address = address;
5714    SetLoadKindInternal(load_kind);
5715  }
5716
5717  void SetLoadKindWithStringReference(LoadKind load_kind,
5718                                      const DexFile& dex_file,
5719                                      uint32_t string_index) {
5720    DCHECK(HasStringReference(load_kind));
5721    load_data_.dex_file_ = &dex_file;
5722    string_index_ = string_index;
5723    SetLoadKindInternal(load_kind);
5724  }
5725
5726  LoadKind GetLoadKind() const {
5727    return GetPackedField<LoadKindField>();
5728  }
5729
5730  const DexFile& GetDexFile() const;
5731
5732  uint32_t GetStringIndex() const {
5733    DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
5734    return string_index_;
5735  }
5736
5737  uint64_t GetAddress() const {
5738    DCHECK(HasAddress(GetLoadKind()));
5739    return load_data_.address;
5740  }
5741
5742  bool CanBeMoved() const OVERRIDE { return true; }
5743
5744  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;
5745
5746  size_t ComputeHashCode() const OVERRIDE { return string_index_; }
5747
5748  // Will call the runtime if we need to load the string through
5749  // the dex cache and the string is not guaranteed to be there yet.
5750  bool NeedsEnvironment() const OVERRIDE {
5751    LoadKind load_kind = GetLoadKind();
5752    if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
5753        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5754        load_kind == LoadKind::kBootImageAddress ||
5755        load_kind == LoadKind::kJitTableAddress) {
5756      return false;
5757    }
5758    return !IsInDexCache();
5759  }
5760
5761  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5762    return GetLoadKind() == LoadKind::kDexCacheViaMethod;
5763  }
5764
5765  bool CanBeNull() const OVERRIDE { return false; }
5766  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }
5767
5768  static SideEffects SideEffectsForArchRuntimeCalls() {
5769    return SideEffects::CanTriggerGC();
5770  }
5771
5772  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5773
5774  void MarkInDexCache() {
5775    SetPackedFlag<kFlagIsInDexCache>(true);
5776    DCHECK(!NeedsEnvironment());
5777    RemoveEnvironment();
5778    SetSideEffects(SideEffects::None());
5779  }
5780
5781  void AddSpecialInput(HInstruction* special_input);
5782
5783  using HInstruction::GetInputRecords;  // Keep the const version visible.
5784  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
5785    return ArrayRef<HUserRecord<HInstruction*>>(
5786        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
5787  }
5788
5789  Primitive::Type GetType() const OVERRIDE {
5790    return Primitive::kPrimNot;
5791  }
5792
5793  DECLARE_INSTRUCTION(LoadString);
5794
5795 private:
5796  static constexpr size_t kFlagIsInDexCache = kNumberOfGenericPackedBits;
5797  static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
5798  static constexpr size_t kFieldLoadKindSize =
5799      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5800  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5801  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5802  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
5803
5804  static bool HasStringReference(LoadKind load_kind) {
5805    return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5806        load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5807        load_kind == LoadKind::kBssEntry ||
5808        load_kind == LoadKind::kDexCacheViaMethod ||
5809        load_kind == LoadKind::kJitTableAddress;
5810  }
5811
5812  static bool HasAddress(LoadKind load_kind) {
5813    return load_kind == LoadKind::kBootImageAddress;
5814  }
5815
5816  void SetLoadKindInternal(LoadKind load_kind);
5817
5818  // The special input is the HCurrentMethod for kDexCacheViaMethod.
5819  // For other load kinds it's empty or possibly some architecture-specific instruction
5820  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
5821  HUserRecord<HInstruction*> special_input_;
5822
5823  // The string index also serves as the hash code and is needed for slow paths,
5824  // so it must not be overwritten with other load data.
5825  uint32_t string_index_;
5826
5827  union {
5828    const DexFile* dex_file_;            // For string reference.
5829    uint64_t address;  // Up to 64-bit, needed for kBootImageAddress on 64-bit targets.
5830  } load_data_;
5831
5832  DISALLOW_COPY_AND_ASSIGN(HLoadString);
5833};
5834std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5835
5836// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5837inline const DexFile& HLoadString::GetDexFile() const {
5838  DCHECK(HasStringReference(GetLoadKind())) << GetLoadKind();
5839  return *load_data_.dex_file_;
5840}
5841
5842// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
5843inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5844  // The special input is used for PC-relative loads on some architectures,
5845  // including literal pool loads, which are PC-relative too.
5846  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5847         GetLoadKind() == LoadKind::kBssEntry ||
5848         GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
5849         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
5850  // HLoadString::GetInputRecords() returns an empty array at this point,
5851  // so use the GetInputRecords() from the base class to set the input record.
5852  DCHECK(special_input_.GetInstruction() == nullptr);
5853  special_input_ = HUserRecord<HInstruction*>(special_input);
5854  special_input->AddUseAt(this, 0);
5855}
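
// A minimal usage sketch (the `base` name below is illustrative, not part of this
// file): a backend that selected a PC-relative load kind attaches its
// architecture-specific base instruction as the special input:
//   HLoadString* load = ...;       // load kind set to e.g. kBootImageLinkTimePcRelative
//   load->AddSpecialInput(base);
//   // `base` is now reported as the single input of `load` via GetInputRecords()
//   // and `load` is registered in the use list of `base`.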
5856
5857/**
5858 * Performs an initialization check on its Class object input.
5859 */
5860class HClinitCheck FINAL : public HExpression<1> {
5861 public:
5862  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5863      : HExpression(
5864            Primitive::kPrimNot,
5865            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
5866            dex_pc) {
5867    SetRawInputAt(0, constant);
5868  }
5869
5870  bool CanBeMoved() const OVERRIDE { return true; }
5871  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5872    return true;
5873  }
5874
5875  bool NeedsEnvironment() const OVERRIDE {
5876    // May call runtime to initialize the class.
5877    return true;
5878  }
5879
5880  bool CanThrow() const OVERRIDE { return true; }
5881
5882  HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5883
5884  DECLARE_INSTRUCTION(ClinitCheck);
5885
5886 private:
5887  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
5888};
5889
5890class HStaticFieldGet FINAL : public HExpression<1> {
5891 public:
5892  HStaticFieldGet(HInstruction* cls,
5893                  Primitive::Type field_type,
5894                  MemberOffset field_offset,
5895                  bool is_volatile,
5896                  uint32_t field_idx,
5897                  uint16_t declaring_class_def_index,
5898                  const DexFile& dex_file,
5899                  Handle<mirror::DexCache> dex_cache,
5900                  uint32_t dex_pc)
5901      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
5902        field_info_(field_offset,
5903                    field_type,
5904                    is_volatile,
5905                    field_idx,
5906                    declaring_class_def_index,
5907                    dex_file,
5908                    dex_cache) {
5909    SetRawInputAt(0, cls);
5910  }
5911
5913  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
5914
5915  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
5916    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
5917    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5918  }
5919
5920  size_t ComputeHashCode() const OVERRIDE {
5921    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5922  }
5923
5924  const FieldInfo& GetFieldInfo() const { return field_info_; }
5925  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5926  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5927  bool IsVolatile() const { return field_info_.IsVolatile(); }
5928
5929  DECLARE_INSTRUCTION(StaticFieldGet);
5930
5931 private:
5932  const FieldInfo field_info_;
5933
5934  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
5935};
5936
5937class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
5938 public:
5939  HStaticFieldSet(HInstruction* cls,
5940                  HInstruction* value,
5941                  Primitive::Type field_type,
5942                  MemberOffset field_offset,
5943                  bool is_volatile,
5944                  uint32_t field_idx,
5945                  uint16_t declaring_class_def_index,
5946                  const DexFile& dex_file,
5947                  Handle<mirror::DexCache> dex_cache,
5948                  uint32_t dex_pc)
5949      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
5950        field_info_(field_offset,
5951                    field_type,
5952                    is_volatile,
5953                    field_idx,
5954                    declaring_class_def_index,
5955                    dex_file,
5956                    dex_cache) {
5957    SetPackedFlag<kFlagValueCanBeNull>(true);
5958    SetRawInputAt(0, cls);
5959    SetRawInputAt(1, value);
5960  }
5961
5962  const FieldInfo& GetFieldInfo() const { return field_info_; }
5963  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5964  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5965  bool IsVolatile() const { return field_info_.IsVolatile(); }
5966
5967  HInstruction* GetValue() const { return InputAt(1); }
5968  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5969  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5970
5971  DECLARE_INSTRUCTION(StaticFieldSet);
5972
5973 private:
5974  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
5975  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
5976  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
5977                "Too many packed fields.");
5978
5979  const FieldInfo field_info_;
5980
5981  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
5982};
5983
5984class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
5985 public:
5986  HUnresolvedInstanceFieldGet(HInstruction* obj,
5987                              Primitive::Type field_type,
5988                              uint32_t field_index,
5989                              uint32_t dex_pc)
5990      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
5991        field_index_(field_index) {
5992    SetRawInputAt(0, obj);
5993  }
5994
5995  bool NeedsEnvironment() const OVERRIDE { return true; }
5996  bool CanThrow() const OVERRIDE { return true; }
5997
5998  Primitive::Type GetFieldType() const { return GetType(); }
5999  uint32_t GetFieldIndex() const { return field_index_; }
6000
6001  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
6002
6003 private:
6004  const uint32_t field_index_;
6005
6006  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
6007};
6008
6009class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
6010 public:
6011  HUnresolvedInstanceFieldSet(HInstruction* obj,
6012                              HInstruction* value,
6013                              Primitive::Type field_type,
6014                              uint32_t field_index,
6015                              uint32_t dex_pc)
6016      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6017        field_index_(field_index) {
6018    SetPackedField<FieldTypeField>(field_type);
6019    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6020    SetRawInputAt(0, obj);
6021    SetRawInputAt(1, value);
6022  }
6023
6024  bool NeedsEnvironment() const OVERRIDE { return true; }
6025  bool CanThrow() const OVERRIDE { return true; }
6026
6027  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6028  uint32_t GetFieldIndex() const { return field_index_; }
6029
6030  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
6031
6032 private:
6033  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6034  static constexpr size_t kFieldFieldTypeSize =
6035      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6036  static constexpr size_t kNumberOfUnresolvedInstanceFieldSetPackedBits =
6037      kFieldFieldType + kFieldFieldTypeSize;
6038  static_assert(kNumberOfUnresolvedInstanceFieldSetPackedBits <=
6039                HInstruction::kMaxNumberOfPackedBits, "Too many packed fields.");
6040  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6041
6042  const uint32_t field_index_;
6043
6044  DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
6045};
6046
6047class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
6048 public:
6049  HUnresolvedStaticFieldGet(Primitive::Type field_type,
6050                            uint32_t field_index,
6051                            uint32_t dex_pc)
6052      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
6053        field_index_(field_index) {
6054  }
6055
6056  bool NeedsEnvironment() const OVERRIDE { return true; }
6057  bool CanThrow() const OVERRIDE { return true; }
6058
6059  Primitive::Type GetFieldType() const { return GetType(); }
6060  uint32_t GetFieldIndex() const { return field_index_; }
6061
6062  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
6063
6064 private:
6065  const uint32_t field_index_;
6066
6067  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
6068};
6069
6070class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
6071 public:
6072  HUnresolvedStaticFieldSet(HInstruction* value,
6073                            Primitive::Type field_type,
6074                            uint32_t field_index,
6075                            uint32_t dex_pc)
6076      : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
6077        field_index_(field_index) {
6078    SetPackedField<FieldTypeField>(field_type);
6079    DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
6080    SetRawInputAt(0, value);
6081  }
6082
6083  bool NeedsEnvironment() const OVERRIDE { return true; }
6084  bool CanThrow() const OVERRIDE { return true; }
6085
6086  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
6087  uint32_t GetFieldIndex() const { return field_index_; }
6088
6089  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
6090
6091 private:
6092  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
6093  static constexpr size_t kFieldFieldTypeSize =
6094      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
6095  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
6096      kFieldFieldType + kFieldFieldTypeSize;
6097  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6098                "Too many packed fields.");
6099  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
6100
6101  const uint32_t field_index_;
6102
6103  DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
6104};
6105
6106// Implement the move-exception DEX instruction.
6107class HLoadException FINAL : public HExpression<0> {
6108 public:
6109  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
6110      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}
6111
6112  bool CanBeNull() const OVERRIDE { return false; }
6113
6114  DECLARE_INSTRUCTION(LoadException);
6115
6116 private:
6117  DISALLOW_COPY_AND_ASSIGN(HLoadException);
6118};
6119
6120// Implicit part of move-exception which clears thread-local exception storage.
6121// Must not be removed because the runtime expects the TLS to get cleared.
6122class HClearException FINAL : public HTemplateInstruction<0> {
6123 public:
6124  explicit HClearException(uint32_t dex_pc = kNoDexPc)
6125      : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}
6126
6127  DECLARE_INSTRUCTION(ClearException);
6128
6129 private:
6130  DISALLOW_COPY_AND_ASSIGN(HClearException);
6131};
6132
6133class HThrow FINAL : public HTemplateInstruction<1> {
6134 public:
6135  HThrow(HInstruction* exception, uint32_t dex_pc)
6136      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6137    SetRawInputAt(0, exception);
6138  }
6139
6140  bool IsControlFlow() const OVERRIDE { return true; }
6141
6142  bool NeedsEnvironment() const OVERRIDE { return true; }
6143
6144  bool CanThrow() const OVERRIDE { return true; }
6145
6147  DECLARE_INSTRUCTION(Throw);
6148
6149 private:
6150  DISALLOW_COPY_AND_ASSIGN(HThrow);
6151};
6152
6153/**
6154 * Implementation strategies for the code generator of an `HInstanceOf`
6155 * or `HCheckCast`.
6156 */
6157enum class TypeCheckKind {
6158  kUnresolvedCheck,       // Check against an unresolved type.
6159  kExactCheck,            // Can do a single class compare.
6160  kClassHierarchyCheck,   // Can just walk the super class chain.
6161  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
6162  kInterfaceCheck,        // No optimization yet when checking against an interface.
6163  kArrayObjectCheck,      // Can just check if the array is not primitive.
6164  kArrayCheck,            // No optimization yet when checking against a generic array.
6165  kLast = kArrayCheck
6166};
6167
6168std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
6169
6170class HInstanceOf FINAL : public HExpression<2> {
6171 public:
6172  HInstanceOf(HInstruction* object,
6173              HLoadClass* constant,
6174              TypeCheckKind check_kind,
6175              uint32_t dex_pc)
6176      : HExpression(Primitive::kPrimBoolean,
6177                    SideEffectsForArchRuntimeCalls(check_kind),
6178                    dex_pc) {
6179    SetPackedField<TypeCheckKindField>(check_kind);
6180    SetPackedFlag<kFlagMustDoNullCheck>(true);
6181    SetRawInputAt(0, object);
6182    SetRawInputAt(1, constant);
6183  }
6184
6185  bool CanBeMoved() const OVERRIDE { return true; }
6186
6187  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6188    return true;
6189  }
6190
6191  bool NeedsEnvironment() const OVERRIDE {
6192    return CanCallRuntime(GetTypeCheckKind());
6193  }
6194
6195  // Used only in code generation.
6196  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
6197  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
6198  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
6199  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
6200
6201  static bool CanCallRuntime(TypeCheckKind check_kind) {
6202    // MIPS currently does runtime calls for all check kinds other than the exact check.
6203    return check_kind != TypeCheckKind::kExactCheck;
6204  }
6205
6206  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
6207    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
6208  }
6209
6210  DECLARE_INSTRUCTION(InstanceOf);
6211
6212 private:
6213  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
6214  static constexpr size_t kFieldTypeCheckKindSize =
6215      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
6216  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
6217  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
6218  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6219  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
6220
6221  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
6222};
6223
6224class HBoundType FINAL : public HExpression<1> {
6225 public:
6226  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
6227      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
6228        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
6229    SetPackedFlag<kFlagUpperCanBeNull>(true);
6230    SetPackedFlag<kFlagCanBeNull>(true);
6231    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
6232    SetRawInputAt(0, input);
6233  }
6234
6235  // {Get,Set}Upper* should only be used in reference type propagation.
6236  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
6237  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
6238  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
6239
6240  void SetCanBeNull(bool can_be_null) {
6241    DCHECK(GetUpperCanBeNull() || !can_be_null);
6242    SetPackedFlag<kFlagCanBeNull>(can_be_null);
6243  }
6244
6245  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
6246
6247  DECLARE_INSTRUCTION(BoundType);
6248
6249 private:
6250  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
6251  // is false then CanBeNull() cannot be true).
6252  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
6253  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
6254  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
6255  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6256
6257  // Encodes the upper-most class that this instruction can have. In other words
6258  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
6259  // It is used to bound the type in cases like:
6260  //   if (x instanceof ClassX) {
6261  //     // upper_bound_ will be ClassX
6262  //   }
6263  ReferenceTypeInfo upper_bound_;
6264
6265  DISALLOW_COPY_AND_ASSIGN(HBoundType);
6266};
6267
6268class HCheckCast FINAL : public HTemplateInstruction<2> {
6269 public:
6270  HCheckCast(HInstruction* object,
6271             HLoadClass* constant,
6272             TypeCheckKind check_kind,
6273             uint32_t dex_pc)
6274      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6275    SetPackedField<TypeCheckKindField>(check_kind);
6276    SetPackedFlag<kFlagMustDoNullCheck>(true);
6277    SetRawInputAt(0, object);
6278    SetRawInputAt(1, constant);
6279  }
6280
6281  bool CanBeMoved() const OVERRIDE { return true; }
6282
6283  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6284    return true;
6285  }
6286
6287  bool NeedsEnvironment() const OVERRIDE {
6288    // Instruction may throw a CheckCastError.
6289    return true;
6290  }
6291
6292  bool CanThrow() const OVERRIDE { return true; }
6293
6294  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
6295  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
6296  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
6297  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
6298
6299  DECLARE_INSTRUCTION(CheckCast);
6300
6301 private:
6302  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
6303  static constexpr size_t kFieldTypeCheckKindSize =
6304      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
6305  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
6306  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
6307  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6308  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
6309
6310  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
6311};
6312
6313/**
6314 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
6315 * @details We define the combined barrier types that are actually required
6316 * by the Java Memory Model, rather than using exactly the terminology from
6317 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
6318 * primitives.  Note that the JSR-133 cookbook generally does not deal with
6319 * store atomicity issues, and the recipes there are not always entirely sufficient.
6320 * The current recipe is as follows:
6321 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
6322 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
6323 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
6324 * -# Use StoreStore barrier after all stores but before return from any constructor whose
6325 *    class has final fields.
6326 * -# Use NTStoreStore to order non-temporal stores with respect to all later
6327 *    store-to-memory instructions.  Only generated together with non-temporal stores.
6328 */
6329enum MemBarrierKind {
6330  kAnyStore,
6331  kLoadAny,
6332  kStoreStore,
6333  kAnyAny,
6334  kNTStoreStore,
6335  kLastBarrierKind = kNTStoreStore
6336};
6337std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
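
// An illustrative sketch of the recipe above for volatile field accesses. The
// barriers are emitted by the code generator (possibly as HMemoryBarrier nodes);
// the field-access lines are pseudocode, not nodes defined at this point:
//   MemoryBarrier(kAnyStore)    // release barrier
//   <volatile store>
//   MemoryBarrier(kAnyAny)
//
//   <volatile load>
//   MemoryBarrier(kLoadAny)     // acquire barrier
//
//   <constructor stores to final fields>
//   MemoryBarrier(kStoreStore)  // before returning from the constructor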
6338
6339class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6340 public:
6341  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6342      : HTemplateInstruction(
6343            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
6344    SetPackedField<BarrierKindField>(barrier_kind);
6345  }
6346
6347  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6348
6349  DECLARE_INSTRUCTION(MemoryBarrier);
6350
6351 private:
6352  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6353  static constexpr size_t kFieldBarrierKindSize =
6354      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6355  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6356      kFieldBarrierKind + kFieldBarrierKindSize;
6357  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6358                "Too many packed fields.");
6359  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6360
6361  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
6362};
6363
6364class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6365 public:
6366  enum class OperationKind {
6367    kEnter,
6368    kExit,
6369    kLast = kExit
6370  };
6371
6372  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6373    : HTemplateInstruction(
6374          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6375          dex_pc) {
6376    SetPackedField<OperationKindField>(kind);
6377    SetRawInputAt(0, object);
6378  }
6379
6380  // Instruction may go into runtime, so we need an environment.
6381  bool NeedsEnvironment() const OVERRIDE { return true; }
6382
6383  bool CanThrow() const OVERRIDE {
6384    // Verifier guarantees that monitor-exit cannot throw.
6385    // This is important because it allows the HGraphBuilder to remove
6386    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6387    return IsEnter();
6388  }
6389
6390  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6391  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6392
6393  DECLARE_INSTRUCTION(MonitorOperation);
6394
6395 private:
6396  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6397  static constexpr size_t kFieldOperationKindSize =
6398      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6399  static constexpr size_t kNumberOfMonitorOperationPackedBits =
6400      kFieldOperationKind + kFieldOperationKindSize;
6401  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6402                "Too many packed fields.");
6403  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6404
6406  DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
6407};
6408
6409class HSelect FINAL : public HExpression<3> {
6410 public:
6411  HSelect(HInstruction* condition,
6412          HInstruction* true_value,
6413          HInstruction* false_value,
6414          uint32_t dex_pc)
6415      : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
6416    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
6417
6418    // First input must be `true_value` or `false_value` to allow codegens to
6419    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
6420    // that architectures which implement HSelect as a conditional move will
6421    // also not need to invert the condition.
6422    SetRawInputAt(0, false_value);
6423    SetRawInputAt(1, true_value);
6424    SetRawInputAt(2, condition);
6425  }
6426
6427  HInstruction* GetFalseValue() const { return InputAt(0); }
6428  HInstruction* GetTrueValue() const { return InputAt(1); }
6429  HInstruction* GetCondition() const { return InputAt(2); }
6430
6431  bool CanBeMoved() const OVERRIDE { return true; }
6432  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6433    return true;
6434  }
6435
6436  bool CanBeNull() const OVERRIDE {
6437    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
6438  }
6439
6440  DECLARE_INSTRUCTION(Select);
6441
6442 private:
6443  DISALLOW_COPY_AND_ASSIGN(HSelect);
6444};
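
// A minimal construction sketch (assuming `arena`, `cond`, `t`, `f` and `dex_pc`
// come from the surrounding graph-building code). The node computes `cond ? t : f`
// and the input order is fixed by the constructor above:
//   HSelect* select = new (arena) HSelect(cond, t, f, dex_pc);
//   // select->InputAt(0) == f     (false value first, for SameAsFirstInput)
//   // select->InputAt(1) == t     (true value)
//   // select->InputAt(2) == cond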
6445
6446class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
6447 public:
6448  MoveOperands(Location source,
6449               Location destination,
6450               Primitive::Type type,
6451               HInstruction* instruction)
6452      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
6453
6454  Location GetSource() const { return source_; }
6455  Location GetDestination() const { return destination_; }
6456
6457  void SetSource(Location value) { source_ = value; }
6458  void SetDestination(Location value) { destination_ = value; }
6459
6460  // The parallel move resolver marks moves as "in-progress" by clearing the
6461  // destination (but not the source).
6462  Location MarkPending() {
6463    DCHECK(!IsPending());
6464    Location dest = destination_;
6465    destination_ = Location::NoLocation();
6466    return dest;
6467  }
6468
6469  void ClearPending(Location dest) {
6470    DCHECK(IsPending());
6471    destination_ = dest;
6472  }
6473
6474  bool IsPending() const {
6475    DCHECK(source_.IsValid() || destination_.IsInvalid());
6476    return destination_.IsInvalid() && source_.IsValid();
6477  }
6478
6479  // True if this blocks a move from the given location.
6480  bool Blocks(Location loc) const {
6481    return !IsEliminated() && source_.OverlapsWith(loc);
6482  }
6483
6484  // A move is redundant if it's been eliminated, if its source and
6485  // destination are the same, or if its destination is unneeded.
6486  bool IsRedundant() const {
6487    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
6488  }
6489
6490  // We clear both operands to indicate a move that has been eliminated.
6491  void Eliminate() {
6492    source_ = destination_ = Location::NoLocation();
6493  }
6494
6495  bool IsEliminated() const {
6496    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
6497    return source_.IsInvalid();
6498  }
6499
6500  Primitive::Type GetType() const { return type_; }
6501
6502  bool Is64BitMove() const {
6503    return Primitive::Is64BitType(type_);
6504  }
6505
6506  HInstruction* GetInstruction() const { return instruction_; }
6507
6508 private:
6509  Location source_;
6510  Location destination_;
6511  // The type this move is for.
6512  Primitive::Type type_;
6513  // The instruction this move is associated with. Null when this move is
6514  // for moving an input into the expected location of its user (including a phi user).
6515  // This is only used in debug mode, to ensure we do not connect interval siblings
6516  // in the same parallel move.
6517  HInstruction* instruction_;
6518};
6519
6520std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
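
// A minimal sketch of the pending protocol described above, as used by the
// parallel move resolver (which lives outside this file):
//   MoveOperands* move = ...;                     // one entry of an HParallelMove
//   Location saved_dest = move->MarkPending();    // clears the destination
//   DCHECK(move->IsPending());
//   ...                                           // emit moves no longer blocked by `move`
//   move->ClearPending(saved_dest);               // restores the destination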
6521
6522static constexpr size_t kDefaultNumberOfMoves = 4;
6523
6524class HParallelMove FINAL : public HTemplateInstruction<0> {
6525 public:
6526  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
6527      : HTemplateInstruction(SideEffects::None(), dex_pc),
6528        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
6529    moves_.reserve(kDefaultNumberOfMoves);
6530  }
6531
6532  void AddMove(Location source,
6533               Location destination,
6534               Primitive::Type type,
6535               HInstruction* instruction) {
6536    DCHECK(source.IsValid());
6537    DCHECK(destination.IsValid());
6538    if (kIsDebugBuild) {
6539      if (instruction != nullptr) {
6540        for (const MoveOperands& move : moves_) {
6541          if (move.GetInstruction() == instruction) {
6542            // Special case the situation where the move is for the spill slot
6543            // of the instruction.
6544            if ((GetPrevious() == instruction)
6545                || ((GetPrevious() == nullptr)
6546                    && instruction->IsPhi()
6547                    && instruction->GetBlock() == GetBlock())) {
6548              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
6549                  << "Doing parallel moves for the same instruction.";
6550            } else {
6551              DCHECK(false) << "Doing parallel moves for the same instruction.";
6552            }
6553          }
6554        }
6555      }
6556      for (const MoveOperands& move : moves_) {
6557        DCHECK(!destination.OverlapsWith(move.GetDestination()))
6558            << "Overlapped destination for two moves in a parallel move: "
6559            << move.GetSource() << " ==> " << move.GetDestination() << " and "
6560            << source << " ==> " << destination;
6561      }
6562    }
6563    moves_.emplace_back(source, destination, type, instruction);
6564  }
6565
6566  MoveOperands* MoveOperandsAt(size_t index) {
6567    return &moves_[index];
6568  }
6569
6570  size_t NumMoves() const { return moves_.size(); }
6571
6572  DECLARE_INSTRUCTION(ParallelMove);
6573
6574 private:
6575  ArenaVector<MoveOperands> moves_;
6576
6577  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
6578};
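
// A minimal usage sketch (assuming `arena`, the locations and `instruction` come
// from register allocation, where parallel moves are normally created):
//   HParallelMove* moves = new (arena) HParallelMove(arena);
//   moves->AddMove(Location::StackSlot(16),
//                  Location::RegisterLocation(0),
//                  Primitive::kPrimInt,
//                  instruction);
//   // NumMoves() == 1; in debug builds AddMove() rejects overlapping destinations
//   // and duplicate moves for the same instruction.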
6579
6580}  // namespace art
6581
6582#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6583#include "nodes_shared.h"
6584#endif
6585#ifdef ART_ENABLE_CODEGEN_arm
6586#include "nodes_arm.h"
6587#endif
6588#ifdef ART_ENABLE_CODEGEN_arm64
6589#include "nodes_arm64.h"
6590#endif
6591#ifdef ART_ENABLE_CODEGEN_mips
6592#include "nodes_mips.h"
6593#endif
6594#ifdef ART_ENABLE_CODEGEN_x86
6595#include "nodes_x86.h"
6596#endif
6597
6598namespace art {
6599
6600class HGraphVisitor : public ValueObject {
6601 public:
6602  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
6603  virtual ~HGraphVisitor() {}
6604
6605  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
6606  virtual void VisitBasicBlock(HBasicBlock* block);
6607
6608  // Visit the graph following basic block insertion order.
6609  void VisitInsertionOrder();
6610
6611  // Visit the graph following dominator tree reverse post-order.
6612  void VisitReversePostOrder();
6613
6614  HGraph* GetGraph() const { return graph_; }
6615
6616  // Visit functions for instruction classes.
6617#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
6618  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }
6619
6620  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
6621
6622#undef DECLARE_VISIT_INSTRUCTION
6623
6624 private:
6625  HGraph* const graph_;
6626
6627  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
6628};
6629
6630class HGraphDelegateVisitor : public HGraphVisitor {
6631 public:
6632  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
6633  virtual ~HGraphDelegateVisitor() {}
6634
6635  // Visit functions that delegate to the super class.
6636#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
6637  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }
6638
6639  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
6640
6641#undef DECLARE_VISIT_INSTRUCTION
6642
6643 private:
6644  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
6645};
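
// A minimal sketch of a concrete visitor (the class and field names below are
// illustrative, not part of this file). Deriving from HGraphDelegateVisitor makes
// unhandled instruction kinds fall through to their super class visit, so only
// the cases of interest need overriding:
//   class ExampleLoadStringCounter : public HGraphDelegateVisitor {
//    public:
//     explicit ExampleLoadStringCounter(HGraph* graph)
//         : HGraphDelegateVisitor(graph), count_(0u) {}
//     void VisitLoadString(HLoadString* load ATTRIBUTE_UNUSED) OVERRIDE { ++count_; }
//     size_t count_;
//   };
//   // ExampleLoadStringCounter counter(graph); counter.VisitReversePostOrder();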
6646
6647// Iterator over the blocks that are part of the loop. Includes blocks that
6648// are part of an inner loop. The blocks are iterated in increasing
6649// block id order.
6650class HBlocksInLoopIterator : public ValueObject {
6651 public:
6652  explicit HBlocksInLoopIterator(const HLoopInformation& info)
6653      : blocks_in_loop_(info.GetBlocks()),
6654        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
6655        index_(0) {
6656    if (!blocks_in_loop_.IsBitSet(index_)) {
6657      Advance();
6658    }
6659  }
6660
6661  bool Done() const { return index_ == blocks_.size(); }
6662  HBasicBlock* Current() const { return blocks_[index_]; }
6663  void Advance() {
6664    ++index_;
6665    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6666      if (blocks_in_loop_.IsBitSet(index_)) {
6667        break;
6668      }
6669    }
6670  }
6671
6672 private:
6673  const BitVector& blocks_in_loop_;
6674  const ArenaVector<HBasicBlock*>& blocks_;
6675  size_t index_;
6676
6677  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6678};
6679
6680// Iterator over the blocks that are part of the loop. Includes blocks that
6681// are part of an inner loop. The blocks are iterated in reverse
6682// post order.
6683class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6684 public:
6685  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6686      : blocks_in_loop_(info.GetBlocks()),
6687        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
6688        index_(0) {
6689    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6690      Advance();
6691    }
6692  }
6693
6694  bool Done() const { return index_ == blocks_.size(); }
6695  HBasicBlock* Current() const { return blocks_[index_]; }
6696  void Advance() {
6697    ++index_;
6698    for (size_t e = blocks_.size(); index_ < e; ++index_) {
6699      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6700        break;
6701      }
6702    }
6703  }
6704
6705 private:
6706  const BitVector& blocks_in_loop_;
6707  const ArenaVector<HBasicBlock*>& blocks_;
6708  size_t index_;
6709
6710  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
6711};
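
// A minimal usage sketch of the iteration protocol shared by the two iterators
// above (assuming `loop_info` is an HLoopInformation obtained from a loop header):
//   for (HBlocksInLoopIterator it(loop_info); !it.Done(); it.Advance()) {
//     HBasicBlock* block = it.Current();
//     ...  // `block` belongs to the loop, visited in increasing block id order.
//   }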
6712
6713inline int64_t Int64FromConstant(HConstant* constant) {
6714  if (constant->IsIntConstant()) {
6715    return constant->AsIntConstant()->GetValue();
6716  } else if (constant->IsLongConstant()) {
6717    return constant->AsLongConstant()->GetValue();
6718  } else {
6719    DCHECK(constant->IsNullConstant()) << constant->DebugName();
6720    return 0;
6721  }
6722}
6723
6724#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
6725  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
6726  inline const H##type* HInstruction::As##type() const {                       \
6727    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
6728  }                                                                            \
6729  inline H##type* HInstruction::As##type() {                                   \
6730    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
6731  }
6732
6733  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6734#undef INSTRUCTION_TYPE_CHECK
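
// A minimal usage sketch of the generated helpers (HAdd is used purely as an
// example of a concrete instruction kind):
//   if (instruction->IsAdd()) {
//     HAdd* add = instruction->AsAdd();  // non-null, since IsAdd() was checked
//     ...
//   }
//   // AsAdd() returns nullptr when `instruction` is not an HAdd.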
6735
6736// Create space in `blocks` for adding `number_of_new_blocks` entries
6737// immediately after index `after`. Blocks following `after` are moved accordingly.
6738inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
6739                        size_t number_of_new_blocks,
6740                        size_t after) {
6741  DCHECK_LT(after, blocks->size());
6742  size_t old_size = blocks->size();
6743  size_t new_size = old_size + number_of_new_blocks;
6744  blocks->resize(new_size);
6745  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
6746}
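
// A small worked example (block names are illustrative): starting from
// blocks == [B0, B1, B2, B3], MakeRoomFor(&blocks, /* number_of_new_blocks */ 2,
// /* after */ 1) resizes to six entries and shifts B2 and B3 to the end:
//   [B0, B1, <stale>, <stale>, B2, B3]
// The two entries right after B1 still hold stale pointers and are expected to be
// overwritten by the caller with the new blocks.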
6747
6748}  // namespace art
6749
6750#endif  // ART_COMPILER_OPTIMIZING_NODES_H_
6751